Merge pull request #21042 from A4-Tacks/qualified-top Improve assist qualified to top when on first segment
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index b6f430f..2891411 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml
@@ -140,7 +140,7 @@ if: matrix.target == 'x86_64-unknown-linux-gnu' env: RUSTC_BOOTSTRAP: 1 - run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std -q + run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/std -q - name: Upload artifacts uses: actions/upload-artifact@v4
diff --git a/Cargo.lock b/Cargo.lock index 12b5f8a..c1dbe6a 100644 --- a/Cargo.lock +++ b/Cargo.lock
@@ -18,12 +18,27 @@ checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] name = "allocator-api2" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] name = "anstyle" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -108,6 +123,21 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + +[[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -135,6 +165,12 @@ checksum = "36f64beae40a84da1b4b26ff2761a5b895c12adc41dc25aaee1c4f2bbfe97a6e" [[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] name = "byteorder" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -151,54 +187,21 @@ [[package]] name = "cargo-platform" -version = "0.2.0" +version = "0.3.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "84982c6c0ae343635a3a4ee6dedef965513735c8b183caa7289fa6e27399ebd4" +checksum = "122ec45a44b270afd1402f351b782c676b173e3c3fb28d86ff7ebfb4d86a4ee4" dependencies = [ "serde", ] [[package]] -name = "cargo-util-schemas" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e63d2780ac94487eb9f1fea7b0d56300abc9eb488800854ca217f102f5caccca" -dependencies = [ - "semver", - "serde", - "serde-untagged", - "serde-value", - "thiserror 1.0.69", - "toml", - "unicode-xid", - "url", -] - -[[package]] -name = "cargo-util-schemas" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dc1a6f7b5651af85774ae5a34b4e8be397d9cf4bc063b7e6dbd99a841837830" -dependencies = [ - "semver", - "serde", - "serde-untagged", - "serde-value", - "thiserror 2.0.16", - "toml", - "unicode-xid", - "url", -] - -[[package]] name = "cargo_metadata" -version = "0.20.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f7835cfc6135093070e95eb2b53e5d9b5c403dc3a6be6040ee026270aa82502" +checksum = "981a6f317983eec002839b90fae7411a85621410ae591a9cab2ecf5cb5744873" dependencies = [ "camino", "cargo-platform", - "cargo-util-schemas 0.2.0", "semver", "serde", "serde_json", @@ -206,19 +209,10 @@ ] [[package]] -name = "cargo_metadata" -version = "0.21.0" +name = "cast" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cfca2aaa699835ba88faf58a06342a314a950d2b9686165e038286c30316868" -dependencies = [ - "camino", - "cargo-platform", - "cargo-util-schemas 0.8.2", - "semver", - "serde", - "serde_json", - "thiserror 2.0.16", -] +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" @@ -259,6 +253,33 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] +name = "ciborium" +version = "0.2.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] name = "clap" version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -314,6 +335,39 @@ ] [[package]] +name = "criterion" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1c047a62b0cc3e145fa84415a3191f628e980b194c2755aa12300a4e6cbd928" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "itertools 0.13.0", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b1bcc0dc7dfae599d84ad0b1a55f80cde8af3725da8313b528da95ef783e338" +dependencies = [ + "cast", + "itertools 0.13.0", +] + +[[package]] name = "critical-section" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -363,6 +417,12 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] name = "ctrlc" version = "3.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" @@ -516,14 +576,13 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] -name = "erased-serde" -version = "0.4.8" +name = "errno" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "259d404d09818dec19332e31d94558aeb442fea04c817006456c24b5460bbd4b" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ - "serde", - "serde_core", - "typeid", + "libc", + "windows-sys 0.61.0", ] [[package]] @@ -537,6 +596,12 @@ ] [[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] name = "find-msvc-tools" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -559,6 +624,12 @@ ] [[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] name = "foldhash" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -600,12 +671,35 @@ ] [[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", +] + +[[package]] name = "gimli" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] +name = "half" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" +dependencies = [ + "cfg-if", + "crunchy", + "zerocopy", +] + +[[package]] name = 
"hash32" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -680,7 +774,7 @@ "hir-ty", "indexmap", "intern", - "itertools", + "itertools 0.14.0", "ra-ap-rustc_type_ir", "rustc-hash 2.1.1", "smallvec", @@ -713,7 +807,7 @@ "hir-expand", "indexmap", "intern", - "itertools", + "itertools 0.14.0", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "mbe", "query-group-macro", @@ -730,7 +824,7 @@ "syntax-bridge", "test-fixture", "test-utils", - "text-size", + "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "thin-vec", "tracing", "triomphe", @@ -747,7 +841,7 @@ "either", "expect-test", "intern", - "itertools", + "itertools 0.14.0", "mbe", "parser", "query-group-macro", @@ -779,7 +873,7 @@ "hir-expand", "indexmap", "intern", - "itertools", + "itertools 0.14.0", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "macros", "oorandom", @@ -921,7 +1015,7 @@ "ide-db", "ide-diagnostics", "ide-ssr", - "itertools", + "itertools 0.14.0", "macros", "nohash-hasher", "oorandom", @@ -950,7 +1044,7 @@ "expect-test", "hir", "ide-db", - "itertools", + "itertools 0.14.0", "smallvec", "stdx", "syntax", @@ -968,7 +1062,7 @@ "expect-test", "hir", "ide-db", - "itertools", + "itertools 0.14.0", "macros", "smallvec", "stdx", @@ -992,7 +1086,7 @@ "fst", "hir", "indexmap", - "itertools", + "itertools 0.14.0", "line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "macros", "memchr", @@ -1025,7 +1119,7 @@ "expect-test", "hir", "ide-db", - "itertools", + "itertools 0.14.0", "paths", "serde_json", "stdx", @@ -1043,7 +1137,7 @@ "expect-test", "hir", "ide-db", - "itertools", + "itertools 0.14.0", "parser", "syntax", "test-fixture", @@ -1134,6 +1228,15 @@ [[package]] name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", 
+] + +[[package]] +name = "itertools" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" @@ -1154,6 +1257,16 @@ checksum = "a037eddb7d28de1d0fc42411f501b53b75838d313908078d6698d064f3029b24" [[package]] +name = "js-sys" +version = "0.3.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] name = "kqueue" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1231,7 +1344,7 @@ dependencies = [ "nohash-hasher", "oorandom", - "text-size", + "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1241,10 +1354,16 @@ checksum = "3e27e0ed5a392a7f5ba0b3808a2afccff16c64933312c84b57618b49d1209bd2" dependencies = [ "nohash-hasher", - "text-size", + "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] name = "litemap" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1259,7 +1378,7 @@ "hir-expand", "ide-db", "intern", - "itertools", + "itertools 0.14.0", "proc-macro-api", "project-model", "span", @@ -1539,15 +1658,6 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] -name = "ordered-float" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" -dependencies = [ - "num-traits", -] - -[[package]] name = "parking_lot" version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1640,6 +1750,34 
@@ checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + +[[package]] name = "portable-atomic" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1672,6 +1810,15 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] name = "proc-macro-api" version = "0.0.0" dependencies = [ @@ -1722,7 +1869,7 @@ name = "proc-macro-test" version = "0.0.0" dependencies = [ - "cargo_metadata 0.20.0", + "cargo_metadata", ] [[package]] @@ -1763,11 +1910,11 @@ dependencies = [ "anyhow", "base-db", - "cargo_metadata 0.21.0", + "cargo_metadata", "cfg", "expect-test", "intern", - "itertools", + "itertools 0.14.0", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", "rustc-hash 2.1.1", @@ -1778,12 +1925,32 @@ "span", "stdx", "temp-dir", + "toml", "toolchain", "tracing", "triomphe", ] [[package]] +name = "proptest" +version = "1.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" +dependencies = [ + "bit-set", + "bit-vec", + "bitflags 2.9.4", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax", + "rusty-fork", + "tempfile", + "unarray", +] + +[[package]] name = "protobuf" version = "3.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1836,6 +2003,12 @@ ] [[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] name = "quote" version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1845,6 +2018,12 @@ ] [[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] name = "ra-ap-rustc_abi" version = "0.137.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1972,6 +2151,44 @@ ] [[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rand_xorshift" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" +dependencies = [ + "rand_core", +] + +[[package]] name = "rayon" version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2006,12 +2223,41 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ - "getrandom", + "getrandom 0.2.16", "libredox", "thiserror 2.0.16", ] [[package]] +name = "regex" +version = "1.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" + +[[package]] name = "rowan" version = "0.15.15" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2021,7 +2267,7 @@ "hashbrown 0.14.5", "memoffset", "rustc-hash 1.1.0", - "text-size", + "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2030,7 +2276,7 @@ dependencies = [ "anyhow", "base64", - "cargo_metadata 0.21.0", + "cargo_metadata", "cfg", "crossbeam-channel", "dhat", @@ -2046,7 +2292,7 @@ "ide-ssr", "indexmap", "intern", - "itertools", + "itertools 0.14.0", "load-cargo", "lsp-server 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", @@ -2148,12 +2394,37 @@ ] [[package]] +name = "rustix" +version = 
"1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" +dependencies = [ + "bitflags 2.9.4", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.0", +] + +[[package]] name = "rustversion" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] +name = "rusty-fork" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + +[[package]] name = "ryu" version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2253,28 +2524,6 @@ ] [[package]] -name = "serde-untagged" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058" -dependencies = [ - "erased-serde", - "serde", - "serde_core", - "typeid", -] - -[[package]] -name = "serde-value" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" -dependencies = [ - "ordered-float", - "serde", -] - -[[package]] name = "serde_core" version = "1.0.226" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2321,9 +2570,18 @@ [[package]] name = "serde_spanned" -version = "0.6.9" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" +dependencies = [ + "serde_core", +] + +[[package]] +name = "serde_test" +version = "1.0.177" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "7f901ee573cab6b3060453d2d5f0bae4e6d628c23c0a962ff9b5f1d7c8d4f1ed" dependencies = [ "serde", ] @@ -2360,6 +2618,20 @@ ] [[package]] +name = "smol_str" +version = "0.3.4" +dependencies = [ + "arbitrary", + "borsh", + "criterion", + "proptest", + "rand", + "serde", + "serde_core", + "serde_json", +] + +[[package]] name = "span" version = "0.0.0" dependencies = [ @@ -2369,7 +2641,7 @@ "salsa", "stdx", "syntax", - "text-size", + "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "vfs", ] @@ -2389,13 +2661,19 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] name = "stdx" version = "0.0.0" dependencies = [ "backtrace", "crossbeam-channel", "crossbeam-utils", - "itertools", + "itertools 0.14.0", "jod-thread", "libc", "miow", @@ -2431,14 +2709,14 @@ dependencies = [ "either", "expect-test", - "itertools", + "itertools 0.14.0", "parser", "rayon", "rowan", "rustc-hash 2.1.1", "rustc-literal-escaper 0.0.4", "rustc_apfloat", - "smol_str", + "smol_str 0.3.2", "stdx", "test-utils", "tracing", @@ -2466,6 +2744,19 @@ checksum = "83176759e9416cf81ee66cb6508dbfe9c96f20b8b56265a39917551c23c70964" [[package]] +name = "tempfile" +version = "3.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" +dependencies = [ + "fastrand", + "getrandom 0.3.4", + "once_cell", + "rustix", + "windows-sys 0.61.0", +] + +[[package]] name = "tenthash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2497,7 +2788,16 @@ "profile", "rustc-hash 2.1.1", "stdx", - "text-size", + "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "text-size" 
+version = "1.1.1" +dependencies = [ + "serde", + "serde_test", + "static_assertions", ] [[package]] @@ -2642,45 +2942,53 @@ ] [[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] name = "toml" -version = "0.8.23" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit", -] - -[[package]] -name = "toml_datetime" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_edit" -version = "0.22.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" dependencies = [ "indexmap", - "serde", + "serde_core", "serde_spanned", "toml_datetime", - "toml_write", + "toml_parser", + "toml_writer", "winnow", ] [[package]] -name = "toml_write" -version = "0.1.2" +name = "toml_datetime" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_parser" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_writer" +version = "1.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2" [[package]] name = "toolchain" @@ -2772,7 +3080,7 @@ "intern", "ra-ap-rustc_lexer", "stdx", - "text-size", + "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2782,10 +3090,14 @@ checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a" [[package]] -name = "typeid" -version = "1.0.3" +name = "unarray" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + +[[package]] +name = "ungrammar" +version = "1.16.1" [[package]] name = "ungrammar" @@ -2794,6 +3106,14 @@ checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f" [[package]] +name = "ungrammar2json" +version = "1.0.0" +dependencies = [ + "ungrammar 1.16.1", + "write-json", +] + +[[package]] name = "unicase" version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2871,6 +3191,15 @@ ] [[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + +[[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2887,6 +3216,70 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] name = "winapi-util" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -3164,9 +3557,12 @@ version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" -dependencies = [ - "memchr", -] + +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "write-json" @@ -3218,12 +3614,12 @@ "edition", "either", "flate2", - "itertools", + "itertools 0.14.0", "proc-macro2", "quote", "stdx", "time", - "ungrammar", 
+ "ungrammar 1.16.1 (registry+https://github.com/rust-lang/crates.io-index)", "write-json", "xflags", "xshell", @@ -3255,6 +3651,26 @@ ] [[package]] +name = "zerocopy" +version = "0.8.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] name = "zerofrom" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml index ecb2686..35f2fe4 100644 --- a/Cargo.toml +++ b/Cargo.toml
@@ -1,5 +1,5 @@ [workspace] -members = ["xtask/", "lib/*", "crates/*"] +members = ["xtask/", "lib/*", "lib/ungrammar/ungrammar2json", "crates/*"] exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" @@ -42,7 +42,7 @@ # lsp-server = { path = "lib/lsp-server" } -# ungrammar = { path = "../ungrammar" } +# ungrammar = { path = "lib/ungrammar" } # salsa = { path = "../salsa" } # salsa-macros = { path = "../salsa/components/salsa-macros" } @@ -106,7 +106,7 @@ anyhow = "1.0.98" arrayvec = "0.7.6" bitflags = "2.9.1" -cargo_metadata = "0.21.0" +cargo_metadata = "0.23.0" camino = "1.1.10" crossbeam-channel = "0.5.15" dissimilar = "1.0.10" @@ -134,10 +134,11 @@ rowan = "=0.15.15" # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work # on impls without it -salsa = { version = "0.24.0", default-features = true, features = [ +salsa = { version = "0.24.0", default-features = false, features = [ "rayon", "salsa_unstable", "macros", + "inventory", ] } salsa-macros = "0.24.0" semver = "1.0.26" @@ -154,6 +155,7 @@ smol_str = "0.3.2" temp-dir = "0.1.16" text-size = "1.1.1" +toml = "0.9.8" tracing = "0.1.41" tracing-tree = "0.4.0" tracing-subscriber = { version = "0.3.20", default-features = false, features = [
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index cac7477..ffd82d5 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs
@@ -867,6 +867,10 @@ pub fn insert(&mut self, k: impl Into<String>, v: impl Into<String>) -> Option<String> { self.entries.insert(k.into(), v.into()) } + + pub fn contains_key(&self, arg: &str) -> bool { + self.entries.contains_key(arg) + } } impl From<Env> for Vec<(String, String)> {
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 0e411bc..90e0aa9 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs
@@ -273,7 +273,7 @@ fn transitive_rev_deps(&self, of: Crate) -> FxHashSet<Crate>; } -pub fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet<Crate> { +fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet<Crate> { // There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible // and removing that is a bit difficult. let mut worklist = vec![crate_id];
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs index 3794cb1..e3bfc5b 100644 --- a/crates/hir-def/src/expr_store/lower.rs +++ b/crates/hir-def/src/expr_store/lower.rs
@@ -2409,7 +2409,11 @@ }; let start = range_part_lower(p.start()); let end = range_part_lower(p.end()); - Pat::Range { start, end } + // FIXME: Exclusive ended pattern range is stabilised + match p.op_kind() { + Some(range_type) => Pat::Range { start, end, range_type }, + None => Pat::Missing, + } } }; let ptr = AstPtr::new(&pat);
diff --git a/crates/hir-def/src/expr_store/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs index 5b9da3c..4ba7093 100644 --- a/crates/hir-def/src/expr_store/pretty.rs +++ b/crates/hir-def/src/expr_store/pretty.rs
@@ -9,7 +9,7 @@ use hir_expand::{Lookup, mod_path::PathKind}; use itertools::Itertools; use span::Edition; -use syntax::ast::HasName; +use syntax::ast::{HasName, RangeOp}; use crate::{ AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId, @@ -510,7 +510,22 @@ } fn print_expr(&mut self, expr: ExprId) { + self.print_expr_in(None, expr); + } + + fn print_expr_in(&mut self, prec: Option<ast::prec::ExprPrecedence>, expr: ExprId) { let expr = &self.store[expr]; + let needs_parens = match (prec, expr.precedence()) { + (Some(ast::prec::ExprPrecedence::LOr), ast::prec::ExprPrecedence::LOr) => false, + (Some(ast::prec::ExprPrecedence::LAnd), ast::prec::ExprPrecedence::LAnd) => false, + (Some(parent), prec) => prec.needs_parentheses_in(parent), + (None, _) => false, + }; + let prec = Some(expr.precedence()); + + if needs_parens { + w!(self, "("); + } match expr { Expr::Missing => w!(self, "�"), @@ -544,7 +559,7 @@ w!(self, "let "); self.print_pat(*pat); w!(self, " = "); - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } Expr::Loop { body, label } => { if let Some(lbl) = label { @@ -554,7 +569,7 @@ self.print_expr(*body); } Expr::Call { callee, args } => { - self.print_expr(*callee); + self.print_expr_in(prec, *callee); w!(self, "("); if !args.is_empty() { self.indented(|p| { @@ -567,7 +582,7 @@ w!(self, ")"); } Expr::MethodCall { receiver, method_name, args, generic_args } => { - self.print_expr(*receiver); + self.print_expr_in(prec, *receiver); w!(self, ".{}", method_name.display(self.db, self.edition)); if let Some(args) = generic_args { w!(self, "::<"); @@ -616,26 +631,26 @@ } if let Some(expr) = expr { self.whitespace(); - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } } Expr::Return { expr } => { w!(self, "return"); if let Some(expr) = expr { self.whitespace(); - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } } Expr::Become { expr } => { w!(self, "become"); self.whitespace(); - self.print_expr(*expr); + 
self.print_expr_in(prec, *expr); } Expr::Yield { expr } => { w!(self, "yield"); if let Some(expr) = expr { self.whitespace(); - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } } Expr::Yeet { expr } => { @@ -644,7 +659,7 @@ w!(self, "yeet"); if let Some(expr) = expr { self.whitespace(); - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } } Expr::RecordLit { path, fields, spread } => { @@ -670,15 +685,15 @@ w!(self, "}}"); } Expr::Field { expr, name } => { - self.print_expr(*expr); + self.print_expr_in(prec, *expr); w!(self, ".{}", name.display(self.db, self.edition)); } Expr::Await { expr } => { - self.print_expr(*expr); + self.print_expr_in(prec, *expr); w!(self, ".await"); } Expr::Cast { expr, type_ref } => { - self.print_expr(*expr); + self.print_expr_in(prec, *expr); w!(self, " as "); self.print_type_ref(*type_ref); } @@ -690,11 +705,11 @@ if mutability.is_mut() { w!(self, "mut "); } - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } Expr::Box { expr } => { w!(self, "box "); - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } Expr::UnaryOp { expr, op } => { let op = match op { @@ -703,43 +718,32 @@ ast::UnaryOp::Neg => "-", }; w!(self, "{}", op); - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } Expr::BinaryOp { lhs, rhs, op } => { - let (bra, ket) = match op { - None | Some(ast::BinaryOp::Assignment { .. 
}) => ("", ""), - _ => ("(", ")"), - }; - w!(self, "{}", bra); - self.print_expr(*lhs); - w!(self, "{} ", ket); + self.print_expr_in(prec, *lhs); + self.whitespace(); match op { Some(op) => w!(self, "{}", op), None => w!(self, "�"), // :) } - w!(self, " {}", bra); - self.print_expr(*rhs); - w!(self, "{}", ket); + self.whitespace(); + self.print_expr_in(prec, *rhs); } Expr::Range { lhs, rhs, range_type } => { if let Some(lhs) = lhs { - w!(self, "("); - self.print_expr(*lhs); - w!(self, ") "); + self.print_expr_in(prec, *lhs); } - let range = match range_type { - ast::RangeOp::Exclusive => "..", - ast::RangeOp::Inclusive => "..=", + match range_type { + RangeOp::Exclusive => w!(self, ".."), + RangeOp::Inclusive => w!(self, "..="), }; - w!(self, "{}", range); if let Some(rhs) = rhs { - w!(self, "("); - self.print_expr(*rhs); - w!(self, ") "); + self.print_expr_in(prec, *rhs); } } Expr::Index { base, index } => { - self.print_expr(*base); + self.print_expr_in(prec, *base); w!(self, "["); self.print_expr(*index); w!(self, "]"); @@ -826,9 +830,13 @@ &Expr::Assignment { target, value } => { self.print_pat(target); w!(self, " = "); - self.print_expr(value); + self.print_expr_in(prec, value); } } + + if needs_parens { + w!(self, ")"); + } } fn print_block( @@ -857,6 +865,7 @@ } fn print_pat(&mut self, pat: PatId) { + let prec = Some(ast::prec::ExprPrecedence::Shift); let pat = &self.store[pat]; match pat { @@ -928,13 +937,16 @@ }); w!(self, "}}"); } - Pat::Range { start, end } => { + Pat::Range { start, end, range_type } => { if let Some(start) = start { - self.print_expr(*start); + self.print_expr_in(prec, *start); } - w!(self, "..="); + match range_type { + RangeOp::Inclusive => w!(self, "..="), + RangeOp::Exclusive => w!(self, ".."), + } if let Some(end) = end { - self.print_expr(*end); + self.print_expr_in(prec, *end); } } Pat::Slice { prefix, slice, suffix } => { @@ -954,7 +966,7 @@ w!(self, "]"); } Pat::Path(path) => self.print_path(path), - Pat::Lit(expr) => 
self.print_expr(*expr), + Pat::Lit(expr) => self.print_expr_in(prec, *expr), Pat::Bind { id, subpat } => { self.print_binding(*id); if let Some(pat) = subpat { @@ -996,7 +1008,7 @@ self.print_expr(*c); } Pat::Expr(expr) => { - self.print_expr(*expr); + self.print_expr_in(prec, *expr); } } } @@ -1181,7 +1193,9 @@ pub(crate) fn print_generic_arg(&mut self, arg: &GenericArg) { match arg { GenericArg::Type(ty) => self.print_type_ref(*ty), - GenericArg::Const(ConstRef { expr }) => self.print_expr(*expr), + GenericArg::Const(ConstRef { expr }) => { + self.print_expr_in(Some(ast::prec::ExprPrecedence::Unambiguous), *expr) + } GenericArg::Lifetime(lt) => self.print_lifetime_ref(*lt), } }
diff --git a/crates/hir-def/src/expr_store/tests/body.rs b/crates/hir-def/src/expr_store/tests/body.rs index c31428b..4a77556 100644 --- a/crates/hir-def/src/expr_store/tests/body.rs +++ b/crates/hir-def/src/expr_store/tests/body.rs
@@ -159,7 +159,7 @@ expect![[r#" fn main() { match builtin#lang(into_iter)( - (0) ..(10) , + 0..10, ) { mut <ra@gennew>11 => loop { match builtin#lang(next)( @@ -580,7 +580,7 @@ let MatchArm { pat, .. } = mtch_arms[1]; match body[pat] { - Pat::Range { start, end } => { + Pat::Range { start, end, range_type: _ } => { let hir_start = &body[start.unwrap()]; let hir_end = &body[end.unwrap()]; @@ -590,3 +590,30 @@ _ => {} } } + +#[test] +fn print_hir_precedences() { + let (db, body, def) = lower( + r#" +fn main() { + _ = &(1 - (2 - 3) + 4 * 5 * (6 + 7)); + _ = 1 + 2 < 3 && true && 4 < 5 && (a || b || c) || d && e; + if let _ = 2 && true && let _ = 3 {} + break a && b || (return) || (return 2); + let r = &2; + let _ = &mut (*r as i32) +} +"#, + ); + + expect![[r#" + fn main() { + _ = &((1 - (2 - 3)) + (4 * 5) * (6 + 7)); + _ = 1 + 2 < 3 && true && 4 < 5 && (a || b || c) || d && e; + if let _ = 2 && true && let _ = 3 {} + break a && b || (return) || (return 2); + let r = &2; + let _ = &mut (*r as i32); + }"#]] + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) +}
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index e70cd2c..66eade2 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs
@@ -322,6 +322,72 @@ InlineAsm(InlineAsm), } +impl Expr { + pub fn precedence(&self) -> ast::prec::ExprPrecedence { + use ast::prec::ExprPrecedence; + + match self { + Expr::Array(_) + | Expr::InlineAsm(_) + | Expr::Block { .. } + | Expr::Unsafe { .. } + | Expr::Const(_) + | Expr::Async { .. } + | Expr::If { .. } + | Expr::Literal(_) + | Expr::Loop { .. } + | Expr::Match { .. } + | Expr::Missing + | Expr::Path(_) + | Expr::RecordLit { .. } + | Expr::Tuple { .. } + | Expr::OffsetOf(_) + | Expr::Underscore => ExprPrecedence::Unambiguous, + + Expr::Await { .. } + | Expr::Call { .. } + | Expr::Field { .. } + | Expr::Index { .. } + | Expr::MethodCall { .. } => ExprPrecedence::Postfix, + + Expr::Box { .. } | Expr::Let { .. } | Expr::UnaryOp { .. } | Expr::Ref { .. } => { + ExprPrecedence::Prefix + } + + Expr::Cast { .. } => ExprPrecedence::Cast, + + Expr::BinaryOp { op, .. } => match op { + None => ExprPrecedence::Unambiguous, + Some(BinaryOp::LogicOp(LogicOp::Or)) => ExprPrecedence::LOr, + Some(BinaryOp::LogicOp(LogicOp::And)) => ExprPrecedence::LAnd, + Some(BinaryOp::CmpOp(_)) => ExprPrecedence::Compare, + Some(BinaryOp::Assignment { .. }) => ExprPrecedence::Assign, + Some(BinaryOp::ArithOp(arith_op)) => match arith_op { + ArithOp::Add | ArithOp::Sub => ExprPrecedence::Sum, + ArithOp::Mul | ArithOp::Div | ArithOp::Rem => ExprPrecedence::Product, + ArithOp::Shl | ArithOp::Shr => ExprPrecedence::Shift, + ArithOp::BitXor => ExprPrecedence::BitXor, + ArithOp::BitOr => ExprPrecedence::BitOr, + ArithOp::BitAnd => ExprPrecedence::BitAnd, + }, + }, + + Expr::Assignment { .. } => ExprPrecedence::Assign, + + Expr::Become { .. } + | Expr::Break { .. } + | Expr::Closure { .. } + | Expr::Return { .. } + | Expr::Yeet { .. } + | Expr::Yield { .. } => ExprPrecedence::Jump, + + Expr::Continue { .. } => ExprPrecedence::Unambiguous, + + Expr::Range { .. 
} => ExprPrecedence::Range, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct OffsetOf { pub container: TypeRefId, @@ -595,6 +661,7 @@ Range { start: Option<ExprId>, end: Option<ExprId>, + range_type: RangeOp, }, Slice { prefix: Box<[PatId]>,
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index abcf0a3..b5afbf3 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs
@@ -708,6 +708,20 @@ self.item_scope_().0 } + #[inline] + pub fn top_level_def_map(&self) -> &'db DefMap { + self.module_scope.def_map + } + + #[inline] + pub fn is_visible(&self, db: &dyn DefDatabase, visibility: Visibility) -> bool { + visibility.is_visible_from_def_map( + db, + self.module_scope.def_map, + self.module_scope.module_id, + ) + } + pub fn generic_def(&self) -> Option<GenericDefId> { self.scopes().find_map(|scope| match scope { Scope::GenericParams { def, .. } => Some(*def),
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs index 392b0b0..d21108f 100644 --- a/crates/hir-ty/src/autoderef.rs +++ b/crates/hir-ty/src/autoderef.rs
@@ -13,11 +13,11 @@ use crate::{ TraitEnvironment, db::HirDatabase, - infer::unify::InferenceTable, + infer::InferenceContext, next_solver::{ - Canonical, TraitRef, Ty, TyKind, + Canonical, DbInterner, ParamEnv, TraitRef, Ty, TyKind, TypingMode, infer::{ - InferOk, + DbInternerInferExt, InferCtxt, traits::{Obligation, ObligationCause, PredicateObligations}, }, obligation_ctxt::ObligationCtxt, @@ -38,14 +38,15 @@ env: Arc<TraitEnvironment<'db>>, ty: Canonical<'db, Ty<'db>>, ) -> impl Iterator<Item = Ty<'db>> + use<'db> { - let mut table = InferenceTable::new(db, env, None); - let ty = table.instantiate_canonical(ty); - let mut autoderef = Autoderef::new_no_tracking(&mut table, ty); + let interner = DbInterner::new_with(db, Some(env.krate), env.block); + let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis); + let (ty, _) = infcx.instantiate_canonical(&ty); + let autoderef = Autoderef::new(&infcx, &env, ty); let mut v = Vec::new(); - while let Some((ty, _steps)) = autoderef.next() { + for (ty, _steps) in autoderef { // `ty` may contain unresolved inference variables. Since there's no chance they would be // resolved, just replace with fallback type. - let resolved = autoderef.table.resolve_completely(ty); + let resolved = infcx.resolve_vars_if_possible(ty).replace_infer_with_error(interner); // If the deref chain contains a cycle (e.g. `A` derefs to `B` and `B` derefs to `A`), we // would revisit some already visited types. Stop here to avoid duplication. @@ -105,13 +106,48 @@ trait_target: TypeAliasId, } +// We use a trait here and a generic implementation unfortunately, because sometimes (specifically +// in place_op.rs), you need to have mutable access to the `InferenceContext` while the `Autoderef` +// borrows it. 
+pub(crate) trait AutoderefCtx<'db> { + fn infcx(&self) -> &InferCtxt<'db>; + fn env(&self) -> &TraitEnvironment<'db>; +} + +pub(crate) struct DefaultAutoderefCtx<'a, 'db> { + infcx: &'a InferCtxt<'db>, + env: &'a TraitEnvironment<'db>, +} +impl<'db> AutoderefCtx<'db> for DefaultAutoderefCtx<'_, 'db> { + #[inline] + fn infcx(&self) -> &InferCtxt<'db> { + self.infcx + } + #[inline] + fn env(&self) -> &TraitEnvironment<'db> { + self.env + } +} + +pub(crate) struct InferenceContextAutoderefCtx<'a, 'b, 'db>(&'a mut InferenceContext<'b, 'db>); +impl<'db> AutoderefCtx<'db> for InferenceContextAutoderefCtx<'_, '_, 'db> { + #[inline] + fn infcx(&self) -> &InferCtxt<'db> { + &self.0.table.infer_ctxt + } + #[inline] + fn env(&self) -> &TraitEnvironment<'db> { + &self.0.table.trait_env + } +} + /// Recursively dereference a type, considering both built-in /// dereferences (`*`) and the `Deref` trait. /// Although called `Autoderef` it can be configured to use the /// `Receiver` trait instead of the `Deref` trait. 
-pub(crate) struct Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> { +pub(crate) struct GeneralAutoderef<'db, Ctx, Steps = Vec<(Ty<'db>, AutoderefKind)>> { // Meta infos: - pub(crate) table: &'a mut InferenceTable<'db>, + ctx: Ctx, traits: Option<AutoderefTraits>, // Current state: @@ -122,7 +158,16 @@ use_receiver_trait: bool, } -impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Iterator for Autoderef<'a, 'db, Steps> { +pub(crate) type Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> = + GeneralAutoderef<'db, DefaultAutoderefCtx<'a, 'db>, Steps>; +pub(crate) type InferenceContextAutoderef<'a, 'b, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> = + GeneralAutoderef<'db, InferenceContextAutoderefCtx<'a, 'b, 'db>, Steps>; + +impl<'db, Ctx, Steps> Iterator for GeneralAutoderef<'db, Ctx, Steps> +where + Ctx: AutoderefCtx<'db>, + Steps: TrackAutoderefSteps<'db>, +{ type Item = (Ty<'db>, usize); fn next(&mut self) -> Option<Self::Item> { @@ -148,26 +193,26 @@ // be better to skip this clause and use the Overloaded case only, since &T // and &mut T implement Receiver. But built-in derefs apply equally to Receiver // and Deref, and this has benefits for const and the emitted MIR. - let (kind, new_ty) = if let Some(ty) = - self.state.cur_ty.builtin_deref(self.table.db, self.include_raw_pointers) - { - debug_assert_eq!(ty, self.table.infer_ctxt.resolve_vars_if_possible(ty)); - // NOTE: we may still need to normalize the built-in deref in case - // we have some type like `&<Ty as Trait>::Assoc`, since users of - // autoderef expect this type to have been structurally normalized. - if let TyKind::Alias(..) 
= ty.kind() { - let (normalized_ty, obligations) = structurally_normalize_ty(self.table, ty)?; - self.state.obligations.extend(obligations); - (AutoderefKind::Builtin, normalized_ty) + let (kind, new_ty) = + if let Some(ty) = self.state.cur_ty.builtin_deref(self.include_raw_pointers) { + debug_assert_eq!(ty, self.infcx().resolve_vars_if_possible(ty)); + // NOTE: we may still need to normalize the built-in deref in case + // we have some type like `&<Ty as Trait>::Assoc`, since users of + // autoderef expect this type to have been structurally normalized. + if let TyKind::Alias(..) = ty.kind() { + let (normalized_ty, obligations) = + structurally_normalize_ty(self.infcx(), self.env().env, ty)?; + self.state.obligations.extend(obligations); + (AutoderefKind::Builtin, normalized_ty) + } else { + (AutoderefKind::Builtin, ty) + } + } else if let Some(ty) = self.overloaded_deref_ty(self.state.cur_ty) { + // The overloaded deref check already normalizes the pointee type. + (AutoderefKind::Overloaded, ty) } else { - (AutoderefKind::Builtin, ty) - } - } else if let Some(ty) = self.overloaded_deref_ty(self.state.cur_ty) { - // The overloaded deref check already normalizes the pointee type. 
- (AutoderefKind::Overloaded, ty) - } else { - return None; - }; + return None; + }; self.state.steps.push(self.state.cur_ty, kind); debug!( @@ -183,34 +228,84 @@ } impl<'a, 'db> Autoderef<'a, 'db> { - pub(crate) fn new(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self { - Self::new_impl(table, base_ty) + #[inline] + pub(crate) fn new_with_tracking( + infcx: &'a InferCtxt<'db>, + env: &'a TraitEnvironment<'db>, + base_ty: Ty<'db>, + ) -> Self { + Self::new_impl(DefaultAutoderefCtx { infcx, env }, base_ty) + } +} + +impl<'a, 'b, 'db> InferenceContextAutoderef<'a, 'b, 'db> { + #[inline] + pub(crate) fn new_from_inference_context( + ctx: &'a mut InferenceContext<'b, 'db>, + base_ty: Ty<'db>, + ) -> Self { + Self::new_impl(InferenceContextAutoderefCtx(ctx), base_ty) + } + + #[inline] + pub(crate) fn ctx(&mut self) -> &mut InferenceContext<'b, 'db> { + self.ctx.0 } } impl<'a, 'db> Autoderef<'a, 'db, usize> { - pub(crate) fn new_no_tracking(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self { - Self::new_impl(table, base_ty) + #[inline] + pub(crate) fn new( + infcx: &'a InferCtxt<'db>, + env: &'a TraitEnvironment<'db>, + base_ty: Ty<'db>, + ) -> Self { + Self::new_impl(DefaultAutoderefCtx { infcx, env }, base_ty) } } -impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> { - fn new_impl(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self { - Autoderef { +impl<'db, Ctx, Steps> GeneralAutoderef<'db, Ctx, Steps> +where + Ctx: AutoderefCtx<'db>, + Steps: TrackAutoderefSteps<'db>, +{ + #[inline] + fn new_impl(ctx: Ctx, base_ty: Ty<'db>) -> Self { + GeneralAutoderef { state: AutoderefSnapshot { steps: Steps::default(), - cur_ty: table.infer_ctxt.resolve_vars_if_possible(base_ty), + cur_ty: ctx.infcx().resolve_vars_if_possible(base_ty), obligations: PredicateObligations::new(), at_start: true, reached_recursion_limit: false, }, - table, + ctx, traits: None, include_raw_pointers: false, use_receiver_trait: false, } } + 
#[inline] + fn infcx(&self) -> &InferCtxt<'db> { + self.ctx.infcx() + } + + #[inline] + fn env(&self) -> &TraitEnvironment<'db> { + self.ctx.env() + } + + #[inline] + fn interner(&self) -> DbInterner<'db> { + self.infcx().interner + } + + #[inline] + fn db(&self) -> &'db dyn HirDatabase { + self.interner().db + } + fn autoderef_traits(&mut self) -> Option<AutoderefTraits> { match &mut self.traits { Some(it) => Some(*it), @@ -219,25 +314,23 @@ (|| { Some(AutoderefTraits { trait_: LangItem::Receiver - .resolve_trait(self.table.db, self.table.trait_env.krate)?, + .resolve_trait(self.db(), self.env().krate)?, trait_target: LangItem::ReceiverTarget - .resolve_type_alias(self.table.db, self.table.trait_env.krate)?, + .resolve_type_alias(self.db(), self.env().krate)?, }) })() .or_else(|| { Some(AutoderefTraits { - trait_: LangItem::Deref - .resolve_trait(self.table.db, self.table.trait_env.krate)?, + trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?, trait_target: LangItem::DerefTarget - .resolve_type_alias(self.table.db, self.table.trait_env.krate)?, + .resolve_type_alias(self.db(), self.env().krate)?, }) })? } else { AutoderefTraits { - trait_: LangItem::Deref - .resolve_trait(self.table.db, self.table.trait_env.krate)?, + trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?, trait_target: LangItem::DerefTarget - .resolve_type_alias(self.table.db, self.table.trait_env.krate)?, + .resolve_type_alias(self.db(), self.env().krate)?, } }; Some(*self.traits.insert(traits)) @@ -247,31 +340,32 @@ fn overloaded_deref_ty(&mut self, ty: Ty<'db>) -> Option<Ty<'db>> { debug!("overloaded_deref_ty({:?})", ty); - let interner = self.table.interner(); + let interner = self.interner(); // <ty as Deref>, or whatever the equivalent trait is that we've been asked to walk. 
let AutoderefTraits { trait_, trait_target } = self.autoderef_traits()?; let trait_ref = TraitRef::new(interner, trait_.into(), [ty]); let obligation = - Obligation::new(interner, ObligationCause::new(), self.table.trait_env.env, trait_ref); + Obligation::new(interner, ObligationCause::new(), self.env().env, trait_ref); // We detect whether the self type implements `Deref` before trying to // structurally normalize. We use `predicate_may_hold_opaque_types_jank` // to support not-yet-defined opaque types. It will succeed for `impl Deref` // but fail for `impl OtherTrait`. - if !self.table.infer_ctxt.predicate_may_hold_opaque_types_jank(&obligation) { + if !self.infcx().predicate_may_hold_opaque_types_jank(&obligation) { debug!("overloaded_deref_ty: cannot match obligation"); return None; } let (normalized_ty, obligations) = structurally_normalize_ty( - self.table, + self.infcx(), + self.env().env, Ty::new_projection(interner, trait_target.into(), [ty]), )?; debug!("overloaded_deref_ty({:?}) = ({:?}, {:?})", ty, normalized_ty, obligations); self.state.obligations.extend(obligations); - Some(self.table.infer_ctxt.resolve_vars_if_possible(normalized_ty)) + Some(self.infcx().resolve_vars_if_possible(normalized_ty)) } /// Returns the final type we ended up with, which may be an unresolved @@ -292,7 +386,6 @@ &self.state.steps } - #[expect(dead_code)] pub(crate) fn reached_recursion_limit(&self) -> bool { self.state.reached_recursion_limit } @@ -316,12 +409,12 @@ } fn structurally_normalize_ty<'db>( - table: &InferenceTable<'db>, + infcx: &InferCtxt<'db>, + param_env: ParamEnv<'db>, ty: Ty<'db>, ) -> Option<(Ty<'db>, PredicateObligations<'db>)> { - let mut ocx = ObligationCtxt::new(&table.infer_ctxt); - let Ok(normalized_ty) = - ocx.structurally_normalize_ty(&ObligationCause::misc(), table.trait_env.env, ty) + let mut ocx = ObligationCtxt::new(infcx); + let Ok(normalized_ty) = ocx.structurally_normalize_ty(&ObligationCause::misc(), param_env, ty) else { // We shouldn't 
have errors here in the old solver, except for // evaluate/fulfill mismatches, but that's not a reason for an ICE. @@ -334,17 +427,3 @@ Some((normalized_ty, ocx.into_pending_obligations())) } - -pub(crate) fn overloaded_deref_ty<'db>( - table: &InferenceTable<'db>, - ty: Ty<'db>, -) -> Option<InferOk<'db, Ty<'db>>> { - let interner = table.interner(); - - let trait_target = LangItem::DerefTarget.resolve_type_alias(table.db, table.trait_env.krate)?; - - let (normalized_ty, obligations) = - structurally_normalize_ty(table, Ty::new_projection(interner, trait_target.into(), [ty]))?; - - Some(InferOk { value: normalized_ty, obligations }) -}
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index f1aa06d..70185bb 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs
@@ -851,6 +851,7 @@ fn loops() { check_number( r#" + //- minicore: add, builtin_impls const GOAL: u8 = { let mut x = 0; loop { @@ -871,6 +872,7 @@ ); check_number( r#" + //- minicore: add, builtin_impls const GOAL: u8 = { let mut x = 0; loop { @@ -885,6 +887,7 @@ ); check_number( r#" + //- minicore: add, builtin_impls const GOAL: u8 = { 'a: loop { let x = 'b: loop { @@ -907,7 +910,7 @@ ); check_number( r#" - //- minicore: add + //- minicore: add, builtin_impls const GOAL: u8 = { let mut x = 0; 'a: loop { @@ -1277,7 +1280,7 @@ fn destructing_assignment() { check_number( r#" - //- minicore: add + //- minicore: add, builtin_impls const fn f(i: &mut u8) -> &mut u8 { *i += 1; i @@ -1469,11 +1472,11 @@ fn options() { check_number( r#" - //- minicore: option + //- minicore: option, add, builtin_impls const GOAL: u8 = { let x = Some(2); match x { - Some(y) => 2 * y, + Some(y) => 2 + y, _ => 10, } }; @@ -1482,7 +1485,7 @@ ); check_number( r#" - //- minicore: option + //- minicore: option, add, builtin_impls fn f(x: Option<Option<i32>>) -> i32 { if let Some(y) = x && let Some(z) = y { z @@ -1498,11 +1501,11 @@ ); check_number( r#" - //- minicore: option + //- minicore: option, add, builtin_impls const GOAL: u8 = { let x = None; match x { - Some(y) => 2 * y, + Some(y) => 2 + y, _ => 10, } };
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 9b58abb..98f4209 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs
@@ -3,15 +3,12 @@ use base_db::{Crate, target::TargetLoadError}; use hir_def::{ - AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, - GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId, - TypeAliasId, TypeOrConstParamId, VariantId, db::DefDatabase, hir::ExprId, - layout::TargetDataLayout, + AdtId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, + GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId, + TypeOrConstParamId, VariantId, db::DefDatabase, hir::ExprId, layout::TargetDataLayout, }; -use hir_expand::name::Name; use la_arena::ArenaMap; use salsa::plumbing::AsId; -use smallvec::SmallVec; use triomphe::Arc; use crate::{ @@ -19,8 +16,7 @@ consteval::ConstEvalError, dyn_compatibility::DynCompatibilityViolation, layout::{Layout, LayoutError}, - lower::{Diagnostics, GenericDefaults, GenericPredicates, ImplTraits}, - method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, + lower::{Diagnostics, GenericDefaults}, mir::{BorrowckResult, MirBody, MirLowerError}, next_solver::{Const, EarlyBinder, GenericArgs, PolyFnSig, TraitRef, Ty, VariancesOf}, }; @@ -190,43 +186,6 @@ def: CallableDefId, ) -> EarlyBinder<'db, PolyFnSig<'db>>; - #[salsa::invoke(crate::lower::return_type_impl_traits)] - fn return_type_impl_traits<'db>( - &'db self, - def: FunctionId, - ) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>; - - #[salsa::invoke(crate::lower::type_alias_impl_traits)] - fn type_alias_impl_traits<'db>( - &'db self, - def: TypeAliasId, - ) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>; - - #[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)] - fn generic_predicates_without_parent_with_diagnostics<'db>( - &'db self, - def: GenericDefId, - ) -> (GenericPredicates<'db>, Diagnostics); - - #[salsa::invoke(crate::lower::generic_predicates_without_parent_query)] - #[salsa::transparent] - fn 
generic_predicates_without_parent<'db>( - &'db self, - def: GenericDefId, - ) -> GenericPredicates<'db>; - - #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] - #[salsa::cycle(cycle_result = crate::lower::generic_predicates_for_param_cycle_result)] - fn generic_predicates_for_param<'db>( - &'db self, - def: GenericDefId, - param_id: TypeOrConstParamId, - assoc_name: Option<Name>, - ) -> GenericPredicates<'db>; - - #[salsa::invoke(crate::lower::generic_predicates_query)] - fn generic_predicates<'db>(&'db self, def: GenericDefId) -> GenericPredicates<'db>; - #[salsa::invoke(crate::lower::trait_environment_for_body_query)] #[salsa::transparent] fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId) @@ -249,32 +208,6 @@ #[salsa::transparent] fn generic_defaults<'db>(&'db self, def: GenericDefId) -> GenericDefaults<'db>; - #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] - fn inherent_impls_in_crate(&self, krate: Crate) -> Arc<InherentImpls>; - - #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)] - fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>; - - /// Collects all crates in the dependency graph that have impls for the - /// given fingerprint. This is only used for primitive types and types - /// annotated with `rustc_has_incoherent_inherent_impls`; for other types - /// we just look at the crate where the type is defined. 
- #[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)] - fn incoherent_inherent_impl_crates( - &self, - krate: Crate, - fp: TyFingerprint, - ) -> SmallVec<[Crate; 2]>; - - #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)] - fn trait_impls_in_crate(&self, krate: Crate) -> Arc<TraitImpls>; - - #[salsa::invoke(TraitImpls::trait_impls_in_block_query)] - fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>; - - #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)] - fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc<TraitImpls>]>; - // Interned IDs for solver integration #[salsa::interned] fn intern_impl_trait_id(&self, id: ImplTraitId<'_>) -> InternedOpaqueTyId;
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs index 0815e62..f6992df 100644 --- a/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -563,6 +563,10 @@ cov_mark::hit!(extern_static_incorrect_case_ignored); return; } + if self.db.attrs(static_id.into()).by_key(sym::no_mangle).exists() { + cov_mark::hit!(no_mangle_static_incorrect_case_ignored); + return; + } self.create_incorrect_case_diagnostic_for_item_name( static_id,
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 0a37966..3b37dc8 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs
@@ -52,6 +52,7 @@ db::{HirDatabase, InternedClosure, InternedCoroutine}, generics::generics, layout::Layout, + lower::GenericPredicates, mir::pad16, next_solver::{ AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, EarlyBinder, @@ -625,23 +626,20 @@ { // FIXME: We shouldn't use `param.id`, it should be removed. We should know the // `GenericDefId` from the formatted type (store it inside the `HirFormatter`). - let bounds = - f.db.generic_predicates(param.id.parent()) - .instantiate_identity() - .into_iter() - .flatten() - .filter(|wc| { - let ty = match wc.kind().skip_binder() { - ClauseKind::Trait(tr) => tr.self_ty(), - ClauseKind::TypeOutlives(t) => t.0, - _ => return false, - }; - let TyKind::Alias(AliasTyKind::Projection, a) = ty.kind() else { - return false; - }; - a == *alias - }) - .collect::<Vec<_>>(); + let bounds = GenericPredicates::query_all(f.db, param.id.parent()) + .iter_identity_copied() + .filter(|wc| { + let ty = match wc.kind().skip_binder() { + ClauseKind::Trait(tr) => tr.self_ty(), + ClauseKind::TypeOutlives(t) => t.0, + _ => return false, + }; + let TyKind::Alias(AliasTyKind::Projection, a) = ty.kind() else { + return false; + }; + a == *alias + }) + .collect::<Vec<_>>(); if !bounds.is_empty() { return f.format_bounds_with(*alias, |f| { write_bounds_like_dyn_trait_with_prefix( @@ -1122,13 +1120,8 @@ _ => unreachable!(), }; let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id); - if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id { - let datas = db - .return_type_impl_traits(func) - .expect("impl trait id without data"); - let data = (*datas) - .as_ref() - .map_bound(|rpit| &rpit.impl_traits[idx].predicates); + if let ImplTraitId::ReturnTypeImplTrait(func, _) = impl_trait_id { + let data = impl_trait_id.predicates(db); let bounds = || data.iter_instantiated_copied(f.interner, ty.args.as_slice()); let mut len = bounds().count(); @@ -1354,43 +1347,24 @@ )); } let impl_trait_id = 
db.lookup_intern_impl_trait_id(opaque_ty_id); - match impl_trait_id { - ImplTraitId::ReturnTypeImplTrait(func, idx) => { - let datas = - db.return_type_impl_traits(func).expect("impl trait id without data"); - let data = - (*datas).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates); - let bounds = data - .iter_instantiated_copied(interner, alias_ty.args.as_slice()) - .collect::<Vec<_>>(); - let krate = func.krate(db); - write_bounds_like_dyn_trait_with_prefix( - f, - "impl", - Either::Left(*self), - &bounds, - SizedByDefault::Sized { anchor: krate }, - )?; + let data = impl_trait_id.predicates(db); + let bounds = data + .iter_instantiated_copied(interner, alias_ty.args.as_slice()) + .collect::<Vec<_>>(); + let krate = match impl_trait_id { + ImplTraitId::ReturnTypeImplTrait(func, _) => { + func.krate(db) // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution } - ImplTraitId::TypeAliasImplTrait(alias, idx) => { - let datas = - db.type_alias_impl_traits(alias).expect("impl trait id without data"); - let data = - (*datas).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates); - let bounds = data - .iter_instantiated_copied(interner, alias_ty.args.as_slice()) - .collect::<Vec<_>>(); - let krate = alias.krate(db); - write_bounds_like_dyn_trait_with_prefix( - f, - "impl", - Either::Left(*self), - &bounds, - SizedByDefault::Sized { anchor: krate }, - )?; - } - } + ImplTraitId::TypeAliasImplTrait(alias, _) => alias.krate(db), + }; + write_bounds_like_dyn_trait_with_prefix( + f, + "impl", + Either::Left(*self), + &bounds, + SizedByDefault::Sized { anchor: krate }, + )?; } TyKind::Closure(id, substs) => { let id = id.0; @@ -1541,11 +1515,8 @@ )? 
} TypeParamProvenance::ArgumentImplTrait => { - let bounds = db - .generic_predicates(param.id.parent()) - .instantiate_identity() - .into_iter() - .flatten() + let bounds = GenericPredicates::query_all(f.db, param.id.parent()) + .iter_identity_copied() .filter(|wc| match wc.kind().skip_binder() { ClauseKind::Trait(tr) => tr.self_ty() == *self, ClauseKind::Projection(proj) => proj.self_ty() == *self,
diff --git a/crates/hir-ty/src/drop.rs b/crates/hir-ty/src/drop.rs index b09d1fb..522d12d 100644 --- a/crates/hir-ty/src/drop.rs +++ b/crates/hir-ty/src/drop.rs
@@ -9,9 +9,9 @@ use crate::{ TraitEnvironment, consteval, db::HirDatabase, - method_resolution::TyFingerprint, + method_resolution::TraitImpls, next_solver::{ - Ty, TyKind, + SimplifiedType, Ty, TyKind, infer::{InferCtxt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, }, @@ -27,13 +27,13 @@ return false; }; let impls = match module.containing_block() { - Some(block) => match db.trait_impls_in_block(block) { + Some(block) => match TraitImpls::for_block(db, block) { Some(it) => it, None => return false, }, - None => db.trait_impls_in_crate(module.krate()), + None => &**TraitImpls::for_crate(db, module.krate()), }; - impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some() + !impls.for_trait_and_self_ty(drop_trait, &SimplifiedType::Adt(adt.into())).is_empty() } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
diff --git a/crates/hir-ty/src/dyn_compatibility.rs b/crates/hir-ty/src/dyn_compatibility.rs index 437141e..1bd5981 100644 --- a/crates/hir-ty/src/dyn_compatibility.rs +++ b/crates/hir-ty/src/dyn_compatibility.rs
@@ -18,7 +18,7 @@ use crate::{ ImplTraitId, db::{HirDatabase, InternedOpaqueTyId}, - lower::associated_ty_item_bounds, + lower::{GenericPredicates, associated_ty_item_bounds}, next_solver::{ Binder, Clause, Clauses, DbInterner, EarlyBinder, GenericArgs, Goal, ParamEnv, ParamTy, SolverDefId, TraitPredicate, TraitRef, Ty, TypingMode, infer::DbInternerInferExt, mk_param, @@ -136,11 +136,11 @@ }; let interner = DbInterner::new_with(db, Some(krate), None); - let predicates = db.generic_predicates(def); + let predicates = GenericPredicates::query_explicit(db, def); // FIXME: We should use `explicit_predicates_of` here, which hasn't been implemented to // rust-analyzer yet // https://github.com/rust-lang/rust/blob/ddaf12390d3ffb7d5ba74491a48f3cd528e5d777/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L490 - elaborate::elaborate(interner, predicates.iter().copied()).any(|pred| { + elaborate::elaborate(interner, predicates.iter_identity_copied()).any(|pred| { match pred.kind().skip_binder() { ClauseKind::Trait(trait_pred) => { if sized == trait_pred.def_id().0 @@ -162,8 +162,8 @@ // but we don't have good way to render such locations. 
// So, just return single boolean value for existence of such `Self` reference fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool { - db.generic_predicates(trait_.into()) - .iter() + GenericPredicates::query_explicit(db, trait_.into()) + .iter_identity_copied() .any(|pred| predicate_references_self(db, trait_, pred, AllowSelfProjection::No)) } @@ -199,7 +199,7 @@ fn predicate_references_self<'db>( db: &'db dyn HirDatabase, trait_: TraitId, - predicate: &Clause<'db>, + predicate: Clause<'db>, allow_self_projection: AllowSelfProjection, ) -> bool { match predicate.kind().skip_binder() { @@ -363,8 +363,8 @@ cb(MethodViolationCode::UndispatchableReceiver)?; } - let predicates = &*db.generic_predicates_without_parent(func.into()); - for pred in predicates { + let predicates = GenericPredicates::query_own(db, func.into()); + for pred in predicates.iter_identity_copied() { let pred = pred.kind().skip_binder(); if matches!(pred, ClauseKind::TypeOutlives(_)) { @@ -440,7 +440,7 @@ let unsized_receiver_ty = receiver_for_self_ty(interner, func, receiver_ty, unsized_self_ty); let param_env = { - let generic_predicates = &*db.generic_predicates(func.into()); + let generic_predicates = GenericPredicates::query_all(db, func.into()); // Self: Unsize<U> let unsize_predicate = @@ -458,7 +458,7 @@ ParamEnv { clauses: Clauses::new_from_iter( interner, - generic_predicates.iter().copied().chain([ + generic_predicates.iter_identity_copied().chain([ unsize_predicate.upcast(interner), trait_predicate.upcast(interner), meta_sized_predicate.upcast(interner),
diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs index 26e03aa..5f02614 100644 --- a/crates/hir-ty/src/generics.rs +++ b/crates/hir-ty/src/generics.rs
@@ -60,11 +60,6 @@ self.params.where_predicates().iter() } - pub(crate) fn has_no_predicates(&self) -> bool { - self.params.has_no_predicates() - && self.parent_generics.as_ref().is_none_or(|g| g.params.has_no_predicates()) - } - pub(crate) fn is_empty(&self) -> bool { self.params.is_empty() && self.parent_generics.as_ref().is_none_or(|g| g.params.is_empty()) }
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 016edb2..02b8ab8 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs
@@ -21,9 +21,11 @@ mod expr; mod fallback; mod mutability; +mod op; mod opaques; mod pat; mod path; +mod place_op; pub(crate) mod unify; use std::{cell::OnceCell, convert::identity, iter, ops::Index}; @@ -45,12 +47,14 @@ use indexmap::IndexSet; use intern::sym; use la_arena::ArenaMap; +use macros::{TypeFoldable, TypeVisitable}; use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ AliasTyKind, TypeFoldable, inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _}, }; +use span::Edition; use stdx::never; use triomphe::Arc; @@ -65,10 +69,13 @@ lower::{ ImplTraitIdx, ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic, }, + method_resolution::{CandidateId, MethodResolutionUnstableFeatures}, mir::MirSpan, next_solver::{ AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind, - Tys, abi::Safety, infer::traits::ObligationCause, + Tys, + abi::Safety, + infer::{InferCtxt, traits::ObligationCause}, }, traits::FnTrait, utils::TargetFeatureIsSafeInTarget, @@ -330,16 +337,21 @@ /// At some point, of course, `Box` should move out of the compiler, in which /// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> -> /// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`. -#[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)] pub struct Adjustment<'db> { - pub kind: Adjust<'db>, + #[type_visitable(ignore)] + #[type_foldable(identity)] + pub kind: Adjust, pub target: Ty<'db>, } impl<'db> Adjustment<'db> { pub fn borrow(interner: DbInterner<'db>, m: Mutability, ty: Ty<'db>, lt: Region<'db>) -> Self { let ty = Ty::new_ref(interner, lt, ty, m); - Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt, m)), target: ty } + Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::new(m, AllowTwoPhase::No))), + target: ty, + } } } @@ -357,20 +369,20 @@ /// capable mutable borrows. 
/// See #49434 for tracking. #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub(crate) enum AllowTwoPhase { +pub enum AllowTwoPhase { // FIXME: We should use this when appropriate. Yes, No, } #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum Adjust<'db> { +pub enum Adjust { /// Go from ! to any type. NeverToAny, /// Dereference once, producing a place. Deref(Option<OverloadedDeref>), /// Take the address and produce either a `&` or `*` pointer. - Borrow(AutoBorrow<'db>), + Borrow(AutoBorrow), Pointer(PointerCast), } @@ -381,18 +393,47 @@ #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct OverloadedDeref(pub Option<Mutability>); -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum AutoBorrow<'db> { +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub enum AutoBorrowMutability { + Mut { allow_two_phase_borrow: AllowTwoPhase }, + Not, +} + +impl AutoBorrowMutability { + /// Creates an `AutoBorrowMutability` from a mutability and allowance of two phase borrows. + /// + /// Note that when `mutbl.is_not()`, `allow_two_phase_borrow` is ignored + pub fn new(mutbl: Mutability, allow_two_phase_borrow: AllowTwoPhase) -> Self { + match mutbl { + Mutability::Not => Self::Not, + Mutability::Mut => Self::Mut { allow_two_phase_borrow }, + } + } +} + +impl From<AutoBorrowMutability> for Mutability { + fn from(m: AutoBorrowMutability) -> Self { + match m { + AutoBorrowMutability::Mut { .. } => Mutability::Mut, + AutoBorrowMutability::Not => Mutability::Not, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum AutoBorrow { /// Converts from T to &T. - Ref(Region<'db>, Mutability), + Ref(AutoBorrowMutability), /// Converts from T to *T. 
RawPtr(Mutability), } -impl<'db> AutoBorrow<'db> { - fn mutability(&self) -> Mutability { - let (AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) = self; - *m +impl AutoBorrow { + fn mutability(self) -> Mutability { + match self { + AutoBorrow::Ref(mutbl) => mutbl.into(), + AutoBorrow::RawPtr(mutbl) => mutbl, + } } } @@ -442,7 +483,7 @@ /// For each struct literal or pattern, records the variant it resolves to. variant_resolutions: FxHashMap<ExprOrPatId, VariantId>, /// For each associated item record what it resolves to - assoc_resolutions: FxHashMap<ExprOrPatId, (AssocItemId, GenericArgs<'db>)>, + assoc_resolutions: FxHashMap<ExprOrPatId, (CandidateId, GenericArgs<'db>)>, /// Whenever a tuple field expression access a tuple field, we allocate a tuple id in /// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of /// that which allows us to resolve a [`TupleFieldId`]s type. @@ -457,7 +498,7 @@ pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>, pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>, pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>, - type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>, + pub(crate) type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>, /// Whether there are any type-mismatching errors in the result. // FIXME: This isn't as useful as initially thought due to us falling back placeholders to // `TyKind::Error`. 
@@ -535,16 +576,16 @@ pub fn assoc_resolutions_for_expr( &self, id: ExprId, - ) -> Option<(AssocItemId, GenericArgs<'db>)> { + ) -> Option<(CandidateId, GenericArgs<'db>)> { self.assoc_resolutions.get(&id.into()).copied() } - pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(AssocItemId, GenericArgs<'db>)> { + pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(CandidateId, GenericArgs<'db>)> { self.assoc_resolutions.get(&id.into()).copied() } pub fn assoc_resolutions_for_expr_or_pat( &self, id: ExprOrPatId, - ) -> Option<(AssocItemId, GenericArgs<'db>)> { + ) -> Option<(CandidateId, GenericArgs<'db>)> { match id { ExprOrPatId::ExprId(id) => self.assoc_resolutions_for_expr(id), ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id), @@ -718,7 +759,6 @@ re_erased: Region<'db>, empty_args: GenericArgs<'db>, - empty_tys: Tys<'db>, } impl<'db> InternedStandardTypes<'db> { @@ -754,7 +794,6 @@ re_erased: Region::new_erased(interner), empty_args: GenericArgs::new_from_iter(interner, []), - empty_tys: Tys::new_from_iter(interner, []), } } } @@ -769,8 +808,10 @@ /// and resolve the path via its methods. This will ensure proper error reporting. pub(crate) resolver: Resolver<'db>, target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, + pub(crate) unstable_features: MethodResolutionUnstableFeatures, + pub(crate) edition: Edition, pub(crate) generic_def: GenericDefId, - table: unify::InferenceTable<'db>, + pub(crate) table: unify::InferenceTable<'db>, /// The traits in scope, disregarding block modules. This is used for caching purposes. 
traits_in_scope: FxHashSet<TraitId>, pub(crate) result: InferenceResult<'db>, @@ -873,6 +914,10 @@ return_ty: types.error, // set in collect_* calls types, target_features: OnceCell::new(), + unstable_features: MethodResolutionUnstableFeatures::from_def_map( + resolver.top_level_def_map(), + ), + edition: resolver.krate().data(db).edition, table, tuple_field_accesses_rev: Default::default(), resume_yield_tys: None, @@ -906,18 +951,15 @@ self.resolver.krate() } - fn target_features<'a>( - db: &dyn HirDatabase, - target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, - owner: DefWithBodyId, - krate: Crate, - ) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) { - let (target_features, target_feature_is_safe) = target_features.get_or_init(|| { - let target_features = match owner { - DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())), + fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) { + let (target_features, target_feature_is_safe) = self.target_features.get_or_init(|| { + let target_features = match self.owner { + DefWithBodyId::FunctionId(id) => { + TargetFeatures::from_attrs(&self.db.attrs(id.into())) + } _ => TargetFeatures::default(), }; - let target_feature_is_safe = match &krate.workspace_data(db).target { + let target_feature_is_safe = match &self.krate().workspace_data(self.db).target { Ok(target) => crate::utils::target_feature_is_safe_in_target(target), Err(_) => TargetFeatureIsSafeInTarget::No, }; @@ -927,7 +969,7 @@ } #[inline] - pub(crate) fn set_tainted_by_errors(&mut self) { + fn set_tainted_by_errors(&mut self) { self.result.has_errors = true; } @@ -1162,6 +1204,11 @@ self.table.interner() } + #[inline] + pub(crate) fn infcx(&self) -> &InferCtxt<'db> { + &self.table.infer_ctxt + } + fn infer_body(&mut self) { match self.return_coercion { Some(_) => self.infer_return(self.body.body_expr), @@ -1179,7 +1226,7 @@ self.result.type_of_expr.insert(expr, ty); } - fn 
write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) { + pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) { if adjustments.is_empty() { return; } @@ -1212,7 +1259,12 @@ self.result.pat_adjustments.entry(pat).or_default().extend(adjustments); } - fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: GenericArgs<'db>) { + pub(crate) fn write_method_resolution( + &mut self, + expr: ExprId, + func: FunctionId, + subst: GenericArgs<'db>, + ) { self.result.method_resolutions.insert(expr, (func, subst)); } @@ -1223,7 +1275,7 @@ fn write_assoc_resolution( &mut self, id: ExprOrPatId, - item: AssocItemId, + item: CandidateId, subs: GenericArgs<'db>, ) { self.result.assoc_resolutions.insert(id, (item, subs)); @@ -1237,7 +1289,7 @@ self.result.type_of_binding.insert(id, ty); } - fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) { + pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) { self.diagnostics.push(diagnostic); } @@ -1284,7 +1336,7 @@ self.process_user_written_ty(ty) } - fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> { + pub(crate) fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> { self.make_ty( type_ref, self.body, @@ -1293,7 +1345,7 @@ ) } - fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty<'db>) -> Const<'db> { + pub(crate) fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty<'db>) -> Const<'db> { let const_ = self.with_ty_lowering( self.body, InferenceTyDiagnosticSource::Body, @@ -1303,7 +1355,7 @@ self.insert_type_vars(const_) } - fn make_path_as_body_const(&mut self, path: &Path, ty: Ty<'db>) -> Const<'db> { + pub(crate) fn make_path_as_body_const(&mut self, path: &Path, ty: Ty<'db>) -> Const<'db> { let const_ = self.with_ty_lowering( self.body, InferenceTyDiagnosticSource::Body, @@ -1317,7 +1369,7 @@ self.types.error } - fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> { + 
pub(crate) fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> { let lt = self.with_ty_lowering( self.body, InferenceTyDiagnosticSource::Body, @@ -1399,19 +1451,13 @@ } /// Whenever you lower a user-written type, you should call this. - fn process_user_written_ty<T>(&mut self, ty: T) -> T - where - T: TypeFoldable<DbInterner<'db>>, - { + fn process_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { self.table.process_user_written_ty(ty) } /// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation, /// while `process_user_written_ty()` should (but doesn't currently). - fn process_remote_user_written_ty<T>(&mut self, ty: T) -> T - where - T: TypeFoldable<DbInterner<'db>>, - { + fn process_remote_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { self.table.process_remote_user_written_ty(ty) } @@ -1427,18 +1473,72 @@ self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[]) } - fn demand_eqtype(&mut self, expected: Ty<'db>, actual: Ty<'db>) { + fn demand_eqtype( + &mut self, + id: ExprOrPatId, + expected: Ty<'db>, + actual: Ty<'db>, + ) -> Result<(), ()> { + let result = self.demand_eqtype_fixme_no_diag(expected, actual); + if result.is_err() { + self.result.type_mismatches.insert(id, TypeMismatch { expected, actual }); + } + result + } + + fn demand_eqtype_fixme_no_diag( + &mut self, + expected: Ty<'db>, + actual: Ty<'db>, + ) -> Result<(), ()> { let result = self .table - .infer_ctxt - .at(&ObligationCause::new(), self.table.trait_env.env) + .at(&ObligationCause::new()) .eq(expected, actual) .map(|infer_ok| self.table.register_infer_ok(infer_ok)); + result.map_err(drop) + } + + fn demand_suptype(&mut self, expected: Ty<'db>, actual: Ty<'db>) { + let result = self + .table + .at(&ObligationCause::new()) + .sup(expected, actual) + .map(|infer_ok| self.table.register_infer_ok(infer_ok)); if let Err(_err) = result { // FIXME: Emit diagnostic. 
} } + fn demand_coerce( + &mut self, + expr: ExprId, + checked_ty: Ty<'db>, + expected: Ty<'db>, + allow_two_phase: AllowTwoPhase, + expr_is_read: ExprIsRead, + ) -> Ty<'db> { + let result = self.coerce(expr.into(), checked_ty, expected, allow_two_phase, expr_is_read); + if let Err(_err) = result { + // FIXME: Emit diagnostic. + } + result.unwrap_or(self.types.error) + } + + fn expr_ty(&self, expr: ExprId) -> Ty<'db> { + self.result[expr] + } + + fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> { + let mut ty = None; + if let Some(it) = self.result.expr_adjustments.get(&e) + && let Some(it) = it.last() + { + ty = Some(it.target); + } + ty.unwrap_or_else(|| self.expr_ty(e)) + } + fn resolve_associated_type_with_params( &mut self, inner_ty: Ty<'db>, @@ -1596,9 +1696,7 @@ (ty, _) = path_ctx.lower_partly_resolved_path(resolution, true); tried_resolving_once = true; - ty = self.table.insert_type_vars(ty); - ty = self.table.normalize_associated_types_in(ty); - ty = self.table.structurally_resolve_type(ty); + ty = self.table.process_user_written_ty(ty); if ty.is_ty_error() { return (self.err_ty(), None); } @@ -1709,18 +1807,6 @@ trait_.trait_items(self.db).associated_type_by_name(&Name::new_symbol_root(sym::Output)) } - fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> { - self.resolve_lang_item(lang)?.as_trait() - } - - fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> { - self.resolve_output_on(self.resolve_lang_trait(LangItem::Neg)?) - } - - fn resolve_ops_not_output(&self) -> Option<TypeAliasId> { - self.resolve_output_on(self.resolve_lang_trait(LangItem::Not)?) - } - fn resolve_future_future_output(&self) -> Option<TypeAliasId> { let ItemContainerId::TraitId(trait_) = self .resolve_lang_item(LangItem::IntoFutureIntoFuture)? @@ -1768,24 +1854,17 @@ Some(struct_.into()) } - fn resolve_ops_index_output(&self) -> Option<TypeAliasId> { - self.resolve_output_on(self.resolve_lang_trait(LangItem::Index)?) 
- } - fn resolve_va_list(&self) -> Option<AdtId> { let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?; Some(struct_.into()) } - fn get_traits_in_scope<'a>( - resolver: &Resolver<'db>, - traits_in_scope: &'a FxHashSet<TraitId>, - ) -> Either<FxHashSet<TraitId>, &'a FxHashSet<TraitId>> { - let mut b_traits = resolver.traits_in_scope_from_block_scopes().peekable(); + pub(crate) fn get_traits_in_scope(&self) -> Either<FxHashSet<TraitId>, &FxHashSet<TraitId>> { + let mut b_traits = self.resolver.traits_in_scope_from_block_scopes().peekable(); if b_traits.peek().is_some() { - Either::Left(traits_in_scope.iter().copied().chain(b_traits).collect()) + Either::Left(self.traits_in_scope.iter().copied().chain(b_traits).collect()) } else { - Either::Right(traits_in_scope) + Either::Right(&self.traits_in_scope) } } }
diff --git a/crates/hir-ty/src/infer/autoderef.rs b/crates/hir-ty/src/infer/autoderef.rs index ba133aa..1af102a 100644 --- a/crates/hir-ty/src/infer/autoderef.rs +++ b/crates/hir-ty/src/infer/autoderef.rs
@@ -6,7 +6,7 @@ use crate::{ Adjust, Adjustment, OverloadedDeref, - autoderef::{Autoderef, AutoderefKind}, + autoderef::{Autoderef, AutoderefCtx, AutoderefKind, GeneralAutoderef}, infer::unify::InferenceTable, next_solver::{ Ty, @@ -15,18 +15,16 @@ }; impl<'db> InferenceTable<'db> { - pub(crate) fn autoderef(&mut self, base_ty: Ty<'db>) -> Autoderef<'_, 'db> { - Autoderef::new(self, base_ty) + pub(crate) fn autoderef(&self, base_ty: Ty<'db>) -> Autoderef<'_, 'db, usize> { + Autoderef::new(&self.infer_ctxt, &self.trait_env, base_ty) + } + + pub(crate) fn autoderef_with_tracking(&self, base_ty: Ty<'db>) -> Autoderef<'_, 'db> { + Autoderef::new_with_tracking(&self.infer_ctxt, &self.trait_env, base_ty) } } -impl<'db> Autoderef<'_, 'db> { - /// Returns the adjustment steps. - pub(crate) fn adjust_steps(mut self) -> Vec<Adjustment<'db>> { - let infer_ok = self.adjust_steps_as_infer_ok(); - self.table.register_infer_ok(infer_ok) - } - +impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> { pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment<'db>>> { let steps = self.steps(); if steps.is_empty() {
diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs index c128977..8b657dc 100644 --- a/crates/hir-ty/src/infer/cast.rs +++ b/crates/hir-ty/src/infer/cast.rs
@@ -11,7 +11,7 @@ use crate::{ InferenceDiagnostic, db::HirDatabase, - infer::{AllowTwoPhase, InferenceContext, coerce::CoerceNever}, + infer::{AllowTwoPhase, InferenceContext, expr::ExprIsRead}, next_solver::{BoundExistentialPredicates, DbInterner, ParamTy, Ty, TyKind}, }; @@ -120,7 +120,7 @@ self.expr_ty, self.cast_ty, AllowTwoPhase::No, - CoerceNever::Yes, + ExprIsRead::Yes, ) .is_ok() { @@ -167,7 +167,7 @@ self.expr_ty, fn_ptr, AllowTwoPhase::No, - CoerceNever::Yes, + ExprIsRead::Yes, ) .is_ok() { @@ -248,7 +248,7 @@ self.expr_ty, array_ptr_type, AllowTwoPhase::No, - CoerceNever::Yes, + ExprIsRead::Yes, ) .is_ok() { @@ -263,7 +263,7 @@ // This is a less strict condition than rustc's `demand_eqtype`, // but false negative is better than false positive if ctx - .coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, CoerceNever::Yes) + .coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, ExprIsRead::Yes) .is_ok() { return Ok(());
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index 06f8307..54a06eb 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs
@@ -31,7 +31,6 @@ BoundRegionConversionTime, InferOk, InferResult, traits::{ObligationCause, PredicateObligations}, }, - util::explicit_item_bounds, }, traits::FnTrait, }; @@ -255,8 +254,10 @@ .deduce_closure_signature_from_predicates( expected_ty, closure_kind, - explicit_item_bounds(self.interner(), def_id) - .iter_instantiated(self.interner(), args) + def_id + .expect_opaque_ty() + .predicates(self.db) + .iter_instantiated_copied(self.interner(), args.as_slice()) .map(|clause| clause.as_predicate()), ), TyKind::Dynamic(object_type, ..) => {
diff --git a/crates/hir-ty/src/infer/closure/analysis.rs b/crates/hir-ty/src/infer/closure/analysis.rs index 763b145..944b359 100644 --- a/crates/hir-ty/src/infer/closure/analysis.rs +++ b/crates/hir-ty/src/infer/closure/analysis.rs
@@ -11,11 +11,8 @@ Statement, UnaryOp, }, item_tree::FieldsShape, - lang_item::LangItem, resolver::ValueNs, }; -use hir_expand::name::Name; -use intern::sym; use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _}; @@ -351,10 +348,12 @@ return Some(place); } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { - if matches!( - self.expr_ty_after_adjustments(*expr).kind(), - TyKind::Ref(..) | TyKind::RawPtr(..) - ) { + let is_builtin_deref = match self.expr_ty(*expr).kind() { + TyKind::Ref(..) | TyKind::RawPtr(..) => true, + TyKind::Adt(adt_def, _) if adt_def.is_box() => true, + _ => false, + }; + if is_builtin_deref { let mut place = self.place_of_expr(*expr)?; self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); place.projections.push(ProjectionElem::Deref); @@ -609,28 +608,19 @@ } Expr::Field { expr, name: _ } => self.select_from_expr(*expr), Expr::UnaryOp { expr, op: UnaryOp::Deref } => { - if matches!( - self.expr_ty_after_adjustments(*expr).kind(), - TyKind::Ref(..) | TyKind::RawPtr(..) - ) { - self.select_from_expr(*expr); - } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) { - let mutability = 'b: { - if let Some(deref_trait) = - self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait()) - && let Some(deref_fn) = deref_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - break 'b deref_fn == f; + if self.result.method_resolution(tgt_expr).is_some() { + // Overloaded deref. 
+ match self.expr_ty_after_adjustments(*expr).kind() { + TyKind::Ref(_, _, mutability) => { + let place = self.place_of_expr(*expr); + match mutability { + Mutability::Mut => self.mutate_expr(*expr, place), + Mutability::Not => self.ref_expr(*expr, place), + } } - false - }; - let place = self.place_of_expr(*expr); - if mutability { - self.mutate_expr(*expr, place); - } else { - self.ref_expr(*expr, place); + // FIXME: Is this correct wrt. raw pointer derefs? + TyKind::RawPtr(..) => self.select_from_expr(*expr), + _ => never!("deref adjustments should include taking a mutable reference"), } } else { self.select_from_expr(*expr); @@ -806,20 +796,6 @@ self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut)); } - fn expr_ty(&self, expr: ExprId) -> Ty<'db> { - self.result[expr] - } - - fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> { - let mut ty = None; - if let Some(it) = self.result.expr_adjustments.get(&e) - && let Some(it) = it.last() - { - ty = Some(it.target); - } - ty.unwrap_or_else(|| self.expr_ty(e)) - } - fn is_upvar(&self, place: &HirPlace<'db>) -> bool { if let Some(c) = self.current_closure { let InternedClosure(_, root) = self.db.lookup_intern_closure(c);
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs index 40de923..4acf964 100644 --- a/crates/hir-ty/src/infer/coerce.rs +++ b/crates/hir-ty/src/infer/coerce.rs
@@ -44,7 +44,8 @@ use intern::sym; use rustc_ast_ir::Mutability; use rustc_type_ir::{ - BoundVar, TypeAndMut, + BoundVar, DebruijnIndex, TyVid, TypeAndMut, TypeFoldable, TypeFolder, TypeSuperFoldable, + TypeVisitableExt, error::TypeError, inherent::{Const as _, GenericArg as _, IntoKind, Safety, SliceLike, Ty as _}, }; @@ -56,13 +57,16 @@ Adjust, Adjustment, AutoBorrow, PointerCast, TargetFeatures, TraitEnvironment, autoderef::Autoderef, db::{HirDatabase, InternedClosureId}, - infer::{AllowTwoPhase, InferenceContext, TypeMismatch, unify::InferenceTable}, + infer::{ + AllowTwoPhase, AutoBorrowMutability, InferenceContext, TypeMismatch, expr::ExprIsRead, + }, next_solver::{ Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, CallableIdWrapper, Canonical, ClauseKind, CoercePredicate, Const, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, PolyFnSig, PredicateKind, Region, RegionKind, TraitRef, Ty, TyKind, + TypingMode, infer::{ - InferCtxt, InferOk, InferResult, + DbInternerInferExt, InferCtxt, InferOk, InferResult, relate::RelateResult, select::{ImplSource, SelectionError}, traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations}, @@ -72,10 +76,20 @@ utils::TargetFeatureIsSafeInTarget, }; -struct Coerce<'a, 'b, 'db> { - table: &'a mut InferenceTable<'db>, - has_errors: &'a mut bool, - target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures, TargetFeatureIsSafeInTarget), +trait CoerceDelegate<'db> { + fn infcx(&self) -> &InferCtxt<'db>; + fn env(&self) -> &TraitEnvironment<'db>; + fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget); + + fn set_diverging(&mut self, diverging_ty: Ty<'db>); + + fn set_tainted_by_errors(&mut self); + + fn type_var_is_sized(&mut self, var: TyVid) -> bool; +} + +struct Coerce<D> { + delegate: D, use_lub: bool, /// Determines whether or not allow_two_phase_borrow is set on any /// autoref adjustments we create while coercing. 
We don't want to @@ -109,43 +123,56 @@ Ok(InferOk { value: (adj, target), obligations }) } -impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> { +impl<'db, D> Coerce<D> +where + D: CoerceDelegate<'db>, +{ #[inline] fn set_tainted_by_errors(&mut self) { - *self.has_errors = true; + self.delegate.set_tainted_by_errors(); + } + + #[inline] + fn infcx(&self) -> &InferCtxt<'db> { + self.delegate.infcx() + } + + #[inline] + fn env(&self) -> &TraitEnvironment<'db> { + self.delegate.env() } #[inline] fn interner(&self) -> DbInterner<'db> { - self.table.interner() + self.infcx().interner } #[inline] - fn infer_ctxt(&self) -> &InferCtxt<'db> { - &self.table.infer_ctxt + fn db(&self) -> &'db dyn HirDatabase { + self.interner().db } pub(crate) fn commit_if_ok<T, E>( &mut self, f: impl FnOnce(&mut Self) -> Result<T, E>, ) -> Result<T, E> { - let snapshot = self.table.snapshot(); + let snapshot = self.infcx().start_snapshot(); let result = f(self); match result { Ok(_) => {} Err(_) => { - self.table.rollback_to(snapshot); + self.infcx().rollback_to(snapshot); } } result } - fn unify_raw(&mut self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> { + fn unify_raw(&self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> { debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub); - self.commit_if_ok(|this| { - let at = this.infer_ctxt().at(&this.cause, this.table.trait_env.env); + self.infcx().commit_if_ok(|_| { + let at = self.infcx().at(&self.cause, self.env().env); - let res = if this.use_lub { + let res = if self.use_lub { at.lub(b, a) } else { at.sup(b, a) @@ -157,7 +184,7 @@ // Filter these cases out to make sure our coercion is more accurate. 
match res { Ok(InferOk { value, obligations }) => { - let mut ocx = ObligationCtxt::new(this.infer_ctxt()); + let mut ocx = ObligationCtxt::new(self.infcx()); ocx.register_obligations(obligations); if ocx.try_evaluate_obligations().is_empty() { Ok(InferOk { value, obligations: ocx.into_pending_obligations() }) @@ -182,7 +209,7 @@ a: Ty<'db>, b: Ty<'db>, adjustments: impl IntoIterator<Item = Adjustment<'db>>, - final_adjustment: Adjust<'db>, + final_adjustment: Adjust, ) -> CoerceResult<'db> { self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| { success( @@ -199,15 +226,15 @@ #[instrument(skip(self))] fn coerce(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> { // First, remove any resolved type variables (at the top level, at least): - let a = self.table.shallow_resolve(a); - let b = self.table.shallow_resolve(b); + let a = self.infcx().shallow_resolve(a); + let b = self.infcx().shallow_resolve(b); debug!("Coerce.tys({:?} => {:?})", a, b); // Coercing from `!` to any type is allowed: if a.is_never() { // If we're coercing into an inference var, mark it as possibly diverging. if b.is_infer() { - self.table.set_diverging(b); + self.delegate.set_diverging(b); } if self.coerce_never { @@ -290,12 +317,12 @@ /// fall back to subtyping (`unify_and`). fn coerce_from_inference_variable(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> { debug!("coerce_from_inference_variable(a={:?}, b={:?})", a, b); - debug_assert!(a.is_infer() && self.table.shallow_resolve(a) == a); - debug_assert!(self.table.shallow_resolve(b) == b); + debug_assert!(a.is_infer() && self.infcx().shallow_resolve(a) == a); + debug_assert!(self.infcx().shallow_resolve(b) == b); if b.is_infer() { // Two unresolved type variables: create a `Coerce` predicate. 
- let target_ty = if self.use_lub { self.table.next_ty_var() } else { b }; + let target_ty = if self.use_lub { self.infcx().next_ty_var() } else { b }; let mut obligations = PredicateObligations::with_capacity(2); for &source_ty in &[a, b] { @@ -303,7 +330,7 @@ obligations.push(Obligation::new( self.interner(), self.cause.clone(), - self.table.trait_env.env, + self.env().env, Binder::dummy(PredicateKind::Coerce(CoercePredicate { a: source_ty, b: target_ty, @@ -335,8 +362,8 @@ mutbl_b: Mutability, ) -> CoerceResult<'db> { debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b); - debug_assert!(self.table.shallow_resolve(a) == a); - debug_assert!(self.table.shallow_resolve(b) == b); + debug_assert!(self.infcx().shallow_resolve(a) == a); + debug_assert!(self.infcx().shallow_resolve(b) == b); // If we have a parameter of type `&M T_a` and the value // provided is `expr`, we will be adding an implicit borrow, @@ -355,10 +382,10 @@ let mut first_error = None; let mut r_borrow_var = None; - let mut autoderef = Autoderef::new(self.table, a); + let mut autoderef = Autoderef::new_with_tracking(self.infcx(), self.env(), a); let mut found = None; - while let Some((referent_ty, autoderefs)) = autoderef.next() { + for (referent_ty, autoderefs) in autoderef.by_ref() { if autoderefs == 0 { // Don't let this pass, otherwise it would cause // &T to autoref to &&T. @@ -442,28 +469,18 @@ } else { if r_borrow_var.is_none() { // create var lazily, at most once - let r = autoderef.table.next_region_var(); + let r = self.infcx().next_region_var(); r_borrow_var = Some(r); // [4] above } r_borrow_var.unwrap() }; let derefd_ty_a = Ty::new_ref( - autoderef.table.interner(), + self.interner(), r, referent_ty, mutbl_b, // [1] above ); - // We need to construct a new `Coerce` because of lifetimes. 
- let mut coerce = Coerce { - table: autoderef.table, - has_errors: self.has_errors, - target_features: self.target_features, - use_lub: self.use_lub, - allow_two_phase: self.allow_two_phase, - coerce_never: self.coerce_never, - cause: self.cause.clone(), - }; - match coerce.unify_raw(derefd_ty_a, b) { + match self.unify_raw(derefd_ty_a, b) { Ok(ok) => { found = Some(ok); break; @@ -515,15 +532,9 @@ autoderef.adjust_steps_as_infer_ok(); obligations.extend(o); - // Now apply the autoref. We have to extract the region out of - // the final ref type we got. - let TyKind::Ref(region, _, _) = ty.kind() else { - panic!("expected a ref type, got {:?}", ty); - }; - adjustments.push(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl_b)), - target: ty, - }); + // Now apply the autoref. + let mutbl = AutoBorrowMutability::new(mutbl_b, self.allow_two_phase); + adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty }); debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments); @@ -538,8 +549,8 @@ #[instrument(skip(self), level = "debug")] fn coerce_unsized(&mut self, source: Ty<'db>, target: Ty<'db>) -> CoerceResult<'db> { debug!(?source, ?target); - debug_assert!(self.table.shallow_resolve(source) == source); - debug_assert!(self.table.shallow_resolve(target) == target); + debug_assert!(self.infcx().shallow_resolve(source) == source); + debug_assert!(self.infcx().shallow_resolve(target) == target); // We don't apply any coercions incase either the source or target // aren't sufficiently well known but tend to instead just equate @@ -602,8 +613,8 @@ } let traits = ( - LangItem::Unsize.resolve_trait(self.table.db, self.table.trait_env.krate), - LangItem::CoerceUnsized.resolve_trait(self.table.db, self.table.trait_env.krate), + LangItem::Unsize.resolve_trait(self.db(), self.env().krate), + LangItem::CoerceUnsized.resolve_trait(self.db(), self.env().krate), ); let (Some(unsize_did), 
Some(coerce_unsized_did)) = traits else { debug!("missing Unsize or CoerceUnsized traits"); @@ -620,18 +631,17 @@ (TyKind::Ref(_, ty_a, mutbl_a), TyKind::Ref(_, _, mutbl_b)) => { coerce_mutbls(mutbl_a, mutbl_b)?; - let r_borrow = self.table.next_region_var(); + let r_borrow = self.infcx().next_region_var(); // We don't allow two-phase borrows here, at least for initial // implementation. If it happens that this coercion is a function argument, // the reborrow in coerce_borrowed_ptr will pick it up. - // let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No); - let mutbl = mutbl_b; + let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No); Some(( Adjustment { kind: Adjust::Deref(None), target: ty_a }, Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)), + kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b), }, )) @@ -655,7 +665,7 @@ // the `CoerceUnsized` target type and the expected type. // We only have the latter, so we use an inference variable // for the former and let type inference do the rest. - let coerce_target = self.table.next_ty_var(); + let coerce_target = self.infcx().next_ty_var(); let mut coercion = self.unify_and( coerce_target, @@ -677,7 +687,7 @@ let mut queue: SmallVec<[PredicateObligation<'db>; 4]> = smallvec![Obligation::new( self.interner(), cause, - self.table.trait_env.env, + self.env().env, TraitRef::new( self.interner(), coerce_unsized_did.into(), @@ -694,14 +704,14 @@ Some(PredicateKind::Clause(ClauseKind::Trait(trait_pred))) if traits.contains(&trait_pred.def_id().0) => { - self.infer_ctxt().resolve_vars_if_possible(trait_pred) + self.infcx().resolve_vars_if_possible(trait_pred) } // Eagerly process alias-relate obligations in new trait solver, // since these can be emitted in the process of solving trait goals, // but we need to constrain vars before processing goals mentioning // them. 
Some(PredicateKind::AliasRelate(..)) => { - let mut ocx = ObligationCtxt::new(self.infer_ctxt()); + let mut ocx = ObligationCtxt::new(self.infcx()); ocx.register_obligation(obligation); if !ocx.try_evaluate_obligations().is_empty() { return Err(TypeError::Mismatch); @@ -715,7 +725,7 @@ } }; debug!("coerce_unsized resolve step: {:?}", trait_pred); - match self.infer_ctxt().select(&obligation.with(self.interner(), trait_pred)) { + match self.infcx().select(&obligation.with(self.interner(), trait_pred)) { // Uncertain or unimplemented. Ok(None) => { if trait_pred.def_id().0 == unsize_did { @@ -724,7 +734,7 @@ debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred); match (self_ty.kind(), unsize_ty.kind()) { (TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..)) - if self.table.type_var_is_sized(v) => + if self.delegate.type_var_is_sized(v) => { debug!("coerce_unsized: have sized infer {:?}", v); coercion.obligations.push(obligation); @@ -794,9 +804,9 @@ &mut self, fn_ty_a: PolyFnSig<'db>, b: Ty<'db>, - adjustment: Option<Adjust<'db>>, + adjustment: Option<Adjust>, ) -> CoerceResult<'db> { - debug_assert!(self.table.shallow_resolve(b) == b); + debug_assert!(self.infcx().shallow_resolve(b) == b); self.commit_if_ok(|this| { if let TyKind::FnPtr(_, hdr_b) = b.kind() @@ -825,15 +835,15 @@ fn coerce_from_fn_pointer(&mut self, fn_ty_a: PolyFnSig<'db>, b: Ty<'db>) -> CoerceResult<'db> { debug!(?fn_ty_a, ?b, "coerce_from_fn_pointer"); - debug_assert!(self.table.shallow_resolve(b) == b); + debug_assert!(self.infcx().shallow_resolve(b) == b); self.coerce_from_safe_fn(fn_ty_a, b, None) } fn coerce_from_fn_item(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> { debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b); - debug_assert!(self.table.shallow_resolve(a) == a); - debug_assert!(self.table.shallow_resolve(b) == b); + debug_assert!(self.infcx().shallow_resolve(a) == a); + debug_assert!(self.infcx().shallow_resolve(b) == b); match b.kind() { 
TyKind::FnPtr(_, b_hdr) => { @@ -841,11 +851,11 @@ if let TyKind::FnDef(def_id, _) = a.kind() { // Intrinsics are not coercible to function pointers if let CallableDefId::FunctionId(def_id) = def_id.0 { - if FunctionSignature::is_intrinsic(self.table.db, def_id) { + if FunctionSignature::is_intrinsic(self.db(), def_id) { return Err(TypeError::IntrinsicCast); } - let attrs = self.table.db.attrs(def_id.into()); + let attrs = self.db().attrs(def_id.into()); if attrs.by_key(sym::rustc_force_inline).exists() { return Err(TypeError::ForceInlineCast); } @@ -856,7 +866,7 @@ // Allow the coercion if the current function has all the features that would be // needed to call the coercee safely. let (target_features, target_feature_is_safe) = - (self.target_features)(); + self.delegate.target_features(); if target_feature_is_safe == TargetFeatureIsSafeInTarget::No && !target_features.enabled.is_superset(&fn_target_features.enabled) { @@ -887,8 +897,8 @@ args_a: GenericArgs<'db>, b: Ty<'db>, ) -> CoerceResult<'db> { - debug_assert!(self.table.shallow_resolve(a) == a); - debug_assert!(self.table.shallow_resolve(b) == b); + debug_assert!(self.infcx().shallow_resolve(a) == a); + debug_assert!(self.infcx().shallow_resolve(b) == b); match b.kind() { // FIXME: We need to have an `upvars_mentioned()` query: @@ -930,8 +940,8 @@ fn coerce_raw_ptr(&mut self, a: Ty<'db>, b: Ty<'db>, mutbl_b: Mutability) -> CoerceResult<'db> { debug!("coerce_raw_ptr(a={:?}, b={:?})", a, b); - debug_assert!(self.table.shallow_resolve(a) == a); - debug_assert!(self.table.shallow_resolve(b) == b); + debug_assert!(self.infcx().shallow_resolve(a) == a); + debug_assert!(self.infcx().shallow_resolve(b) == b); let (is_ref, mt_a) = match a.kind() { TyKind::Ref(_, ty, mutbl) => (true, TypeAndMut::<DbInterner<'db>> { ty, mutbl }), @@ -960,10 +970,36 @@ } } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(crate) enum CoerceNever { - No, - Yes, +struct InferenceCoercionDelegate<'a, 'b, 'db>(&'a mut 
InferenceContext<'b, 'db>); + +impl<'db> CoerceDelegate<'db> for InferenceCoercionDelegate<'_, '_, 'db> { + #[inline] + fn infcx(&self) -> &InferCtxt<'db> { + &self.0.table.infer_ctxt + } + #[inline] + fn env(&self) -> &TraitEnvironment<'db> { + &self.0.table.trait_env + } + #[inline] + fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) { + self.0.target_features() + } + + #[inline] + fn set_diverging(&mut self, diverging_ty: Ty<'db>) { + self.0.table.set_diverging(diverging_ty); + } + + #[inline] + fn set_tainted_by_errors(&mut self) { + self.0.set_tainted_by_errors(); + } + + #[inline] + fn type_var_is_sized(&mut self, var: TyVid) -> bool { + self.0.table.type_var_is_sized(var) + } } impl<'db> InferenceContext<'_, 'db> { @@ -977,24 +1013,26 @@ expr_ty: Ty<'db>, mut target: Ty<'db>, allow_two_phase: AllowTwoPhase, - coerce_never: CoerceNever, + expr_is_read: ExprIsRead, ) -> RelateResult<'db, Ty<'db>> { let source = self.table.try_structurally_resolve_type(expr_ty); target = self.table.try_structurally_resolve_type(target); debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target); let cause = ObligationCause::new(); - let krate = self.krate(); + let coerce_never = match expr { + ExprOrPatId::ExprId(idx) => { + self.expr_guaranteed_to_constitute_read_for_never(idx, expr_is_read) + } + // `PatId` is passed for `PatKind::Path`. 
+ ExprOrPatId::PatId(_) => false, + }; let mut coerce = Coerce { - table: &mut self.table, - has_errors: &mut self.result.has_errors, + delegate: InferenceCoercionDelegate(self), cause, allow_two_phase, - coerce_never: matches!(coerce_never, CoerceNever::Yes), + coerce_never, use_lub: false, - target_features: &mut || { - Self::target_features(self.db, &self.target_features, self.owner, krate) - }, }; let ok = coerce.commit_if_ok(|coerce| coerce.coerce(source, target))?; @@ -1157,23 +1195,18 @@ // // NOTE: we set `coerce_never` to `true` here because coercion LUBs only // operate on values and not places, so a never coercion is valid. - let krate = self.krate(); let mut coerce = Coerce { - table: &mut self.table, - has_errors: &mut self.result.has_errors, + delegate: InferenceCoercionDelegate(self), cause: ObligationCause::new(), allow_two_phase: AllowTwoPhase::No, coerce_never: true, use_lub: true, - target_features: &mut || { - Self::target_features(self.db, &self.target_features, self.owner, krate) - }, }; // First try to coerce the new expression to the type of the previous ones, // but only if the new expression has no coercion already applied to it. let mut first_error = None; - if !self.result.expr_adjustments.contains_key(&new) { + if !coerce.delegate.0.result.expr_adjustments.contains_key(&new) { let result = coerce.commit_if_ok(|coerce| coerce.coerce(new_ty, prev_ty)); match result { Ok(ok) => { @@ -1335,8 +1368,9 @@ cause: &ObligationCause, expression: ExprId, expression_ty: Ty<'db>, + expr_is_read: ExprIsRead, ) { - self.coerce_inner(icx, cause, expression, expression_ty, false, false) + self.coerce_inner(icx, cause, expression, expression_ty, false, false, expr_is_read) } /// Indicates that one of the inputs is a "forced unit". 
This @@ -1357,8 +1391,17 @@ expr: ExprId, cause: &ObligationCause, label_unit_as_expected: bool, + expr_is_read: ExprIsRead, ) { - self.coerce_inner(icx, cause, expr, icx.types.unit, true, label_unit_as_expected) + self.coerce_inner( + icx, + cause, + expr, + icx.types.unit, + true, + label_unit_as_expected, + expr_is_read, + ) } /// The inner coercion "engine". If `expression` is `None`, this @@ -1372,6 +1415,7 @@ mut expression_ty: Ty<'db>, force_unit: bool, label_expression_as_expected: bool, + expr_is_read: ExprIsRead, ) { // Incorporate whatever type inference information we have // until now; in principle we might also want to process @@ -1408,7 +1452,7 @@ expression_ty, self.expected_ty, AllowTwoPhase::No, - CoerceNever::Yes, + expr_is_read, ) } else { match self.expressions { @@ -1504,88 +1548,170 @@ coerce(db, env, tys).is_ok() } +struct HirCoercionDelegate<'a, 'db> { + infcx: &'a InferCtxt<'db>, + env: &'a TraitEnvironment<'db>, + target_features: &'a TargetFeatures, +} + +impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> { + #[inline] + fn infcx(&self) -> &InferCtxt<'db> { + self.infcx + } + #[inline] + fn env(&self) -> &TraitEnvironment<'db> { + self.env + } + fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) { + (self.target_features, TargetFeatureIsSafeInTarget::No) + } + fn set_diverging(&mut self, _diverging_ty: Ty<'db>) {} + fn set_tainted_by_errors(&mut self) {} + fn type_var_is_sized(&mut self, _var: TyVid) -> bool { + false + } +} + fn coerce<'db>( db: &'db dyn HirDatabase, env: Arc<TraitEnvironment<'db>>, tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>, ) -> Result<(Vec<Adjustment<'db>>, Ty<'db>), TypeError<DbInterner<'db>>> { - let mut table = InferenceTable::new(db, env, None); - let interner = table.interner(); - let ((ty1_with_vars, ty2_with_vars), vars) = table.infer_ctxt.instantiate_canonical(tys); + let interner = DbInterner::new_with(db, Some(env.krate), env.block); + let infcx = 
interner.infer_ctxt().build(TypingMode::PostAnalysis); + let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys); let cause = ObligationCause::new(); // FIXME: Target features. let target_features = TargetFeatures::default(); let mut coerce = Coerce { - table: &mut table, - has_errors: &mut false, + delegate: HirCoercionDelegate { + infcx: &infcx, + env: &env, + target_features: &target_features, + }, cause, allow_two_phase: AllowTwoPhase::No, coerce_never: true, use_lub: false, - target_features: &mut || (&target_features, TargetFeatureIsSafeInTarget::No), }; - let InferOk { value: (adjustments, ty), obligations } = - coerce.coerce(ty1_with_vars, ty2_with_vars)?; - table.register_predicates(obligations); + let infer_ok = coerce.coerce(ty1_with_vars, ty2_with_vars)?; + let mut ocx = ObligationCtxt::new(&infcx); + let (adjustments, ty) = ocx.register_infer_ok_obligations(infer_ok); + _ = ocx.try_evaluate_obligations(); + let (adjustments, ty) = infcx.resolve_vars_if_possible((adjustments, ty)); // default any type vars that weren't unified back to their original bound vars // (kind of hacky) - let mut fallback_ty = |debruijn, infer| { - let var = vars.var_values.iter().position(|arg| { - arg.as_type().is_some_and(|ty| match ty.kind() { - TyKind::Infer(it) => infer == it, - _ => false, - }) - }); - var.map_or_else( - || Ty::new_error(interner, ErrorGuaranteed), - |i| { - Ty::new_bound( - interner, - debruijn, - BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_usize(i) }, + + struct Resolver<'db> { + interner: DbInterner<'db>, + debruijn: DebruijnIndex, + var_values: GenericArgs<'db>, + } + + impl<'db> TypeFolder<DbInterner<'db>> for Resolver<'db> { + fn cx(&self) -> DbInterner<'db> { + self.interner + } + + fn fold_binder<T>(&mut self, t: Binder<'db, T>) -> Binder<'db, T> + where + T: TypeFoldable<DbInterner<'db>>, + { + self.debruijn.shift_in(1); + let result = t.super_fold_with(self); + self.debruijn.shift_out(1); + result + } + + fn 
fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> { + if !t.has_infer() { + return t; + } + + if let TyKind::Infer(infer) = t.kind() { + let var = self.var_values.iter().position(|arg| { + arg.as_type().is_some_and(|ty| match ty.kind() { + TyKind::Infer(it) => infer == it, + _ => false, + }) + }); + var.map_or_else( + || Ty::new_error(self.interner, ErrorGuaranteed), + |i| { + Ty::new_bound( + self.interner, + self.debruijn, + BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_usize(i) }, + ) + }, ) - }, - ) - }; - let mut fallback_const = |debruijn, infer| { - let var = vars.var_values.iter().position(|arg| { - arg.as_const().is_some_and(|ty| match ty.kind() { - ConstKind::Infer(it) => infer == it, - _ => false, - }) - }); - var.map_or_else( - || Const::new_error(interner, ErrorGuaranteed), - |i| Const::new_bound(interner, debruijn, BoundConst { var: BoundVar::from_usize(i) }), - ) - }; - let mut fallback_region = |debruijn, infer| { - let var = vars.var_values.iter().position(|arg| { - arg.as_region().is_some_and(|ty| match ty.kind() { - RegionKind::ReVar(it) => infer == it, - _ => false, - }) - }); - var.map_or_else( - || Region::error(interner), - |i| { - Region::new_bound( - interner, - debruijn, - BoundRegion { kind: BoundRegionKind::Anon, var: BoundVar::from_usize(i) }, + } else { + t.super_fold_with(self) + } + } + + fn fold_const(&mut self, c: Const<'db>) -> Const<'db> { + if !c.has_infer() { + return c; + } + + if let ConstKind::Infer(infer) = c.kind() { + let var = self.var_values.iter().position(|arg| { + arg.as_const().is_some_and(|ty| match ty.kind() { + ConstKind::Infer(it) => infer == it, + _ => false, + }) + }); + var.map_or_else( + || Const::new_error(self.interner, ErrorGuaranteed), + |i| { + Const::new_bound( + self.interner, + self.debruijn, + BoundConst { var: BoundVar::from_usize(i) }, + ) + }, ) - }, - ) - }; - // FIXME also map the types in the adjustments - // FIXME: We don't fallback correctly since this is done on `InferenceContext` and we 
only have `InferenceTable`. - let ty = table.resolve_with_fallback( - ty, - &mut fallback_ty, - &mut fallback_const, - &mut fallback_region, - ); + } else { + c.super_fold_with(self) + } + } + + fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { + if let RegionKind::ReVar(infer) = r.kind() { + let var = self.var_values.iter().position(|arg| { + arg.as_region().is_some_and(|ty| match ty.kind() { + RegionKind::ReVar(it) => infer == it, + _ => false, + }) + }); + var.map_or_else( + || Region::error(self.interner), + |i| { + Region::new_bound( + self.interner, + self.debruijn, + BoundRegion { + kind: BoundRegionKind::Anon, + var: BoundVar::from_usize(i), + }, + ) + }, + ) + } else { + r + } + } + } + + // FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferCtxt`. + let (adjustments, ty) = (adjustments, ty).fold_with(&mut Resolver { + interner, + debruijn: DebruijnIndex::ZERO, + var_values: vars.var_values, + }); Ok((adjustments, ty)) }
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index b7ab109..7487660 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs
@@ -3,19 +3,18 @@ use std::{iter::repeat_with, mem}; use either::Either; -use hir_def::hir::ClosureKind; use hir_def::{ - BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, - expr_store::path::{GenericArg as HirGenericArg, GenericArgs as HirGenericArgs, Path}, + BlockId, FieldId, GenericDefId, ItemContainerId, Lookup, TupleFieldId, TupleId, + expr_store::path::{GenericArgs as HirGenericArgs, Path}, hir::{ - ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, Expr, ExprId, ExprOrPatId, LabelId, - Literal, Pat, PatId, Statement, UnaryOp, generics::GenericParamDataRef, + Array, AsmOperand, AsmOptions, BinaryOp, BindingAnnotation, Expr, ExprId, ExprOrPatId, + LabelId, Literal, Pat, PatId, Statement, UnaryOp, }, lang_item::{LangItem, LangItemTarget}, resolver::ValueNs, }; +use hir_def::{FunctionId, hir::ClosureKind}; use hir_expand::name::Name; -use intern::sym; use rustc_ast_ir::Mutability; use rustc_type_ir::{ CoroutineArgs, CoroutineArgsParts, InferTy, Interner, @@ -25,32 +24,25 @@ use tracing::debug; use crate::{ - Adjust, Adjustment, AutoBorrow, CallableDefId, DeclContext, DeclOrigin, - IncorrectGenericsLenKind, Rawness, TraitEnvironment, - autoderef::overloaded_deref_ty, + Adjust, Adjustment, CallableDefId, DeclContext, DeclOrigin, Rawness, TraitEnvironment, + autoderef::InferenceContextAutoderef, consteval, db::InternedCoroutine, generics::generics, infer::{ - AllowTwoPhase, BreakableKind, - coerce::{CoerceMany, CoerceNever}, - find_continuable, + AllowTwoPhase, BreakableKind, coerce::CoerceMany, find_continuable, pat::contains_explicit_ref_binding, }, - lang_items::lang_items_for_bin_op, - lower::{ - LifetimeElisionKind, lower_mutability, - path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings}, - }, - method_resolution::{self, VisibleFromModule}, + lower::{GenericPredicates, lower_mutability}, + method_resolution::{self, CandidateId, MethodCallee, MethodError}, next_solver::{ - Const, DbInterner, 
ErrorGuaranteed, GenericArg, GenericArgs, TraitRef, Ty, TyKind, - TypeError, + ErrorGuaranteed, FnSig, GenericArgs, TraitRef, Ty, TyKind, TypeError, infer::{ - InferOk, + BoundRegionConversionTime, InferOk, traits::{Obligation, ObligationCause}, }, obligation_ctxt::ObligationCtxt, + util::clauses_as_obligations, }, traits::FnTrait, }; @@ -103,12 +95,7 @@ ) -> Ty<'db> { let ty = self.infer_expr_inner(expr, expected, is_read); if let Some(target) = expected.only_has_type(&mut self.table) { - let coerce_never = if self.expr_guaranteed_to_constitute_read_for_never(expr, is_read) { - CoerceNever::Yes - } else { - CoerceNever::No - }; - match self.coerce(expr.into(), ty, target, AllowTwoPhase::No, coerce_never) { + match self.coerce(expr.into(), ty, target, AllowTwoPhase::No, is_read) { Ok(res) => res, Err(_) => { self.result @@ -203,6 +190,23 @@ } } + /// Checks if the pattern contains any `ref` or `ref mut` bindings, and if + /// yes whether it contains mutable or just immutables ones. + // + // FIXME(tschottdorf): this is problematic as the HIR is being scraped, but + // ref bindings are be implicit after #42640 (default match binding modes). See issue #44848. + fn contains_explicit_ref_binding(&self, pat: PatId) -> bool { + if let Pat::Bind { id, .. } = self.body[pat] + && matches!(self.body[id].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) + { + return true; + } + + let mut result = false; + self.body.walk_pats_shallow(pat, |pat| result |= self.contains_explicit_ref_binding(pat)); + result + } + fn is_syntactic_place_expr(&self, expr: ExprId) -> bool { match &self.body[expr] { // Lang item paths cannot currently be local variables or statics. @@ -250,6 +254,15 @@ } } + #[expect(clippy::needless_return)] + pub(crate) fn check_lhs_assignable(&self, lhs: ExprId) { + if self.is_syntactic_place_expr(lhs) { + return; + } + + // FIXME: Emit diagnostic. 
+ } + fn infer_expr_coerce_never( &mut self, expr: ExprId, @@ -269,7 +282,7 @@ } if let Some(target) = expected.only_has_type(&mut self.table) { - self.coerce(expr.into(), ty, target, AllowTwoPhase::No, CoerceNever::Yes) + self.coerce(expr.into(), ty, target, AllowTwoPhase::No, ExprIsRead::Yes) .expect("never-to-any coercion should always succeed") } else { ty @@ -320,16 +333,28 @@ expected.coercion_target_type(&mut self.table), &coercion_sites, ); - coerce.coerce(self, &ObligationCause::new(), then_branch, then_ty); + coerce.coerce(self, &ObligationCause::new(), then_branch, then_ty, ExprIsRead::Yes); match else_branch { Some(else_branch) => { let else_ty = self.infer_expr_inner(else_branch, expected, ExprIsRead::Yes); let else_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); - coerce.coerce(self, &ObligationCause::new(), else_branch, else_ty); + coerce.coerce( + self, + &ObligationCause::new(), + else_branch, + else_ty, + ExprIsRead::Yes, + ); self.diverges = condition_diverges | then_diverges & else_diverges; } None => { - coerce.coerce_forced_unit(self, tgt_expr, &ObligationCause::new(), true); + coerce.coerce_forced_unit( + self, + tgt_expr, + &ObligationCause::new(), + true, + ExprIsRead::Yes, + ); self.diverges = condition_diverges; } } @@ -407,12 +432,20 @@ expected, ), Expr::Match { expr, arms } => { - let scrutinee_is_read = arms - .iter() - .all(|arm| self.pat_guaranteed_to_constitute_read_for_never(arm.pat)); + let mut scrutinee_is_read = true; + let mut contains_ref_bindings = false; + for arm in arms { + scrutinee_is_read &= self.pat_guaranteed_to_constitute_read_for_never(arm.pat); + contains_ref_bindings |= self.contains_explicit_ref_binding(arm.pat); + } let scrutinee_is_read = if scrutinee_is_read { ExprIsRead::Yes } else { ExprIsRead::No }; - let input_ty = self.infer_expr(*expr, &Expectation::none(), scrutinee_is_read); + let input_ty = self.demand_scrutinee_type( + *expr, + contains_ref_bindings, + arms.is_empty(), + 
scrutinee_is_read, + ); if arms.is_empty() { self.diverges = Diverges::Always; @@ -421,7 +454,6 @@ let matchee_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let mut all_arms_diverge = Diverges::Always; for arm in arms.iter() { - let input_ty = self.table.structurally_resolve_type(input_ty); self.infer_top_pat(arm.pat, input_ty, None); } @@ -447,7 +479,13 @@ let arm_ty = self.infer_expr_inner(arm.expr, &expected, ExprIsRead::Yes); all_arms_diverge &= self.diverges; - coerce.coerce(self, &ObligationCause::new(), arm.expr, arm_ty); + coerce.coerce( + self, + &ObligationCause::new(), + arm.expr, + arm_ty, + ExprIsRead::Yes, + ); } self.diverges = matchee_diverges | all_arms_diverge; @@ -499,6 +537,7 @@ &ObligationCause::new(), expr.unwrap_or(tgt_expr), val_ty, + ExprIsRead::Yes, ); // Avoiding borrowck @@ -536,7 +575,7 @@ unit, yield_ty, AllowTwoPhase::No, - CoerceNever::Yes, + ExprIsRead::Yes, ); } resume_ty @@ -662,70 +701,13 @@ } } &Expr::Box { expr } => self.infer_expr_box(expr, expected), - Expr::UnaryOp { expr, op } => { - let inner_ty = self.infer_expr_inner(*expr, &Expectation::none(), ExprIsRead::Yes); - let inner_ty = self.table.try_structurally_resolve_type(inner_ty); - // FIXME: Note down method resolution her - match op { - UnaryOp::Deref => { - if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) - && let Some(deref_fn) = deref_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::deref)) - { - // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that - // the mutability is not wrong, and will be fixed in `self.infer_mut`). 
- self.write_method_resolution(tgt_expr, deref_fn, self.types.empty_args); - } - if let Some(derefed) = inner_ty.builtin_deref(self.db, true) { - self.table.try_structurally_resolve_type(derefed) - } else { - let infer_ok = overloaded_deref_ty(&self.table, inner_ty); - match infer_ok { - Some(infer_ok) => self.table.register_infer_ok(infer_ok), - None => self.err_ty(), - } - } - } - UnaryOp::Neg => { - match inner_ty.kind() { - // Fast path for builtins - TyKind::Int(_) - | TyKind::Uint(_) - | TyKind::Float(_) - | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => inner_ty, - // Otherwise we resolve via the std::ops::Neg trait - _ => self - .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()), - } - } - UnaryOp::Not => { - match inner_ty.kind() { - // Fast path for builtins - TyKind::Bool - | TyKind::Int(_) - | TyKind::Uint(_) - | TyKind::Float(_) - | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => inner_ty, - // Otherwise we resolve via the std::ops::Not trait - _ => self - .resolve_associated_type(inner_ty, self.resolve_ops_not_output()), - } - } - } - } + Expr::UnaryOp { expr, op } => self.infer_unop_expr(*op, *expr, expected, tgt_expr), Expr::BinaryOp { lhs, rhs, op } => match op { - Some(BinaryOp::LogicOp(_)) => { - let bool_ty = self.types.bool; - self.infer_expr_coerce(*lhs, &Expectation::HasType(bool_ty), ExprIsRead::Yes); - let lhs_diverges = self.diverges; - self.infer_expr_coerce(*rhs, &Expectation::HasType(bool_ty), ExprIsRead::Yes); - // Depending on the LHS' value, the RHS can never execute. 
- self.diverges = lhs_diverges; - bool_ty + Some(BinaryOp::Assignment { op: Some(op) }) => { + self.infer_assign_op_expr(tgt_expr, *op, *lhs, *rhs) } - Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr), - _ => self.err_ty(), + Some(op) => self.infer_binop_expr(tgt_expr, *op, *lhs, *rhs), + None => self.err_ty(), }, &Expr::Assignment { target, value } => { // In ordinary (non-destructuring) assignments, the type of @@ -790,8 +772,7 @@ Expr::Range { lhs, rhs, range_type } => { let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none(), ExprIsRead::Yes)); - let rhs_expect = - lhs_ty.as_ref().map_or_else(Expectation::none, |ty| Expectation::has_type(*ty)); + let rhs_expect = lhs_ty.map_or_else(Expectation::none, Expectation::has_type); let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect, ExprIsRead::Yes)); let single_arg_adt = |adt, ty: Ty<'db>| { Ty::new_adt( @@ -833,55 +814,39 @@ } } Expr::Index { base, index } => { - let base_ty = self.infer_expr_inner(*base, &Expectation::none(), ExprIsRead::Yes); - let index_ty = self.infer_expr(*index, &Expectation::none(), ExprIsRead::Yes); + let base_t = self.infer_expr_no_expect(*base, ExprIsRead::Yes); + let idx_t = self.infer_expr_no_expect(*index, ExprIsRead::Yes); - if let Some(index_trait) = self.resolve_lang_trait(LangItem::Index) { - let canonicalized = self.canonicalize(base_ty); - let receiver_adjustments = method_resolution::resolve_indexing_op( - &mut self.table, - canonicalized, - index_trait, - ); - let (self_ty, mut adj) = receiver_adjustments - .map_or((self.err_ty(), Vec::new()), |adj| { - adj.apply(&mut self.table, base_ty) - }); - - // mutability will be fixed up in `InferenceContext::infer_mut`; - adj.push(Adjustment::borrow( - self.interner(), - Mutability::Not, - self_ty, - self.table.next_region_var(), - )); - self.write_expr_adj(*base, adj.into_boxed_slice()); - if let Some(func) = index_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::index)) 
- { - let subst = GenericArgs::new_from_iter( - self.interner(), - [self_ty.into(), index_ty.into()], + let base_t = self.table.structurally_resolve_type(base_t); + match self.lookup_indexing(tgt_expr, *base, base_t, idx_t) { + Some((trait_index_ty, trait_element_ty)) => { + // two-phase not needed because index_ty is never mutable + self.demand_coerce( + *index, + idx_t, + trait_index_ty, + AllowTwoPhase::No, + ExprIsRead::Yes, ); - self.write_method_resolution(tgt_expr, func, subst); + self.table.select_obligations_where_possible(); + trait_element_ty } - let assoc = self.resolve_ops_index_output(); - self.resolve_associated_type_with_params(self_ty, assoc, &[index_ty.into()]) - } else { - self.err_ty() + // FIXME: Report an error. + None => self.types.error, } } Expr::Tuple { exprs, .. } => { - let mut tys = - match expected.only_has_type(&mut self.table).as_ref().map(|t| t.kind()) { - Some(TyKind::Tuple(substs)) => substs - .iter() - .chain(repeat_with(|| self.table.next_ty_var())) - .take(exprs.len()) - .collect::<Vec<_>>(), - _ => (0..exprs.len()).map(|_| self.table.next_ty_var()).collect(), - }; + let mut tys = match expected + .only_has_type(&mut self.table) + .map(|t| self.table.try_structurally_resolve_type(t).kind()) + { + Some(TyKind::Tuple(substs)) => substs + .iter() + .chain(repeat_with(|| self.table.next_ty_var())) + .take(exprs.len()) + .collect::<Vec<_>>(), + _ => (0..exprs.len()).map(|_| self.table.next_ty_var()).collect(), + }; for (expr, ty) in exprs.iter().zip(tys.iter_mut()) { *ty = @@ -1014,7 +979,7 @@ ty, fnptr_ty, AllowTwoPhase::No, - CoerceNever::Yes, + ExprIsRead::Yes, ); } TyKind::Ref(_, base_ty, mutbl) => { @@ -1024,7 +989,7 @@ ty, ptr_ty, AllowTwoPhase::No, - CoerceNever::Yes, + ExprIsRead::Yes, ); } _ => {} @@ -1073,6 +1038,77 @@ ty } + fn demand_scrutinee_type( + &mut self, + scrut: ExprId, + contains_ref_bindings: bool, + no_arms: bool, + scrutinee_is_read: ExprIsRead, + ) -> Ty<'db> { + // Not entirely obvious: if matches may 
create ref bindings, we want to + // use the *precise* type of the scrutinee, *not* some supertype, as + // the "scrutinee type" (issue #23116). + // + // arielb1 [writes here in this comment thread][c] that there + // is certainly *some* potential danger, e.g., for an example + // like: + // + // [c]: https://github.com/rust-lang/rust/pull/43399#discussion_r130223956 + // + // ``` + // let Foo(x) = f()[0]; + // ``` + // + // Then if the pattern matches by reference, we want to match + // `f()[0]` as a lexpr, so we can't allow it to be + // coerced. But if the pattern matches by value, `f()[0]` is + // still syntactically a lexpr, but we *do* want to allow + // coercions. + // + // However, *likely* we are ok with allowing coercions to + // happen if there are no explicit ref mut patterns - all + // implicit ref mut patterns must occur behind a reference, so + // they will have the "correct" variance and lifetime. + // + // This does mean that the following pattern would be legal: + // + // ``` + // struct Foo(Bar); + // struct Bar(u32); + // impl Deref for Foo { + // type Target = Bar; + // fn deref(&self) -> &Bar { &self.0 } + // } + // impl DerefMut for Foo { + // fn deref_mut(&mut self) -> &mut Bar { &mut self.0 } + // } + // fn foo(x: &mut Foo) { + // { + // let Bar(z): &mut Bar = x; + // *z = 42; + // } + // assert_eq!(foo.0.0, 42); + // } + // ``` + // + // FIXME(tschottdorf): don't call contains_explicit_ref_binding, which + // is problematic as the HIR is being scraped, but ref bindings may be + // implicit after #42640. We need to make sure that pat_adjustments + // (once introduced) is populated by the time we get here. + // + // See #44848. + if contains_ref_bindings || no_arms { + self.infer_expr_no_expect(scrut, scrutinee_is_read) + } else { + // ...but otherwise we want to use any supertype of the + // scrutinee. This is sort of a workaround, see note (*) in + // `check_pat` for some details. 
+ let scrut_ty = self.table.next_ty_var(); + self.infer_expr_coerce_never(scrut, &Expectation::HasType(scrut_ty), scrutinee_is_read); + scrut_ty + } + } + fn infer_expr_path(&mut self, path: &Path, id: ExprOrPatId, scope_id: ExprId) -> Ty<'db> { let g = self.resolver.update_to_inner_scope(self.db, self.owner, scope_id); let ty = match self.infer_path(path, id) { @@ -1089,6 +1125,47 @@ ty } + fn infer_unop_expr( + &mut self, + unop: UnaryOp, + oprnd: ExprId, + expected: &Expectation<'db>, + expr: ExprId, + ) -> Ty<'db> { + let expected_inner = match unop { + UnaryOp::Not | UnaryOp::Neg => expected, + UnaryOp::Deref => &Expectation::None, + }; + let mut oprnd_t = self.infer_expr_inner(oprnd, expected_inner, ExprIsRead::Yes); + + oprnd_t = self.table.structurally_resolve_type(oprnd_t); + match unop { + UnaryOp::Deref => { + if let Some(ty) = self.lookup_derefing(expr, oprnd, oprnd_t) { + oprnd_t = ty; + } else { + // FIXME: Report an error. + oprnd_t = self.types.error; + } + } + UnaryOp::Not => { + let result = self.infer_user_unop(expr, oprnd_t, unop); + // If it's builtin, we can reuse the type, this helps inference. + if !(oprnd_t.is_integral() || oprnd_t.kind() == TyKind::Bool) { + oprnd_t = result; + } + } + UnaryOp::Neg => { + let result = self.infer_user_unop(expr, oprnd_t, unop); + // If it's builtin, we can reuse the type, this helps inference. 
+ if !oprnd_t.is_numeric() { + oprnd_t = result; + } + } + } + oprnd_t + } + fn infer_async_block( &mut self, tgt_expr: ExprId, @@ -1106,8 +1183,7 @@ let (_, inner_ty) = self.with_breakable_ctx(BreakableKind::Border, None, None, |this| { let ty = this.infer_block(tgt_expr, *id, statements, *tail, None, expected); if let Some(target) = expected.only_has_type(&mut this.table) { - match this.coerce(tgt_expr.into(), ty, target, AllowTwoPhase::No, CoerceNever::Yes) - { + match this.coerce(tgt_expr.into(), ty, target, AllowTwoPhase::No, ExprIsRead::Yes) { Ok(res) => res, Err(_) => { this.result @@ -1220,7 +1296,10 @@ } fn infer_expr_array(&mut self, array: &Array, expected: &Expectation<'db>) -> Ty<'db> { - let elem_ty = match expected.to_option(&mut self.table).as_ref().map(|t| t.kind()) { + let elem_ty = match expected + .to_option(&mut self.table) + .map(|t| self.table.try_structurally_resolve_type(t).kind()) + { Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st, _ => self.table.next_ty_var(), }; @@ -1236,7 +1315,13 @@ let mut coerce = CoerceMany::with_coercion_sites(elem_ty, elements); for &expr in elements.iter() { let cur_elem_ty = self.infer_expr_inner(expr, &expected, ExprIsRead::Yes); - coerce.coerce(self, &ObligationCause::new(), expr, cur_elem_ty); + coerce.coerce( + self, + &ObligationCause::new(), + expr, + cur_elem_ty, + ExprIsRead::Yes, + ); } ( coerce.complete(self), @@ -1250,14 +1335,18 @@ ExprIsRead::Yes, ); let usize = self.types.usize; - match self.body[repeat] { + let len = match self.body[repeat] { Expr::Underscore => { self.write_expr_ty(repeat, usize); + self.table.next_const_var() } - _ => _ = self.infer_expr(repeat, &Expectation::HasType(usize), ExprIsRead::Yes), - } + _ => { + self.infer_expr(repeat, &Expectation::HasType(usize), ExprIsRead::Yes); + consteval::eval_to_const(repeat, self) + } + }; - (elem_ty, consteval::eval_to_const(repeat, self)) + (elem_ty, len) } }; // Try to evaluate unevaluated constant, and insert variable if is not 
possible. @@ -1274,7 +1363,7 @@ let return_expr_ty = self.infer_expr_inner(expr, &Expectation::HasType(ret_ty), ExprIsRead::Yes); let mut coerce_many = self.return_coercion.take().unwrap(); - coerce_many.coerce(self, &ObligationCause::new(), expr, return_expr_ty); + coerce_many.coerce(self, &ObligationCause::new(), expr, return_expr_ty, ExprIsRead::Yes); self.return_coercion = Some(coerce_many); } @@ -1285,7 +1374,13 @@ self.infer_return(expr); } else { let mut coerce = self.return_coercion.take().unwrap(); - coerce.coerce_forced_unit(self, ret, &ObligationCause::new(), true); + coerce.coerce_forced_unit( + self, + ret, + &ObligationCause::new(), + true, + ExprIsRead::Yes, + ); self.return_coercion = Some(coerce); } } @@ -1350,103 +1445,6 @@ } } - fn infer_overloadable_binop( - &mut self, - lhs: ExprId, - op: BinaryOp, - rhs: ExprId, - tgt_expr: ExprId, - ) -> Ty<'db> { - let lhs_expectation = Expectation::none(); - let is_read = if matches!(op, BinaryOp::Assignment { .. }) { - ExprIsRead::Yes - } else { - ExprIsRead::No - }; - let lhs_ty = self.infer_expr(lhs, &lhs_expectation, is_read); - let rhs_ty = self.table.next_ty_var(); - - let trait_func = lang_items_for_bin_op(op).and_then(|(name, lang_item)| { - let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?; - let func = trait_id.trait_items(self.db).method_by_name(&name)?; - Some((trait_id, func)) - }); - let func = match trait_func { - Some((_, it)) => it, - None => { - // HACK: `rhs_ty` is a general inference variable with no clue at all at this - // point. Passing `lhs_ty` as both operands just to check if `lhs_ty` is a builtin - // type applicable to `op`. - let ret_ty = if self.is_builtin_binop(lhs_ty, lhs_ty, op) { - // Assume both operands are builtin so we can continue inference. No guarantee - // on the correctness, rustc would complain as necessary lang items don't seem - // to exist anyway. 
- self.enforce_builtin_binop_types(lhs_ty, rhs_ty, op) - } else { - self.err_ty() - }; - - self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty), ExprIsRead::Yes); - - return ret_ty; - } - }; - - // HACK: We can use this substitution for the function because the function itself doesn't - // have its own generic parameters. - let args = GenericArgs::new_from_iter(self.interner(), [lhs_ty.into(), rhs_ty.into()]); - - self.write_method_resolution(tgt_expr, func, args); - - let method_ty = self.db.value_ty(func.into()).unwrap().instantiate(self.interner(), args); - self.register_obligations_for_call(method_ty); - - self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty), ExprIsRead::Yes); - - let ret_ty = match method_ty.callable_sig(self.interner()) { - Some(sig) => { - let sig = sig.skip_binder(); - let p_left = sig.inputs_and_output.as_slice()[0]; - if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. }) - && let TyKind::Ref(lt, _, mtbl) = p_left.kind() - { - self.write_expr_adj( - lhs, - Box::new([Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(lt, mtbl)), - target: p_left, - }]), - ); - } - let p_right = sig.inputs_and_output.as_slice()[1]; - if matches!(op, BinaryOp::CmpOp(..)) - && let TyKind::Ref(lt, _, mtbl) = p_right.kind() - { - self.write_expr_adj( - rhs, - Box::new([Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(lt, mtbl)), - target: p_right, - }]), - ); - } - sig.output() - } - None => self.err_ty(), - }; - - let ret_ty = self.process_remote_user_written_ty(ret_ty); - - if self.is_builtin_binop(lhs_ty, rhs_ty, op) { - // use knowledge of built-in binary ops, which can sometimes help inference - let builtin_ret = self.enforce_builtin_binop_types(lhs_ty, rhs_ty, op); - self.unify(builtin_ret, ret_ty); - builtin_ret - } else { - ret_ty - } - } - fn infer_block( &mut self, expr: ExprId, @@ -1548,20 +1546,13 @@ // we don't even make an attempt at coercion this.table.new_maybe_never_var() } else if let Some(t) = 
expected.only_has_type(&mut this.table) { - let coerce_never = if this - .expr_guaranteed_to_constitute_read_for_never(expr, ExprIsRead::Yes) - { - CoerceNever::Yes - } else { - CoerceNever::No - }; if this .coerce( expr.into(), this.types.unit, t, AllowTwoPhase::No, - coerce_never, + ExprIsRead::Yes, ) .is_err() { @@ -1591,7 +1582,7 @@ name: &Name, ) -> Option<(Ty<'db>, Either<FieldId, TupleFieldId>, Vec<Adjustment<'db>>, bool)> { let interner = self.interner(); - let mut autoderef = self.table.autoderef(receiver_ty); + let mut autoderef = self.table.autoderef_with_tracking(receiver_ty); let mut private_field = None; let res = autoderef.by_ref().find_map(|(derefed_ty, _)| { let (field_id, parameters) = match derefed_ty.kind() { @@ -1640,14 +1631,16 @@ Some(match res { Some((field_id, ty)) => { - let adjustments = autoderef.adjust_steps(); + let adjustments = + self.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok()); let ty = self.process_remote_user_written_ty(ty); (ty, field_id, adjustments, true) } None => { let (field_id, subst) = private_field?; - let adjustments = autoderef.adjust_steps(); + let adjustments = + self.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok()); let ty = self.db.field_types(field_id.parent)[field_id.local_id] .instantiate(self.interner(), subst); let ty = self.process_remote_user_written_ty(ty); @@ -1666,6 +1659,7 @@ ) -> Ty<'db> { // Field projections don't constitute reads. let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none(), ExprIsRead::No); + let receiver_ty = self.table.structurally_resolve_type(receiver_ty); if name.is_missing() { // Bail out early, don't even try to look up field. 
Also, we don't issue an unresolved @@ -1689,46 +1683,39 @@ None => { // no field found, lets attempt to resolve it like a function so that IDE things // work out while people are typing - let canonicalized_receiver = self.canonicalize(receiver_ty); - let resolved = method_resolution::lookup_method( - &canonicalized_receiver, - &mut self.table, - Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope) - .as_ref() - .left_or_else(|&it| it), - VisibleFromModule::Filter(self.resolver.module()), - name, + let resolved = self.lookup_method_including_private( + receiver_ty, + name.clone(), + None, + receiver, + tgt_expr, ); self.push_diagnostic(InferenceDiagnostic::UnresolvedField { expr: tgt_expr, receiver: receiver_ty, name: name.clone(), - method_with_same_name_exists: resolved.is_some(), + method_with_same_name_exists: resolved.is_ok(), }); match resolved { - Some((adjust, func, _)) => { - let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); - let args = self.substs_for_method_call(tgt_expr, func.into(), None); - self.write_expr_adj(receiver, adjustments.into_boxed_slice()); - self.write_method_resolution(tgt_expr, func, args); - - self.check_method_call( - tgt_expr, - &[], - self.db - .value_ty(func.into()) - .unwrap() - .instantiate(self.interner(), args), - ty, - expected, - ) + Ok((func, _is_visible)) => { + self.check_method_call(tgt_expr, &[], func.sig, receiver_ty, expected) } - None => self.err_ty(), + Err(_) => self.err_ty(), } } } } + fn instantiate_erroneous_method(&mut self, def_id: FunctionId) -> MethodCallee<'db> { + // FIXME: Using fresh infer vars for the method args isn't optimal, + // we can do better by going thorough the full probe/confirm machinery. 
+ let args = self.table.fresh_args_for_item(def_id.into()); + let sig = self.db.callable_item_signature(def_id.into()).instantiate(self.interner(), args); + let sig = + self.infcx().instantiate_binder_with_fresh_vars(BoundRegionConversionTime::FnCall, sig); + MethodCallee { def_id, args, sig } + } + fn infer_call( &mut self, tgt_expr: ExprId, @@ -1737,13 +1724,14 @@ expected: &Expectation<'db>, ) -> Ty<'db> { let callee_ty = self.infer_expr(callee, &Expectation::none(), ExprIsRead::Yes); + let callee_ty = self.table.try_structurally_resolve_type(callee_ty); let interner = self.interner(); - let mut derefs = self.table.autoderef(callee_ty); + let mut derefs = InferenceContextAutoderef::new_from_inference_context(self, callee_ty); let (res, derefed_callee) = loop { let Some((callee_deref_ty, _)) = derefs.next() else { break (None, callee_ty); }; - if let Some(res) = derefs.table.callable_sig(callee_deref_ty, args.len()) { + if let Some(res) = derefs.ctx().table.callable_sig(callee_deref_ty, args.len()) { break (Some(res), callee_deref_ty); } }; @@ -1753,7 +1741,8 @@ || res.is_none(); let (param_tys, ret_ty) = match res { Some((func, params, ret_ty)) => { - let mut adjustments = derefs.adjust_steps(); + let infer_ok = derefs.adjust_steps_as_infer_ok(); + let mut adjustments = self.table.register_infer_ok(infer_ok); if let Some(fn_x) = func { self.write_fn_trait_method_resolution( fn_x, @@ -1819,7 +1808,7 @@ indices_to_skip, is_varargs, ); - self.table.normalize_associated_types_in(ret_ty) + ret_ty } fn infer_method_call( @@ -1834,56 +1823,26 @@ let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none(), ExprIsRead::Yes); let receiver_ty = self.table.try_structurally_resolve_type(receiver_ty); - if matches!(receiver_ty.kind(), TyKind::Error(_) | TyKind::Infer(InferTy::TyVar(_))) { - // Don't probe on error type, or on a fully unresolved infer var. - // FIXME: Emit an error if we're probing on an infer var (type annotations needed). 
- for &arg in args { - // Make sure we infer and record the arguments. - self.infer_expr_no_expect(arg, ExprIsRead::Yes); - } - return receiver_ty; - } - - let canonicalized_receiver = self.canonicalize(receiver_ty); - - let resolved = method_resolution::lookup_method( - &canonicalized_receiver, - &mut self.table, - Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope) - .as_ref() - .left_or_else(|&it| it), - VisibleFromModule::Filter(self.resolver.module()), - method_name, + let resolved = self.lookup_method_including_private( + receiver_ty, + method_name.clone(), + generic_args, + receiver, + tgt_expr, ); match resolved { - Some((adjust, func, visible)) => { + Ok((func, visible)) => { if !visible { self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id: tgt_expr.into(), - item: func.into(), + item: func.def_id.into(), }) } - - let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); - self.write_expr_adj(receiver, adjustments.into_boxed_slice()); - - let gen_args = self.substs_for_method_call(tgt_expr, func.into(), generic_args); - self.write_method_resolution(tgt_expr, func, gen_args); - let interner = DbInterner::new_with(self.db, None, None); - self.check_method_call( - tgt_expr, - args, - self.db - .value_ty(func.into()) - .expect("we have a function def") - .instantiate(interner, gen_args), - ty, - expected, - ) + self.check_method_call(tgt_expr, args, func.sig, receiver_ty, expected) } // Failed to resolve, report diagnostic and try to resolve as call to field access or // assoc function - None => { + Err(_) => { let field_with_same_name_exists = match self.lookup_field(receiver_ty, method_name) { Some((ty, field_id, adjustments, _public)) => { @@ -1894,65 +1853,65 @@ None => None, }; - let assoc_func_with_same_name = method_resolution::iterate_method_candidates( - &canonicalized_receiver, - &mut self.table, - Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope) - .as_ref() - .left_or_else(|&it| it), - 
VisibleFromModule::Filter(self.resolver.module()), - Some(method_name), - method_resolution::LookupMode::Path, - |_ty, item, visible| match item { - hir_def::AssocItemId::FunctionId(function_id) if visible => { - Some(function_id) - } - _ => None, - }, - ); + let assoc_func_with_same_name = self.with_method_resolution(|ctx| { + if !matches!( + receiver_ty.kind(), + TyKind::Infer(InferTy::TyVar(_)) | TyKind::Error(_) + ) { + ctx.probe_for_name( + method_resolution::Mode::Path, + method_name.clone(), + receiver_ty, + ) + } else { + Err(MethodError::ErrorReported) + } + }); + let assoc_func_with_same_name = match assoc_func_with_same_name { + Ok(method_resolution::Pick { + item: CandidateId::FunctionId(def_id), .. + }) + | Err(MethodError::PrivateMatch(method_resolution::Pick { + item: CandidateId::FunctionId(def_id), + .. + })) => Some(self.instantiate_erroneous_method(def_id)), + _ => None, + }; self.push_diagnostic(InferenceDiagnostic::UnresolvedMethodCall { expr: tgt_expr, receiver: receiver_ty, name: method_name.clone(), field_with_same_name: field_with_same_name_exists, - assoc_func_with_same_name, + assoc_func_with_same_name: assoc_func_with_same_name.map(|it| it.def_id), }); let recovered = match assoc_func_with_same_name { - Some(f) => { - let args = self.substs_for_method_call(tgt_expr, f.into(), generic_args); - let interner = DbInterner::new_with(self.db, None, None); - let f = self - .db - .value_ty(f.into()) - .expect("we have a function def") - .instantiate(interner, args); - let sig = f.callable_sig(self.interner()).expect("we have a function def"); - Some((f, sig, true)) - } + Some(it) => Some(( + Ty::new_fn_def( + self.interner(), + CallableDefId::FunctionId(it.def_id).into(), + it.args, + ), + it.sig, + true, + )), None => field_with_same_name_exists.and_then(|field_ty| { let callable_sig = field_ty.callable_sig(self.interner())?; - Some((field_ty, callable_sig, false)) + Some((field_ty, callable_sig.skip_binder(), false)) }), }; match recovered { - 
Some((callee_ty, sig, strip_first)) => { - let sig = sig.skip_binder(); - self.check_call( - tgt_expr, - args, - callee_ty, - sig.inputs_and_output - .inputs() - .get(strip_first as usize..) - .unwrap_or(&[]), - sig.output(), - &[], - true, - expected, - ) - } + Some((callee_ty, sig, strip_first)) => self.check_call( + tgt_expr, + args, + callee_ty, + sig.inputs_and_output.inputs().get(strip_first as usize..).unwrap_or(&[]), + sig.output(), + &[], + true, + expected, + ), None => { for &arg in args.iter() { self.infer_expr_no_expect(arg, ExprIsRead::Yes); @@ -1968,38 +1927,20 @@ &mut self, tgt_expr: ExprId, args: &[ExprId], - method_ty: Ty<'db>, + sig: FnSig<'db>, receiver_ty: Ty<'db>, expected: &Expectation<'db>, ) -> Ty<'db> { - self.register_obligations_for_call(method_ty); - let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) = - match method_ty.callable_sig(self.interner()) { - Some(sig) => { - let sig = sig.skip_binder(); - ( - if !sig.inputs_and_output.inputs().is_empty() { - ( - sig.inputs_and_output.as_slice()[0], - sig.inputs_and_output.inputs()[1..].to_vec(), - ) - } else { - (self.types.error, Vec::new()) - }, - sig.output(), - sig.c_variadic, - ) - } - None => { - let formal_receiver_ty = self.table.next_ty_var(); - let ret_ty = self.table.next_ty_var(); - ((formal_receiver_ty, Vec::new()), ret_ty, true) - } - }; + let (formal_receiver_ty, param_tys) = if !sig.inputs_and_output.inputs().is_empty() { + (sig.inputs_and_output.as_slice()[0], &sig.inputs_and_output.inputs()[1..]) + } else { + (self.types.error, &[] as _) + }; + let ret_ty = sig.output(); self.table.unify(formal_receiver_ty, receiver_ty); - self.check_call_arguments(tgt_expr, ¶m_tys, ret_ty, expected, args, &[], is_varargs); - self.table.normalize_associated_types_in(ret_ty) + self.check_call_arguments(tgt_expr, param_tys, ret_ty, expected, args, &[], sig.c_variadic); + ret_ty } /// Generic function that factors out common logic from function calls, @@ -2110,20 +2051,13 @@ // 
fulfillment error to be more accurate. let coerced_ty = this.table.resolve_vars_with_obligations(coerced_ty); - let coerce_never = if this - .expr_guaranteed_to_constitute_read_for_never(provided_arg, ExprIsRead::Yes) - { - CoerceNever::Yes - } else { - CoerceNever::No - }; let coerce_error = this .coerce( provided_arg.into(), checked_ty, coerced_ty, AllowTwoPhase::Yes, - coerce_never, + ExprIsRead::Yes, ) .err(); if coerce_error.is_some() { @@ -2204,144 +2138,19 @@ if !args_count_matches {} } - fn substs_for_method_call( - &mut self, - expr: ExprId, - def: GenericDefId, - generic_args: Option<&HirGenericArgs>, - ) -> GenericArgs<'db> { - struct LowererCtx<'a, 'b, 'db> { - ctx: &'a mut InferenceContext<'b, 'db>, - expr: ExprId, - } - - impl<'db> GenericArgsLowerer<'db> for LowererCtx<'_, '_, 'db> { - fn report_len_mismatch( - &mut self, - def: GenericDefId, - provided_count: u32, - expected_count: u32, - kind: IncorrectGenericsLenKind, - ) { - self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsLen { - expr: self.expr, - provided_count, - expected_count, - kind, - def, - }); - } - - fn report_arg_mismatch( - &mut self, - param_id: GenericParamId, - arg_idx: u32, - has_self_arg: bool, - ) { - self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsOrder { - expr: self.expr, - param_id, - arg_idx, - has_self_arg, - }); - } - - fn provided_kind( - &mut self, - param_id: GenericParamId, - param: GenericParamDataRef<'_>, - arg: &HirGenericArg, - ) -> GenericArg<'db> { - match (param, arg) { - ( - GenericParamDataRef::LifetimeParamData(_), - HirGenericArg::Lifetime(lifetime), - ) => self.ctx.make_body_lifetime(*lifetime).into(), - (GenericParamDataRef::TypeParamData(_), HirGenericArg::Type(type_ref)) => { - self.ctx.make_body_ty(*type_ref).into() - } - (GenericParamDataRef::ConstParamData(_), HirGenericArg::Const(konst)) => { - let GenericParamId::ConstParamId(const_id) = param_id else { - unreachable!("non-const param ID for const 
param"); - }; - let const_ty = self.ctx.db.const_param_ty_ns(const_id); - self.ctx.make_body_const(*konst, const_ty).into() - } - _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"), - } - } - - fn provided_type_like_const( - &mut self, - const_ty: Ty<'db>, - arg: TypeLikeConst<'_>, - ) -> Const<'db> { - match arg { - TypeLikeConst::Path(path) => self.ctx.make_path_as_body_const(path, const_ty), - TypeLikeConst::Infer => self.ctx.table.next_const_var(), - } - } - - fn inferred_kind( - &mut self, - _def: GenericDefId, - param_id: GenericParamId, - _param: GenericParamDataRef<'_>, - _infer_args: bool, - _preceding_args: &[GenericArg<'db>], - ) -> GenericArg<'db> { - // Always create an inference var, even when `infer_args == false`. This helps with diagnostics, - // and I think it's also required in the presence of `impl Trait` (that must be inferred). - self.ctx.table.next_var_for_param(param_id) - } - - fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db> { - self.ctx.table.next_var_for_param(param_id) - } - - fn report_elided_lifetimes_in_path( - &mut self, - _def: GenericDefId, - _expected_count: u32, - _hard_error: bool, - ) { - unreachable!("we set `LifetimeElisionKind::Infer`") - } - - fn report_elision_failure(&mut self, _def: GenericDefId, _expected_count: u32) { - unreachable!("we set `LifetimeElisionKind::Infer`") - } - - fn report_missing_lifetime(&mut self, _def: GenericDefId, _expected_count: u32) { - unreachable!("we set `LifetimeElisionKind::Infer`") - } - } - - substs_from_args_and_bindings( - self.db, - self.body, - generic_args, - def, - true, - LifetimeElisionKind::Infer, - false, - None, - &mut LowererCtx { ctx: self, expr }, - ) - } - fn register_obligations_for_call(&mut self, callable_ty: Ty<'db>) { let callable_ty = self.table.try_structurally_resolve_type(callable_ty); if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind() { - let generic_predicates = - 
self.db.generic_predicates(GenericDefId::from_callable(self.db, fn_def.0)); - if let Some(predicates) = generic_predicates.instantiate(self.interner(), parameters) { - let interner = self.interner(); - let param_env = self.table.trait_env.env; - self.table.register_predicates(predicates.map(|predicate| { - Obligation::new(interner, ObligationCause::new(), param_env, predicate) - })); - } + let generic_predicates = GenericPredicates::query_all( + self.db, + GenericDefId::from_callable(self.db, fn_def.0), + ); + let param_env = self.table.trait_env.env; + self.table.register_predicates(clauses_as_obligations( + generic_predicates.iter_instantiated_copied(self.interner(), parameters.as_slice()), + ObligationCause::new(), + param_env, + )); // add obligation for trait implementation, if this is a trait method match fn_def.0 { CallableDefId::FunctionId(f) => { @@ -2410,122 +2219,6 @@ indices } - /// Dereferences a single level of immutable referencing. - fn deref_ty_if_possible(&mut self, ty: Ty<'db>) -> Ty<'db> { - let ty = self.table.try_structurally_resolve_type(ty); - match ty.kind() { - TyKind::Ref(_, inner, Mutability::Not) => { - self.table.try_structurally_resolve_type(inner) - } - _ => ty, - } - } - - /// Enforces expectations on lhs type and rhs type depending on the operator and returns the - /// output type of the binary op. - fn enforce_builtin_binop_types(&mut self, lhs: Ty<'db>, rhs: Ty<'db>, op: BinaryOp) -> Ty<'db> { - // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work (See rust-lang/rust#57447). 
- let lhs = self.deref_ty_if_possible(lhs); - let rhs = self.deref_ty_if_possible(rhs); - - let (op, is_assign) = match op { - BinaryOp::Assignment { op: Some(inner) } => (BinaryOp::ArithOp(inner), true), - _ => (op, false), - }; - - let output_ty = match op { - BinaryOp::LogicOp(_) => { - let bool_ = self.types.bool; - self.unify(lhs, bool_); - self.unify(rhs, bool_); - bool_ - } - - BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => { - // result type is same as LHS always - lhs - } - - BinaryOp::ArithOp(_) => { - // LHS, RHS, and result will have the same type - self.unify(lhs, rhs); - lhs - } - - BinaryOp::CmpOp(_) => { - // LHS and RHS will have the same type - self.unify(lhs, rhs); - self.types.bool - } - - BinaryOp::Assignment { op: None } => { - stdx::never!("Simple assignment operator is not binary op."); - lhs - } - - BinaryOp::Assignment { .. } => unreachable!("handled above"), - }; - - if is_assign { self.types.unit } else { output_ty } - } - - fn is_builtin_binop(&mut self, lhs: Ty<'db>, rhs: Ty<'db>, op: BinaryOp) -> bool { - // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work (See rust-lang/rust#57447). 
- let lhs = self.deref_ty_if_possible(lhs); - let rhs = self.deref_ty_if_possible(rhs); - - let op = match op { - BinaryOp::Assignment { op: Some(inner) } => BinaryOp::ArithOp(inner), - _ => op, - }; - - match op { - BinaryOp::LogicOp(_) => true, - - BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => { - lhs.is_integral() && rhs.is_integral() - } - - BinaryOp::ArithOp( - ArithOp::Add | ArithOp::Sub | ArithOp::Mul | ArithOp::Div | ArithOp::Rem, - ) => { - lhs.is_integral() && rhs.is_integral() - || lhs.is_floating_point() && rhs.is_floating_point() - } - - BinaryOp::ArithOp(ArithOp::BitAnd | ArithOp::BitOr | ArithOp::BitXor) => { - lhs.is_integral() && rhs.is_integral() - || lhs.is_floating_point() && rhs.is_floating_point() - || matches!((lhs.kind(), rhs.kind()), (TyKind::Bool, TyKind::Bool)) - } - - BinaryOp::CmpOp(_) => { - let is_scalar = |kind| { - matches!( - kind, - TyKind::Bool - | TyKind::Char - | TyKind::Int(_) - | TyKind::Uint(_) - | TyKind::Float(_) - | TyKind::FnDef(..) - | TyKind::FnPtr(..) - | TyKind::RawPtr(..) - | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) - ) - }; - is_scalar(lhs.kind()) && is_scalar(rhs.kind()) - } - - BinaryOp::Assignment { op: None } => { - stdx::never!("Simple assignment operator is not binary op."); - false - } - - BinaryOp::Assignment { .. } => unreachable!("handled above"), - } - } - pub(super) fn with_breakable_ctx<T>( &mut self, kind: BreakableKind,
diff --git a/crates/hir-ty/src/infer/fallback.rs b/crates/hir-ty/src/infer/fallback.rs index b1c9146..d0ce8cb 100644 --- a/crates/hir-ty/src/infer/fallback.rs +++ b/crates/hir-ty/src/infer/fallback.rs
@@ -160,7 +160,7 @@ }; debug!("fallback_if_possible(ty={:?}): defaulting to `{:?}`", ty, fallback); - self.demand_eqtype(ty, fallback); + _ = self.demand_eqtype_fixme_no_diag(ty, fallback); true }
diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index 71a9c94..a257547 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs
@@ -1,25 +1,15 @@ //! Finds if an expression is an immutable context or a mutable context, which is used in selecting //! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar. -use hir_def::{ - hir::{ - Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, - UnaryOp, - }, - lang_item::LangItem, +use hir_def::hir::{ + Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, UnaryOp, }; -use hir_expand::name::Name; -use intern::sym; use rustc_ast_ir::Mutability; -use rustc_type_ir::inherent::IntoKind; -use crate::next_solver::infer::traits::{Obligation, ObligationCause}; -use crate::next_solver::{GenericArgs, TraitRef}; use crate::{ - Adjust, Adjustment, AutoBorrow, OverloadedDeref, - infer::{Expectation, InferenceContext, expr::ExprIsRead}, + Adjust, AutoBorrow, OverloadedDeref, + infer::{InferenceContext, place_op::PlaceOp}, lower::lower_mutability, - next_solver::TyKind, }; impl<'db> InferenceContext<'_, 'db> { @@ -28,13 +18,33 @@ } fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) { + let krate = self.krate(); if let Some(adjustments) = self.result.expr_adjustments.get_mut(&tgt_expr) { - for adj in adjustments.iter_mut().rev() { + let mut adjustments = adjustments.iter_mut().rev().peekable(); + while let Some(adj) = adjustments.next() { match &mut adj.kind { Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (), - Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)), + Adjust::Deref(Some(d)) => { + if mutability == Mutability::Mut { + let source_ty = match adjustments.peek() { + Some(prev_adj) => prev_adj.target, + None => self.result.type_of_expr[tgt_expr], + }; + if let Some(infer_ok) = Self::try_mutable_overloaded_place_op( + &self.table, + krate, + source_ty, + None, + PlaceOp::Deref, + ) { + self.table.register_predicates(infer_ok.obligations); + } + *d = OverloadedDeref(Some(mutability)); + } + } Adjust::Borrow(b) => match b { - 
AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m) => mutability = *m, + AutoBorrow::Ref(m) => mutability = (*m).into(), + AutoBorrow::RawPtr(m) => mutability = *m, }, } } @@ -128,75 +138,15 @@ self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } &Expr::Index { base, index } => { - if mutability == Mutability::Mut - && let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) - && let Some(index_trait) = - LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate) - && let Some(index_fn) = index_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::index_mut)) - { - *f = index_fn; - let mut base_ty = None; - let base_adjustments = - self.result.expr_adjustments.get_mut(&base).and_then(|it| it.last_mut()); - if let Some(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)), - target, - }) = base_adjustments - { - if let TyKind::Ref(_, ty, _) = target.kind() { - base_ty = Some(ty); - } - *mutability = Mutability::Mut; - } - - // Apply `IndexMut` obligation for non-assignee expr - if let Some(base_ty) = base_ty { - let index_ty = if let Some(ty) = self.result.type_of_expr.get(index) { - *ty - } else { - self.infer_expr(index, &Expectation::none(), ExprIsRead::Yes) - }; - let trait_ref = TraitRef::new( - self.interner(), - index_trait.into(), - GenericArgs::new_from_iter( - self.interner(), - [base_ty.into(), index_ty.into()], - ), - ); - self.table.register_predicate(Obligation::new( - self.interner(), - ObligationCause::new(), - self.table.trait_env.env, - trait_ref, - )); - } + if mutability == Mutability::Mut { + self.convert_place_op_to_mutable(PlaceOp::Index, tgt_expr, base, Some(index)); } self.infer_mut_expr(base, mutability); self.infer_mut_expr(index, Mutability::Not); } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { - let mut mutability = mutability; - if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) - && mutability == Mutability::Mut - && let 
Some(deref_trait) = - LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate) - { - let ty = self.result.type_of_expr.get(*expr); - let is_mut_ptr = ty.is_some_and(|ty| { - let ty = self.table.shallow_resolve(*ty); - matches!(ty.kind(), TyKind::RawPtr(_, Mutability::Mut)) - }); - if is_mut_ptr { - mutability = Mutability::Not; - } else if let Some(deref_fn) = deref_trait - .trait_items(self.db) - .method_by_name(&Name::new_symbol_root(sym::deref_mut)) - { - *f = deref_fn; - } + if mutability == Mutability::Mut { + self.convert_place_op_to_mutable(PlaceOp::Deref, tgt_expr, *expr, None); } self.infer_mut_expr(*expr, mutability); }
diff --git a/crates/hir-ty/src/infer/op.rs b/crates/hir-ty/src/infer/op.rs new file mode 100644 index 0000000..88319a8 --- /dev/null +++ b/crates/hir-ty/src/infer/op.rs
@@ -0,0 +1,468 @@ +//! Inference of binary and unary operators. + +use std::collections::hash_map; + +use hir_def::{GenericParamId, TraitId, hir::ExprId, lang_item::LangItem}; +use intern::{Symbol, sym}; +use rustc_ast_ir::Mutability; +use rustc_type_ir::inherent::{IntoKind, Ty as _}; +use syntax::ast::{ArithOp, BinaryOp, UnaryOp}; +use tracing::debug; + +use crate::{ + Adjust, Adjustment, AutoBorrow, + infer::{AllowTwoPhase, AutoBorrowMutability, Expectation, InferenceContext, expr::ExprIsRead}, + method_resolution::{MethodCallee, TreatNotYetDefinedOpaques}, + next_solver::{ + GenericArgs, TraitRef, Ty, TyKind, + fulfill::NextSolverError, + infer::traits::{Obligation, ObligationCause}, + obligation_ctxt::ObligationCtxt, + }, +}; + +impl<'a, 'db> InferenceContext<'a, 'db> { + /// Checks a `a <op>= b` + pub(crate) fn infer_assign_op_expr( + &mut self, + expr: ExprId, + op: ArithOp, + lhs: ExprId, + rhs: ExprId, + ) -> Ty<'db> { + let (lhs_ty, rhs_ty, return_ty) = + self.infer_overloaded_binop(expr, lhs, rhs, BinaryOp::Assignment { op: Some(op) }); + + let category = BinOpCategory::from(op); + let ty = if !lhs_ty.is_ty_var() + && !rhs_ty.is_ty_var() + && is_builtin_binop(lhs_ty, rhs_ty, category) + { + self.enforce_builtin_binop_types(lhs_ty, rhs_ty, category); + self.types.unit + } else { + return_ty + }; + + self.check_lhs_assignable(lhs); + + ty + } + + /// Checks a potentially overloaded binary operator. + pub(crate) fn infer_binop_expr( + &mut self, + expr: ExprId, + op: BinaryOp, + lhs_expr: ExprId, + rhs_expr: ExprId, + ) -> Ty<'db> { + debug!( + "check_binop(expr.hir_id={:?}, expr={:?}, op={:?}, lhs_expr={:?}, rhs_expr={:?})", + expr, expr, op, lhs_expr, rhs_expr + ); + + match op { + BinaryOp::LogicOp(_) => { + // && and || are a simple case. 
+ self.infer_expr_coerce( + lhs_expr, + &Expectation::HasType(self.types.bool), + ExprIsRead::Yes, + ); + let lhs_diverges = self.diverges; + self.infer_expr_coerce( + rhs_expr, + &Expectation::HasType(self.types.bool), + ExprIsRead::Yes, + ); + + // Depending on the LHS' value, the RHS can never execute. + self.diverges = lhs_diverges; + + self.types.bool + } + _ => { + // Otherwise, we always treat operators as if they are + // overloaded. This is the way to be most flexible w/r/t + // types that get inferred. + let (lhs_ty, rhs_ty, return_ty) = + self.infer_overloaded_binop(expr, lhs_expr, rhs_expr, op); + + // Supply type inference hints if relevant. Probably these + // hints should be enforced during select as part of the + // `consider_unification_despite_ambiguity` routine, but this + // more convenient for now. + // + // The basic idea is to help type inference by taking + // advantage of things we know about how the impls for + // scalar types are arranged. This is important in a + // scenario like `1_u32 << 2`, because it lets us quickly + // deduce that the result type should be `u32`, even + // though we don't know yet what type 2 has and hence + // can't pin this down to a specific impl. + let category = BinOpCategory::from(op); + if !lhs_ty.is_ty_var() + && !rhs_ty.is_ty_var() + && is_builtin_binop(lhs_ty, rhs_ty, category) + { + let builtin_return_ty = + self.enforce_builtin_binop_types(lhs_ty, rhs_ty, category); + _ = self.demand_eqtype(expr.into(), builtin_return_ty, return_ty); + builtin_return_ty + } else { + return_ty + } + } + } + } + + fn enforce_builtin_binop_types( + &mut self, + lhs_ty: Ty<'db>, + rhs_ty: Ty<'db>, + category: BinOpCategory, + ) -> Ty<'db> { + debug_assert!(is_builtin_binop(lhs_ty, rhs_ty, category)); + + // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work. + // (See https://github.com/rust-lang/rust/issues/57447.) 
+ let (lhs_ty, rhs_ty) = (deref_ty_if_possible(lhs_ty), deref_ty_if_possible(rhs_ty)); + + match category { + BinOpCategory::Shortcircuit => { + self.demand_suptype(self.types.bool, lhs_ty); + self.demand_suptype(self.types.bool, rhs_ty); + self.types.bool + } + + BinOpCategory::Shift => { + // result type is same as LHS always + lhs_ty + } + + BinOpCategory::Math | BinOpCategory::Bitwise => { + // both LHS and RHS and result will have the same type + self.demand_suptype(lhs_ty, rhs_ty); + lhs_ty + } + + BinOpCategory::Comparison => { + // both LHS and RHS and result will have the same type + self.demand_suptype(lhs_ty, rhs_ty); + self.types.bool + } + } + } + + fn infer_overloaded_binop( + &mut self, + expr: ExprId, + lhs_expr: ExprId, + rhs_expr: ExprId, + op: BinaryOp, + ) -> (Ty<'db>, Ty<'db>, Ty<'db>) { + debug!("infer_overloaded_binop(expr.hir_id={:?}, op={:?})", expr, op); + + let lhs_ty = match op { + BinaryOp::Assignment { .. } => { + // rust-lang/rust#52126: We have to use strict + // equivalence on the LHS of an assign-op like `+=`; + // overwritten or mutably-borrowed places cannot be + // coerced to a supertype. + self.infer_expr_no_expect(lhs_expr, ExprIsRead::Yes) + } + _ => { + // Find a suitable supertype of the LHS expression's type, by coercing to + // a type variable, to pass as the `Self` to the trait, avoiding invariant + // trait matching creating lifetime constraints that are too strict. + // e.g., adding `&'a T` and `&'b T`, given `&'x T: Add<&'x T>`, will result + // in `&'a T <: &'x T` and `&'b T <: &'x T`, instead of `'a = 'b = 'x`. + let lhs_ty = self.infer_expr_no_expect(lhs_expr, ExprIsRead::No); + let fresh_var = self.table.next_ty_var(); + self.demand_coerce(lhs_expr, lhs_ty, fresh_var, AllowTwoPhase::No, ExprIsRead::No) + } + }; + let lhs_ty = self.table.resolve_vars_with_obligations(lhs_ty); + + // N.B., as we have not yet type-checked the RHS, we don't have the + // type at hand. Make a variable to represent it. 
The whole reason + // for this indirection is so that, below, we can check the expr + // using this variable as the expected type, which sometimes lets + // us do better coercions than we would be able to do otherwise, + // particularly for things like `String + &String`. + let rhs_ty_var = self.table.next_ty_var(); + let result = self.lookup_op_method( + lhs_ty, + Some((rhs_expr, rhs_ty_var)), + self.lang_item_for_bin_op(op), + ); + + // see `NB` above + let rhs_ty = + self.infer_expr_coerce(rhs_expr, &Expectation::HasType(rhs_ty_var), ExprIsRead::No); + let rhs_ty = self.table.resolve_vars_with_obligations(rhs_ty); + + let return_ty = match result { + Ok(method) => { + let by_ref_binop = !is_op_by_value(op); + if (matches!(op, BinaryOp::Assignment { .. }) || by_ref_binop) + && let TyKind::Ref(_, _, mutbl) = + method.sig.inputs_and_output.inputs()[0].kind() + { + let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes); + let autoref = Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), + target: method.sig.inputs_and_output.inputs()[0], + }; + self.write_expr_adj(lhs_expr, Box::new([autoref])); + } + if by_ref_binop + && let TyKind::Ref(_, _, mutbl) = + method.sig.inputs_and_output.inputs()[1].kind() + { + // Allow two-phase borrows for binops in initial deployment + // since they desugar to methods + let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes); + + let autoref = Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), + target: method.sig.inputs_and_output.inputs()[1], + }; + // HACK(eddyb) Bypass checks due to reborrows being in + // some cases applied on the RHS, on top of which we need + // to autoref, which is not allowed by write_expr_adj. 
+ // self.write_expr_adj(rhs_expr, Box::new([autoref])); + match self.result.expr_adjustments.entry(rhs_expr) { + hash_map::Entry::Occupied(mut entry) => { + let mut adjustments = Vec::from(std::mem::take(entry.get_mut())); + adjustments.reserve_exact(1); + adjustments.push(autoref); + entry.insert(adjustments.into_boxed_slice()); + } + hash_map::Entry::Vacant(entry) => { + entry.insert(Box::new([autoref])); + } + }; + } + self.write_method_resolution(expr, method.def_id, method.args); + + method.sig.output() + } + Err(_errors) => { + // FIXME: Report diagnostic. + self.types.error + } + }; + + (lhs_ty, rhs_ty, return_ty) + } + + pub(crate) fn infer_user_unop( + &mut self, + ex: ExprId, + operand_ty: Ty<'db>, + op: UnaryOp, + ) -> Ty<'db> { + match self.lookup_op_method(operand_ty, None, self.lang_item_for_unop(op)) { + Ok(method) => { + self.write_method_resolution(ex, method.def_id, method.args); + method.sig.output() + } + Err(_errors) => { + // FIXME: Report diagnostic. + self.types.error + } + } + } + + fn lookup_op_method( + &mut self, + lhs_ty: Ty<'db>, + opt_rhs: Option<(ExprId, Ty<'db>)>, + (opname, trait_did): (Symbol, Option<TraitId>), + ) -> Result<MethodCallee<'db>, Vec<NextSolverError<'db>>> { + let Some(trait_did) = trait_did else { + // Bail if the operator trait is not defined. + return Err(vec![]); + }; + + debug!( + "lookup_op_method(lhs_ty={:?}, opname={:?}, trait_did={:?})", + lhs_ty, opname, trait_did + ); + + let opt_rhs_ty = opt_rhs.map(|it| it.1); + let cause = ObligationCause::new(); + + // We don't consider any other candidates if this lookup fails + // so we can freely treat opaque types as inference variables here + // to allow more code to compile. 
+ let treat_opaques = TreatNotYetDefinedOpaques::AsInfer; + let method = self.table.lookup_method_for_operator( + cause.clone(), + opname, + trait_did, + lhs_ty, + opt_rhs_ty, + treat_opaques, + ); + match method { + Some(ok) => { + let method = self.table.register_infer_ok(ok); + self.table.select_obligations_where_possible(); + Ok(method) + } + None => { + // Guide inference for the RHS expression if it's provided -- + // this will allow us to better error reporting, at the expense + // of making some error messages a bit more specific. + if let Some((rhs_expr, rhs_ty)) = opt_rhs + && rhs_ty.is_ty_var() + { + self.infer_expr_coerce(rhs_expr, &Expectation::HasType(rhs_ty), ExprIsRead::No); + } + + // Construct an obligation `self_ty : Trait<input_tys>` + let args = GenericArgs::for_item( + self.interner(), + trait_did.into(), + |param_idx, param_id, _| match param_id { + GenericParamId::LifetimeParamId(_) | GenericParamId::ConstParamId(_) => { + unreachable!("did not expect operand trait to have lifetime/const args") + } + GenericParamId::TypeParamId(_) => { + if param_idx == 0 { + lhs_ty.into() + } else { + opt_rhs_ty.expect("expected RHS for binop").into() + } + } + }, + ); + let obligation = Obligation::new( + self.interner(), + cause, + self.table.trait_env.env, + TraitRef::new_from_args(self.interner(), trait_did.into(), args), + ); + let mut ocx = ObligationCtxt::new(self.infcx()); + ocx.register_obligation(obligation); + Err(ocx.evaluate_obligations_error_on_ambiguity()) + } + } + } + + fn lang_item_for_bin_op(&self, op: BinaryOp) -> (Symbol, Option<TraitId>) { + let (method_name, trait_lang_item) = + crate::lang_items::lang_items_for_bin_op(op).expect("invalid operator provided"); + (method_name, trait_lang_item.resolve_trait(self.db, self.krate())) + } + + fn lang_item_for_unop(&self, op: UnaryOp) -> (Symbol, Option<TraitId>) { + let (method_name, trait_lang_item) = match op { + UnaryOp::Not => (sym::not, LangItem::Not), + UnaryOp::Neg => (sym::neg, 
LangItem::Neg), + UnaryOp::Deref => panic!("Deref is not overloadable"), + }; + (method_name, trait_lang_item.resolve_trait(self.db, self.krate())) + } +} + +// Binary operator categories. These categories summarize the behavior +// with respect to the builtin operations supported. +#[derive(Clone, Copy)] +enum BinOpCategory { + /// &&, || -- cannot be overridden + Shortcircuit, + + /// <<, >> -- when shifting a single integer, rhs can be any + /// integer type. For simd, types must match. + Shift, + + /// +, -, etc -- takes equal types, produces same type as input, + /// applicable to ints/floats/simd + Math, + + /// &, |, ^ -- takes equal types, produces same type as input, + /// applicable to ints/floats/simd/bool + Bitwise, + + /// ==, !=, etc -- takes equal types, produces bools, except for simd, + /// which produce the input type + Comparison, +} + +impl From<BinaryOp> for BinOpCategory { + fn from(op: BinaryOp) -> BinOpCategory { + match op { + BinaryOp::LogicOp(_) => BinOpCategory::Shortcircuit, + BinaryOp::ArithOp(op) | BinaryOp::Assignment { op: Some(op) } => op.into(), + BinaryOp::CmpOp(_) => BinOpCategory::Comparison, + BinaryOp::Assignment { op: None } => unreachable!( + "assignment is lowered into `Expr::Assignment`, not into `Expr::BinaryOp`" + ), + } + } +} + +impl From<ArithOp> for BinOpCategory { + fn from(op: ArithOp) -> BinOpCategory { + use ArithOp::*; + match op { + Shl | Shr => BinOpCategory::Shift, + Add | Sub | Mul | Div | Rem => BinOpCategory::Math, + BitXor | BitAnd | BitOr => BinOpCategory::Bitwise, + } + } +} + +/// Returns `true` if the binary operator takes its arguments by value. +fn is_op_by_value(op: BinaryOp) -> bool { + !matches!(op, BinaryOp::CmpOp(_)) +} + +/// Dereferences a single level of immutable referencing. 
+fn deref_ty_if_possible(ty: Ty<'_>) -> Ty<'_> { + match ty.kind() { + TyKind::Ref(_, ty, Mutability::Not) => ty, + _ => ty, + } +} + +/// Returns `true` if this is a built-in arithmetic operation (e.g., +/// u32 + u32, i16x4 == i16x4) and false if these types would have to be +/// overloaded to be legal. There are two reasons that we distinguish +/// builtin operations from overloaded ones (vs trying to drive +/// everything uniformly through the trait system and intrinsics or +/// something like that): +/// +/// 1. Builtin operations can trivially be evaluated in constants. +/// 2. For comparison operators applied to SIMD types the result is +/// not of type `bool`. For example, `i16x4 == i16x4` yields a +/// type like `i16x4`. This means that the overloaded trait +/// `PartialEq` is not applicable. +/// +/// Reason #2 is the killer. I tried for a while to always use +/// overloaded logic and just check the types in constants/codegen after +/// the fact, and it worked fine, except for SIMD types. -nmatsakis +fn is_builtin_binop<'db>(lhs: Ty<'db>, rhs: Ty<'db>, category: BinOpCategory) -> bool { + // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work. + // (See https://github.com/rust-lang/rust/issues/57447.) + let (lhs, rhs) = (deref_ty_if_possible(lhs), deref_ty_if_possible(rhs)); + + match category { + BinOpCategory::Shortcircuit => true, + BinOpCategory::Shift => lhs.is_integral() && rhs.is_integral(), + BinOpCategory::Math => { + lhs.is_integral() && rhs.is_integral() + || lhs.is_floating_point() && rhs.is_floating_point() + } + BinOpCategory::Bitwise => { + lhs.is_integral() && rhs.is_integral() + || lhs.is_floating_point() && rhs.is_floating_point() + || lhs.is_bool() && rhs.is_bool() + } + BinOpCategory::Comparison => lhs.is_scalar() && rhs.is_scalar(), + } +}
diff --git a/crates/hir-ty/src/infer/opaques.rs b/crates/hir-ty/src/infer/opaques.rs index f7719f5..ba4b53a 100644 --- a/crates/hir-ty/src/infer/opaques.rs +++ b/crates/hir-ty/src/infer/opaques.rs
@@ -109,7 +109,7 @@ let expected = EarlyBinder::bind(ty.ty).instantiate(interner, opaque_type_key.args); - self.demand_eqtype(expected, hidden_type.ty); + _ = self.demand_eqtype_fixme_no_diag(expected, hidden_type.ty); } self.result.type_of_opaque.insert(def_id, ty.ty);
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs index 8019844..d8b02de 100644 --- a/crates/hir-ty/src/infer/pat.rs +++ b/crates/hir-ty/src/infer/pat.rs
@@ -1,6 +1,6 @@ //! Type inference for patterns. -use std::iter::repeat_with; +use std::{cmp, iter}; use hir_def::{ HasModule, @@ -11,16 +11,16 @@ use rustc_ast_ir::Mutability; use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, SliceLike, Ty as _}; use stdx::TupleExt; +use syntax::ast::RangeOp; use crate::{ DeclContext, DeclOrigin, InferenceDiagnostic, consteval::{self, try_const_usize, usize_const}, infer::{ - AllowTwoPhase, BindingMode, Expectation, InferenceContext, TypeMismatch, - coerce::CoerceNever, expr::ExprIsRead, + AllowTwoPhase, BindingMode, Expectation, InferenceContext, TypeMismatch, expr::ExprIsRead, }, lower::lower_mutability, - next_solver::{GenericArgs, Ty, TyKind}, + next_solver::{GenericArgs, Ty, TyKind, Tys, infer::traits::ObligationCause}, }; impl<'db> InferenceContext<'_, 'db> { @@ -184,42 +184,61 @@ /// Ellipses found in the original pattern or expression must be filtered out. pub(super) fn infer_tuple_pat_like( &mut self, + pat: PatId, expected: Ty<'db>, default_bm: BindingMode, ellipsis: Option<u32>, - subs: &[PatId], + elements: &[PatId], decl: Option<DeclContext>, ) -> Ty<'db> { - let expected = self.table.structurally_resolve_type(expected); - let expectations = match expected.kind() { - TyKind::Tuple(parameters) => parameters, - _ => self.types.empty_tys, - }; - - let ((pre, post), n_uncovered_patterns) = match ellipsis { - Some(idx) => { - (subs.split_at(idx as usize), expectations.len().saturating_sub(subs.len())) + let mut expected_len = elements.len(); + if ellipsis.is_some() { + // Require known type only when `..` is present. 
+ if let TyKind::Tuple(tys) = self.table.structurally_resolve_type(expected).kind() { + expected_len = tys.len(); } - None => ((subs, &[][..]), 0), + } + let max_len = cmp::max(expected_len, elements.len()); + + let element_tys_iter = (0..max_len).map(|_| self.table.next_ty_var()); + let element_tys = Tys::new_from_iter(self.interner(), element_tys_iter); + let pat_ty = Ty::new(self.interner(), TyKind::Tuple(element_tys)); + if self.demand_eqtype(pat.into(), expected, pat_ty).is_err() + && let TyKind::Tuple(expected) = expected.kind() + { + // Equate expected type with the infer vars, for better diagnostics. + for (expected, elem_ty) in iter::zip(expected, element_tys) { + _ = self + .table + .at(&ObligationCause::dummy()) + .eq(expected, elem_ty) + .map(|infer_ok| self.table.register_infer_ok(infer_ok)); + } + } + let (before_ellipsis, after_ellipsis) = match ellipsis { + Some(ellipsis) => { + let element_tys = element_tys.as_slice(); + // Don't check patterns twice. + let from_end_start = cmp::max( + element_tys.len().saturating_sub(elements.len() - ellipsis as usize), + ellipsis as usize, + ); + ( + element_tys.get(..ellipsis as usize).unwrap_or(element_tys), + element_tys.get(from_end_start..).unwrap_or_default(), + ) + } + None => (element_tys.as_slice(), &[][..]), }; - let mut expectations_iter = - expectations.iter().chain(repeat_with(|| self.table.next_ty_var())); - - let mut inner_tys = Vec::with_capacity(n_uncovered_patterns + subs.len()); - - inner_tys.extend(expectations_iter.by_ref().take(n_uncovered_patterns + subs.len())); - - // Process pre - for (ty, pat) in inner_tys.iter_mut().zip(pre) { - *ty = self.infer_pat(*pat, *ty, default_bm, decl); + for (&elem, &elem_ty) in iter::zip(elements, before_ellipsis.iter().chain(after_ellipsis)) { + self.infer_pat(elem, elem_ty, default_bm, decl); } - - // Process post - for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) { - *ty = self.infer_pat(*pat, *ty, default_bm, decl); 
+ if let Some(uncovered) = elements.get(element_tys.len()..) { + for &elem in uncovered { + self.infer_pat(elem, self.types.error, default_bm, decl); + } } - - Ty::new_tup_from_iter(self.interner(), inner_tys.into_iter()) + pat_ty } /// The resolver needs to be updated to the surrounding expression when inside assignment @@ -273,7 +292,7 @@ let ty = match &self.body[pat] { Pat::Tuple { args, ellipsis } => { - self.infer_tuple_pat_like(expected, default_bm, *ellipsis, args, decl) + self.infer_tuple_pat_like(pat, expected, default_bm, *ellipsis, args, decl) } Pat::Or(pats) => { for pat in pats.iter() { @@ -306,7 +325,7 @@ expected, ty_inserted_vars, AllowTwoPhase::No, - CoerceNever::Yes, + ExprIsRead::No, ) { Ok(coerced_ty) => { self.write_pat_ty(pat, coerced_ty); @@ -331,9 +350,51 @@ self.infer_slice_pat(expected, prefix, *slice, suffix, default_bm, decl) } Pat::Wild => expected, - Pat::Range { .. } => { - // FIXME: do some checks here. - expected + Pat::Range { start, end, range_type } => { + // FIXME: Expectation + let lhs_expectation = Expectation::none(); + let lhs_ty = + start.map(|start| self.infer_expr(start, &lhs_expectation, ExprIsRead::Yes)); + let rhs_expectation = lhs_ty.map_or_else(Expectation::none, Expectation::HasType); + let rhs_ty = end.map(|end| self.infer_expr(end, &rhs_expectation, ExprIsRead::Yes)); + let single_arg_adt = |adt, ty: Ty<'db>| { + Ty::new_adt( + self.interner(), + adt, + GenericArgs::new_from_iter(self.interner(), [ty.into()]), + ) + }; + match (range_type, lhs_ty, rhs_ty) { + (RangeOp::Exclusive, None, None) => match self.resolve_range_full() { + Some(adt) => Ty::new_adt(self.interner(), adt, self.types.empty_args), + None => self.err_ty(), + }, + (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() { + Some(adt) => single_arg_adt(adt, ty), + None => self.err_ty(), + }, + (RangeOp::Inclusive, None, Some(ty)) => { + match self.resolve_range_to_inclusive() { + Some(adt) => single_arg_adt(adt, ty), + None => 
self.err_ty(), + } + } + (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() { + Some(adt) => single_arg_adt(adt, ty), + None => self.err_ty(), + }, + (RangeOp::Inclusive, Some(_), Some(ty)) => { + match self.resolve_range_inclusive() { + Some(adt) => single_arg_adt(adt, ty), + None => self.err_ty(), + } + } + (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() { + Some(adt) => single_arg_adt(adt, ty), + None => self.err_ty(), + }, + (RangeOp::Inclusive, _, None) => self.err_ty(), + } } &Pat::Lit(expr) => { // Don't emit type mismatches again, the expression lowering already did that. @@ -357,7 +418,7 @@ GenericArgs::fill_with_defaults( self.interner(), box_adt.into(), - std::iter::once(inner_ty.into()).chain(alloc_ty.map(Into::into)), + iter::once(inner_ty.into()).chain(alloc_ty.map(Into::into)), |_, id, _| self.table.next_var_for_param(id), ), ) @@ -374,16 +435,17 @@ Pat::Expr(expr) => { let old_inside_assign = std::mem::replace(&mut self.inside_assignment, false); // LHS of assignment doesn't constitute reads. + let expr_is_read = ExprIsRead::No; let result = - self.infer_expr_coerce(*expr, &Expectation::has_type(expected), ExprIsRead::No); + self.infer_expr_coerce(*expr, &Expectation::has_type(expected), expr_is_read); // We are returning early to avoid the unifiability check below. 
let lhs_ty = self.insert_type_vars_shallow(result); let ty = match self.coerce( - pat.into(), + (*expr).into(), expected, lhs_ty, AllowTwoPhase::No, - CoerceNever::Yes, + expr_is_read, ) { Ok(ty) => ty, Err(_) => { @@ -416,7 +478,7 @@ .result .pat_adjustments .get(&pat) - .and_then(|it| it.first()) + .and_then(|it| it.last()) .unwrap_or(&self.result.type_of_pat[pat]) } @@ -469,9 +531,9 @@ let bound_ty = match mode { BindingMode::Ref(mutability) => { let inner_lt = self.table.next_region_var(); - Ty::new_ref(self.interner(), inner_lt, inner_ty, mutability) + Ty::new_ref(self.interner(), inner_lt, expected, mutability) } - BindingMode::Move => inner_ty, + BindingMode::Move => expected, }; self.write_pat_ty(pat, inner_ty); self.write_binding_ty(binding, bound_ty);
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index 9ade842..6e3d158 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs
@@ -13,11 +13,12 @@ InferenceDiagnostic, ValueTyDefId, generics::generics, infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext, - lower::LifetimeElisionKind, - method_resolution::{self, VisibleFromModule}, + lower::{GenericPredicates, LifetimeElisionKind}, + method_resolution::{self, CandidateId, MethodError}, next_solver::{ GenericArg, GenericArgs, TraitRef, Ty, infer::traits::{Obligation, ObligationCause}, + util::clauses_as_obligations, }, }; @@ -31,7 +32,7 @@ } ValuePathResolution::NonGeneric(ty) => return Some(ty), }; - let args = self.process_remote_user_written_ty(substs); + let args = self.insert_type_vars(substs); self.add_required_obligations_for_value_path(generic_def, args); @@ -221,14 +222,14 @@ def: GenericDefId, subst: GenericArgs<'db>, ) { - let predicates = self.db.generic_predicates(def); let interner = self.interner(); + let predicates = GenericPredicates::query_all(self.db, def); let param_env = self.table.trait_env.env; - if let Some(predicates) = predicates.instantiate(self.interner(), subst) { - self.table.register_predicates(predicates.map(|predicate| { - Obligation::new(interner, ObligationCause::new(), param_env, predicate) - })); - } + self.table.register_predicates(clauses_as_obligations( + predicates.iter_instantiated_copied(interner, subst.as_slice()), + ObligationCause::new(), + param_env, + )); // We need to add `Self: Trait` obligation when `def` is a trait assoc item. 
let container = match def { @@ -265,7 +266,7 @@ match item { AssocItemId::FunctionId(func) => { if segment.name == &self.db.function_signature(func).name { - Some(AssocItemId::FunctionId(func)) + Some(CandidateId::FunctionId(func)) } else { None } @@ -273,7 +274,7 @@ AssocItemId::ConstId(konst) => { if self.db.const_signature(konst).name.as_ref() == Some(segment.name) { - Some(AssocItemId::ConstId(konst)) + Some(CandidateId::ConstId(konst)) } else { None } @@ -282,9 +283,8 @@ } })?; let def = match item { - AssocItemId::FunctionId(f) => ValueNs::FunctionId(f), - AssocItemId::ConstId(c) => ValueNs::ConstId(c), - AssocItemId::TypeAliasId(_) => unreachable!(), + CandidateId::FunctionId(f) => ValueNs::FunctionId(f), + CandidateId::ConstId(c) => ValueNs::ConstId(c), }; self.write_assoc_resolution(id, item, trait_ref.args); @@ -305,39 +305,23 @@ return Some(result); } - let canonical_ty = self.canonicalize(ty); - - let mut not_visible = None; - let res = method_resolution::iterate_method_candidates( - &canonical_ty, - &mut self.table, - Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope) - .as_ref() - .left_or_else(|&it| it), - VisibleFromModule::Filter(self.resolver.module()), - Some(name), - method_resolution::LookupMode::Path, - |_ty, item, visible| { - if visible { - Some((item, true)) - } else { - if not_visible.is_none() { - not_visible = Some((item, false)); - } - None + let res = self.with_method_resolution(|ctx| { + ctx.probe_for_name(method_resolution::Mode::Path, name.clone(), ty) + }); + let (item, visible) = match res { + Ok(res) => (res.item, true), + Err(error) => match error { + MethodError::PrivateMatch(candidate_id) => (candidate_id.item, false), + _ => { + self.push_diagnostic(InferenceDiagnostic::UnresolvedAssocItem { id }); + return None; } }, - ); - let res = res.or(not_visible); - if res.is_none() { - self.push_diagnostic(InferenceDiagnostic::UnresolvedAssocItem { id }); - } - let (item, visible) = res?; + }; let (def, container) = 
match item { - AssocItemId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container), - AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container), - AssocItemId::TypeAliasId(_) => unreachable!(), + CandidateId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container), + CandidateId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container), }; let substs = match container { ItemContainerId::ImplId(impl_id) => { @@ -372,6 +356,10 @@ self.write_assoc_resolution(id, item, substs); if !visible { + let item = match item { + CandidateId::FunctionId(it) => it.into(), + CandidateId::ConstId(it) => it.into(), + }; self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item }); } Some((def, substs))
diff --git a/crates/hir-ty/src/infer/place_op.rs b/crates/hir-ty/src/infer/place_op.rs new file mode 100644 index 0000000..50018bb --- /dev/null +++ b/crates/hir-ty/src/infer/place_op.rs
@@ -0,0 +1,329 @@ +//! Inference of *place operators*: deref and indexing (operators that create places, as opposed to values). + +use base_db::Crate; +use hir_def::{hir::ExprId, lang_item::LangItem}; +use intern::sym; +use rustc_ast_ir::Mutability; +use rustc_type_ir::inherent::{IntoKind, Ty as _}; +use tracing::debug; + +use crate::{ + Adjust, Adjustment, AutoBorrow, PointerCast, + autoderef::InferenceContextAutoderef, + infer::{AllowTwoPhase, AutoBorrowMutability, InferenceContext, unify::InferenceTable}, + method_resolution::{MethodCallee, TreatNotYetDefinedOpaques}, + next_solver::{ + ClauseKind, Ty, TyKind, + infer::{ + InferOk, + traits::{Obligation, ObligationCause}, + }, + }, +}; + +#[derive(Debug, Copy, Clone)] +pub(super) enum PlaceOp { + Deref, + Index, +} + +impl<'a, 'db> InferenceContext<'a, 'db> { + pub(super) fn try_overloaded_deref( + &self, + base_ty: Ty<'db>, + ) -> Option<InferOk<'db, MethodCallee<'db>>> { + self.try_overloaded_place_op(base_ty, None, PlaceOp::Deref) + } + + /// For the overloaded place expressions (`*x`, `x[3]`), the trait + /// returns a type of `&T`, but the actual type we assign to the + /// *expression* is `T`. So this function just peels off the return + /// type by one layer to yield `T`. + fn make_overloaded_place_return_type(&self, method: MethodCallee<'db>) -> Ty<'db> { + // extract method return type, which will be &T; + let ret_ty = method.sig.output(); + + // method returns &T, but the type as visible to user is T, so deref + ret_ty.builtin_deref(true).unwrap() + } + + /// Type-check `*oprnd_expr` with `oprnd_expr` type-checked already. 
+ pub(super) fn lookup_derefing( + &mut self, + expr: ExprId, + oprnd_expr: ExprId, + oprnd_ty: Ty<'db>, + ) -> Option<Ty<'db>> { + if let Some(ty) = oprnd_ty.builtin_deref(true) { + return Some(ty); + } + + let ok = self.try_overloaded_deref(oprnd_ty)?; + let method = self.table.register_infer_ok(ok); + if let TyKind::Ref(_, _, Mutability::Not) = method.sig.inputs_and_output.inputs()[0].kind() + { + self.write_expr_adj( + oprnd_expr, + Box::new([Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)), + target: method.sig.inputs_and_output.inputs()[0], + }]), + ); + } else { + panic!("input to deref is not a ref?"); + } + let ty = self.make_overloaded_place_return_type(method); + self.write_method_resolution(expr, method.def_id, method.args); + Some(ty) + } + + /// Type-check `*base_expr[index_expr]` with `base_expr` and `index_expr` type-checked already. + pub(super) fn lookup_indexing( + &mut self, + expr: ExprId, + base_expr: ExprId, + base_ty: Ty<'db>, + idx_ty: Ty<'db>, + ) -> Option<(/*index type*/ Ty<'db>, /*element type*/ Ty<'db>)> { + // FIXME(#18741) -- this is almost but not quite the same as the + // autoderef that normal method probing does. They could likely be + // consolidated. + + let mut autoderef = InferenceContextAutoderef::new_from_inference_context(self, base_ty); + let mut result = None; + while result.is_none() && autoderef.next().is_some() { + result = Self::try_index_step(expr, base_expr, &mut autoderef, idx_ty); + } + result + } + + /// To type-check `base_expr[index_expr]`, we progressively autoderef + /// (and otherwise adjust) `base_expr`, looking for a type which either + /// supports builtin indexing or overloaded indexing. + /// This loop implements one step in that search; the autoderef loop + /// is implemented by `lookup_indexing`. 
+ fn try_index_step( + expr: ExprId, + base_expr: ExprId, + autoderef: &mut InferenceContextAutoderef<'_, 'a, 'db>, + index_ty: Ty<'db>, + ) -> Option<(/*index type*/ Ty<'db>, /*element type*/ Ty<'db>)> { + let ty = autoderef.final_ty(); + let adjusted_ty = autoderef.ctx().table.structurally_resolve_type(ty); + debug!( + "try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \ + index_ty={:?})", + expr, base_expr, adjusted_ty, index_ty + ); + + for unsize in [false, true] { + let mut self_ty = adjusted_ty; + if unsize { + // We only unsize arrays here. + if let TyKind::Array(element_ty, ct) = adjusted_ty.kind() { + let ctx = autoderef.ctx(); + ctx.table.register_predicate(Obligation::new( + ctx.interner(), + ObligationCause::new(), + ctx.table.trait_env.env, + ClauseKind::ConstArgHasType(ct, ctx.types.usize), + )); + self_ty = Ty::new_slice(ctx.interner(), element_ty); + } else { + continue; + } + } + + // If some lookup succeeds, write callee into table and extract index/element + // type from the method signature. 
+ // If some lookup succeeded, install method in table + let input_ty = autoderef.ctx().table.next_ty_var(); + let method = + autoderef.ctx().try_overloaded_place_op(self_ty, Some(input_ty), PlaceOp::Index); + + if let Some(result) = method { + debug!("try_index_step: success, using overloaded indexing"); + let method = autoderef.ctx().table.register_infer_ok(result); + + let infer_ok = autoderef.adjust_steps_as_infer_ok(); + let mut adjustments = autoderef.ctx().table.register_infer_ok(infer_ok); + if let TyKind::Ref(region, _, Mutability::Not) = + method.sig.inputs_and_output.inputs()[0].kind() + { + adjustments.push(Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)), + target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty), + }); + } else { + panic!("input to index is not a ref?"); + } + if unsize { + adjustments.push(Adjustment { + kind: Adjust::Pointer(PointerCast::Unsize), + target: method.sig.inputs_and_output.inputs()[0], + }); + } + autoderef.ctx().write_expr_adj(base_expr, adjustments.into_boxed_slice()); + + autoderef.ctx().write_method_resolution(expr, method.def_id, method.args); + + return Some((input_ty, autoderef.ctx().make_overloaded_place_return_type(method))); + } + } + + None + } + + /// Try to resolve an overloaded place op. We only deal with the immutable + /// variant here (Deref/Index). In some contexts we would need the mutable + /// variant (DerefMut/IndexMut); those would be later converted by + /// `convert_place_derefs_to_mutable`. 
+ pub(super) fn try_overloaded_place_op( + &self, + base_ty: Ty<'db>, + opt_rhs_ty: Option<Ty<'db>>, + op: PlaceOp, + ) -> Option<InferOk<'db, MethodCallee<'db>>> { + debug!("try_overloaded_place_op({:?},{:?})", base_ty, op); + + let (Some(imm_tr), imm_op) = (match op { + PlaceOp::Deref => (LangItem::Deref.resolve_trait(self.db, self.krate()), sym::deref), + PlaceOp::Index => (LangItem::Index.resolve_trait(self.db, self.krate()), sym::index), + }) else { + // Bail if `Deref` or `Index` isn't defined. + return None; + }; + + // FIXME(trait-system-refactor-initiative#231): we may want to treat + // opaque types as rigid here to support `impl Deref<Target = impl Index<usize>>`. + let treat_opaques = TreatNotYetDefinedOpaques::AsInfer; + self.table.lookup_method_for_operator( + ObligationCause::new(), + imm_op, + imm_tr, + base_ty, + opt_rhs_ty, + treat_opaques, + ) + } + + pub(super) fn try_mutable_overloaded_place_op( + table: &InferenceTable<'db>, + krate: Crate, + base_ty: Ty<'db>, + opt_rhs_ty: Option<Ty<'db>>, + op: PlaceOp, + ) -> Option<InferOk<'db, MethodCallee<'db>>> { + debug!("try_mutable_overloaded_place_op({:?},{:?})", base_ty, op); + + let (Some(mut_tr), mut_op) = (match op { + PlaceOp::Deref => (LangItem::DerefMut.resolve_trait(table.db, krate), sym::deref_mut), + PlaceOp::Index => (LangItem::IndexMut.resolve_trait(table.db, krate), sym::index_mut), + }) else { + // Bail if `DerefMut` or `IndexMut` isn't defined. + return None; + }; + + // We have to replace the operator with the mutable variant for the + // program to compile, so we don't really have a choice here and want + // to just try using `DerefMut` even if its not in the item bounds + // of the opaque. 
+ let treat_opaques = TreatNotYetDefinedOpaques::AsInfer; + table.lookup_method_for_operator( + ObligationCause::new(), + mut_op, + mut_tr, + base_ty, + opt_rhs_ty, + treat_opaques, + ) + } + + pub(super) fn convert_place_op_to_mutable( + &mut self, + op: PlaceOp, + expr: ExprId, + base_expr: ExprId, + index_expr: Option<ExprId>, + ) { + debug!("convert_place_op_to_mutable({:?}, {:?}, {:?})", op, expr, base_expr); + if !self.result.method_resolutions.contains_key(&expr) { + debug!("convert_place_op_to_mutable - builtin, nothing to do"); + return; + } + + // Need to deref because overloaded place ops take self by-reference. + let base_ty = self + .expr_ty_after_adjustments(base_expr) + .builtin_deref(false) + .expect("place op takes something that is not a ref"); + + let arg_ty = match op { + PlaceOp::Deref => None, + PlaceOp::Index => { + // We would need to recover the `T` used when we resolve `<_ as Index<T>>::index` + // in try_index_step. This is the arg at index 1. + // + // FIXME: rustc does not use the type of `index_expr` with the following explanation. + // + // Note: we should *not* use `expr_ty` of index_expr here because autoderef + // during coercions can cause type of index_expr to differ from `T` (#72002). + // We also could not use `expr_ty_adjusted` of index_expr because reborrowing + // during coercions can also cause type of index_expr to differ from `T`, + // which can potentially cause regionck failure (#74933). + Some(self.expr_ty_after_adjustments( + index_expr.expect("`PlaceOp::Index` should have `index_expr`"), + )) + } + }; + let method = + Self::try_mutable_overloaded_place_op(&self.table, self.krate(), base_ty, arg_ty, op); + let method = match method { + Some(ok) => self.table.register_infer_ok(ok), + // Couldn't find the mutable variant of the place op, keep the + // current, immutable version. 
+ None => return, + }; + debug!("convert_place_op_to_mutable: method={:?}", method); + self.result.method_resolutions.insert(expr, (method.def_id, method.args)); + + let TyKind::Ref(region, _, Mutability::Mut) = + method.sig.inputs_and_output.inputs()[0].kind() + else { + panic!("input to mutable place op is not a mut ref?"); + }; + + // Convert the autoref in the base expr to mutable with the correct + // region and mutability. + let base_expr_ty = self.expr_ty(base_expr); + let interner = self.interner(); + if let Some(adjustments) = self.result.expr_adjustments.get_mut(&base_expr) { + let mut source = base_expr_ty; + for adjustment in &mut adjustments[..] { + if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind { + debug!("convert_place_op_to_mutable: converting autoref {:?}", adjustment); + let mutbl = AutoBorrowMutability::Mut { + // Deref/indexing can be desugared to a method call, + // so maybe we could use two-phase here. + // See the documentation of AllowTwoPhase for why that's + // not the case today. + allow_two_phase_borrow: AllowTwoPhase::No, + }; + adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(mutbl)); + adjustment.target = Ty::new_ref(interner, region, source, mutbl.into()); + } + source = adjustment.target; + } + + // If we have an autoref followed by unsizing at the end, fix the unsize target. + if let [ + .., + Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. }, + Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }, + ] = adjustments[..] + { + *target = method.sig.inputs_and_output.inputs()[0]; + } + } + } +}
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index 59e8d84..fb195d4 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs
@@ -7,9 +7,8 @@ use intern::sym; use rustc_hash::FxHashSet; use rustc_type_ir::{ - DebruijnIndex, InferConst, InferTy, RegionVid, TyVid, TypeFoldable, TypeFolder, - TypeSuperFoldable, TypeVisitableExt, UpcastFrom, - inherent::{Const as _, IntoKind, Ty as _}, + TyVid, TypeFoldable, TypeVisitableExt, UpcastFrom, + inherent::{Const as _, GenericArg as _, IntoKind, SliceLike, Ty as _}, solve::{Certainty, GoalSource}, }; use smallvec::SmallVec; @@ -18,15 +17,14 @@ use crate::{ TraitEnvironment, db::HirDatabase, - infer::InferenceContext, next_solver::{ - self, AliasTy, Binder, Canonical, ClauseKind, Const, ConstKind, DbInterner, - ErrorGuaranteed, GenericArg, GenericArgs, Predicate, PredicateKind, Region, RegionKind, - SolverDefId, TraitRef, Ty, TyKind, TypingMode, + AliasTy, Canonical, ClauseKind, Const, DbInterner, ErrorGuaranteed, GenericArg, + GenericArgs, Goal, Predicate, PredicateKind, Region, SolverDefId, Term, TraitRef, Ty, + TyKind, TypingMode, fulfill::{FulfillmentCtxt, NextSolverError}, infer::{ DbInternerInferExt, InferCtxt, InferOk, InferResult, - at::ToTrace, + at::{At, ToTrace}, snapshot::CombinedSnapshot, traits::{Obligation, ObligationCause, PredicateObligation}, }, @@ -38,15 +36,6 @@ }, }; -impl<'db> InferenceContext<'_, 'db> { - pub(super) fn canonicalize<T>(&mut self, t: T) -> rustc_type_ir::Canonical<DbInterner<'db>, T> - where - T: rustc_type_ir::TypeFoldable<DbInterner<'db>>, - { - self.table.canonicalize(t) - } -} - struct NestedObligationsForSelfTy<'a, 'db> { ctx: &'a InferenceTable<'db>, self_ty: TyVid, @@ -292,10 +281,7 @@ T: TypeFoldable<DbInterner<'db>> + Clone, { let ty = self.resolve_vars_with_obligations(ty); - self.infer_ctxt - .at(&ObligationCause::new(), self.trait_env.env) - .deeply_normalize(ty.clone()) - .unwrap_or(ty) + self.at(&ObligationCause::new()).deeply_normalize(ty.clone()).unwrap_or(ty) } /// Works almost same as [`Self::normalize_associated_types_in`], but this also resolves shallow @@ -316,19 +302,19 @@ .unwrap_or(alias) 
} - pub(crate) fn next_ty_var(&mut self) -> Ty<'db> { + pub(crate) fn next_ty_var(&self) -> Ty<'db> { self.infer_ctxt.next_ty_var() } - pub(crate) fn next_const_var(&mut self) -> Const<'db> { + pub(crate) fn next_const_var(&self) -> Const<'db> { self.infer_ctxt.next_const_var() } - pub(crate) fn next_int_var(&mut self) -> Ty<'db> { + pub(crate) fn next_int_var(&self) -> Ty<'db> { self.infer_ctxt.next_int_var() } - pub(crate) fn next_float_var(&mut self) -> Ty<'db> { + pub(crate) fn next_float_var(&self) -> Ty<'db> { self.infer_ctxt.next_float_var() } @@ -338,101 +324,12 @@ var } - pub(crate) fn next_region_var(&mut self) -> Region<'db> { + pub(crate) fn next_region_var(&self) -> Region<'db> { self.infer_ctxt.next_region_var() } - pub(crate) fn next_var_for_param(&mut self, id: GenericParamId) -> GenericArg<'db> { - match id { - GenericParamId::TypeParamId(_) => self.next_ty_var().into(), - GenericParamId::ConstParamId(_) => self.next_const_var().into(), - GenericParamId::LifetimeParamId(_) => self.next_region_var().into(), - } - } - - pub(crate) fn resolve_with_fallback<T>( - &mut self, - t: T, - fallback_ty: &mut dyn FnMut(DebruijnIndex, InferTy) -> Ty<'db>, - fallback_const: &mut dyn FnMut(DebruijnIndex, InferConst) -> Const<'db>, - fallback_region: &mut dyn FnMut(DebruijnIndex, RegionVid) -> Region<'db>, - ) -> T - where - T: TypeFoldable<DbInterner<'db>>, - { - struct Resolver<'a, 'db> { - table: &'a mut InferenceTable<'db>, - binder: DebruijnIndex, - fallback_ty: &'a mut dyn FnMut(DebruijnIndex, InferTy) -> Ty<'db>, - fallback_const: &'a mut dyn FnMut(DebruijnIndex, InferConst) -> Const<'db>, - fallback_region: &'a mut dyn FnMut(DebruijnIndex, RegionVid) -> Region<'db>, - } - - impl<'db> TypeFolder<DbInterner<'db>> for Resolver<'_, 'db> { - fn cx(&self) -> DbInterner<'db> { - self.table.interner() - } - - fn fold_binder<T>(&mut self, t: Binder<'db, T>) -> Binder<'db, T> - where - T: TypeFoldable<DbInterner<'db>>, - { - self.binder.shift_in(1); - let result = 
t.super_fold_with(self); - self.binder.shift_out(1); - result - } - - fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> { - if !t.has_infer() { - return t; - } - - if let TyKind::Infer(infer) = t.kind() { - (self.fallback_ty)(self.binder, infer) - } else { - t.super_fold_with(self) - } - } - - fn fold_const(&mut self, c: Const<'db>) -> Const<'db> { - if !c.has_infer() { - return c; - } - - if let ConstKind::Infer(infer) = c.kind() { - (self.fallback_const)(self.binder, infer) - } else { - c.super_fold_with(self) - } - } - - fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { - if let RegionKind::ReVar(infer) = r.kind() { - (self.fallback_region)(self.binder, infer) - } else { - r - } - } - } - - t.fold_with(&mut Resolver { - table: self, - binder: DebruijnIndex::ZERO, - fallback_ty, - fallback_const, - fallback_region, - }) - } - - pub(crate) fn instantiate_canonical<T>( - &mut self, - canonical: rustc_type_ir::Canonical<DbInterner<'db>, T>, - ) -> T - where - T: rustc_type_ir::TypeFoldable<DbInterner<'db>>, - { - self.infer_ctxt.instantiate_canonical(&canonical).0 + pub(crate) fn next_var_for_param(&self, id: GenericParamId) -> GenericArg<'db> { + self.infer_ctxt.next_var_for_param(id) } pub(crate) fn resolve_completely<T>(&mut self, value: T) -> T @@ -456,7 +353,11 @@ /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the /// caller needs to deal with them. pub(crate) fn try_unify<T: ToTrace<'db>>(&mut self, t1: T, t2: T) -> InferResult<'db, ()> { - self.infer_ctxt.at(&ObligationCause::new(), self.trait_env.env).eq(t1, t2) + self.at(&ObligationCause::new()).eq(t1, t2) + } + + pub(crate) fn at<'a>(&'a self, cause: &'a ObligationCause) -> At<'a, 'db> { + self.infer_ctxt.at(cause, self.trait_env.env) } pub(crate) fn shallow_resolve(&self, ty: Ty<'db>) -> Ty<'db> { @@ -486,15 +387,6 @@ self.infer_ctxt.fresh_args_for_item(def) } - /// Like `fresh_args_for_item()`, but first uses the args from `first`. 
- pub(crate) fn fill_rest_fresh_args( - &self, - def_id: SolverDefId, - first: impl IntoIterator<Item = GenericArg<'db>>, - ) -> GenericArgs<'db> { - self.infer_ctxt.fill_rest_fresh_args(def_id, first) - } - /// Try to resolve `ty` to a structural type, normalizing aliases. /// /// In case there is still ambiguity, the returned type may be an inference @@ -535,17 +427,6 @@ self.fulfillment_cx = snapshot.obligations; } - #[tracing::instrument(skip_all)] - pub(crate) fn run_in_snapshot<T>( - &mut self, - f: impl FnOnce(&mut InferenceTable<'db>) -> T, - ) -> T { - let snapshot = self.snapshot(); - let result = f(self); - self.rollback_to(snapshot); - result - } - pub(crate) fn commit_if_ok<T, E>( &mut self, f: impl FnOnce(&mut InferenceTable<'db>) -> Result<T, E>, @@ -566,22 +447,19 @@ /// choice (during e.g. method resolution or deref). #[tracing::instrument(level = "debug", skip(self))] pub(crate) fn try_obligation(&mut self, predicate: Predicate<'db>) -> NextTraitSolveResult { - let goal = next_solver::Goal { param_env: self.trait_env.env, predicate }; + let goal = Goal { param_env: self.trait_env.env, predicate }; let canonicalized = self.canonicalize(goal); next_trait_solve_canonical_in_ctxt(&self.infer_ctxt, canonicalized) } pub(crate) fn register_obligation(&mut self, predicate: Predicate<'db>) { - let goal = next_solver::Goal { param_env: self.trait_env.env, predicate }; + let goal = Goal { param_env: self.trait_env.env, predicate }; self.register_obligation_in_env(goal) } #[tracing::instrument(level = "debug", skip(self))] - fn register_obligation_in_env( - &mut self, - goal: next_solver::Goal<'db, next_solver::Predicate<'db>>, - ) { + fn register_obligation_in_env(&mut self, goal: Goal<'db, Predicate<'db>>) { let result = next_trait_solve_in_ctxt(&self.infer_ctxt, goal); tracing::debug!(?result); match result { @@ -619,7 +497,7 @@ self.fulfillment_cx.register_predicate_obligation(&self.infer_ctxt, obligation); } - pub(super) fn register_predicates<I>(&mut 
self, obligations: I) + pub(crate) fn register_predicates<I>(&mut self, obligations: I) where I: IntoIterator<Item = PredicateObligation<'db>>, { @@ -628,6 +506,23 @@ }); } + /// checking later, during regionck, that `arg` is well-formed. + pub(crate) fn register_wf_obligation(&mut self, term: Term<'db>, cause: ObligationCause) { + self.register_predicate(Obligation::new( + self.interner(), + cause, + self.trait_env.env, + ClauseKind::WellFormed(term), + )); + } + + /// Registers obligations that all `args` are well-formed. + pub(crate) fn add_wf_bounds(&mut self, args: GenericArgs<'db>) { + for term in args.iter().filter_map(|it| it.as_term()) { + self.register_wf_obligation(term, ObligationCause::new()); + } + } + pub(crate) fn callable_sig( &mut self, ty: Ty<'db>, @@ -714,26 +609,20 @@ } /// Whenever you lower a user-written type, you should call this. - pub(crate) fn process_user_written_ty<T>(&mut self, ty: T) -> T - where - T: TypeFoldable<DbInterner<'db>>, - { + pub(crate) fn process_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { self.process_remote_user_written_ty(ty) // FIXME: Register a well-formed obligation. } /// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation, /// while `process_user_written_ty()` should (but doesn't currently). - pub(crate) fn process_remote_user_written_ty<T>(&mut self, ty: T) -> T - where - T: TypeFoldable<DbInterner<'db>>, - { + pub(crate) fn process_remote_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> { let ty = self.insert_type_vars(ty); // See https://github.com/rust-lang/rust/blob/cdb45c87e2cd43495379f7e867e3cc15dcee9f93/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs#L487-L495: // Even though the new solver only lazily normalizes usually, here we eagerly normalize so that not everything needs // to normalize before inspecting the `TyKind`. // FIXME(next-solver): We should not deeply normalize here, only shallowly. 
- self.normalize_associated_types_in(ty) + self.try_structurally_resolve_type(ty) } /// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it. @@ -852,7 +741,7 @@ { let value = if self.should_normalize { let cause = ObligationCause::new(); - let at = self.ctx.infer_ctxt.at(&cause, self.ctx.trait_env.env); + let at = self.ctx.at(&cause); let universes = vec![None; outer_exclusive_binder(value).as_usize()]; match deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals( at, value, universes,
diff --git a/crates/hir-ty/src/lang_items.rs b/crates/hir-ty/src/lang_items.rs index 3ef7f50..d0d0aa7 100644 --- a/crates/hir-ty/src/lang_items.rs +++ b/crates/hir-ty/src/lang_items.rs
@@ -1,8 +1,7 @@ //! Functions to detect special lang items use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags}; -use hir_expand::name::Name; -use intern::sym; +use intern::{Symbol, sym}; use crate::db::HirDatabase; @@ -11,48 +10,48 @@ db.struct_signature(id).flags.contains(StructFlags::IS_BOX) } -pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> { +pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Symbol, LangItem)> { use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering}; Some(match op { BinaryOp::LogicOp(_) => return None, BinaryOp::ArithOp(aop) => match aop { - ArithOp::Add => (Name::new_symbol_root(sym::add), LangItem::Add), - ArithOp::Mul => (Name::new_symbol_root(sym::mul), LangItem::Mul), - ArithOp::Sub => (Name::new_symbol_root(sym::sub), LangItem::Sub), - ArithOp::Div => (Name::new_symbol_root(sym::div), LangItem::Div), - ArithOp::Rem => (Name::new_symbol_root(sym::rem), LangItem::Rem), - ArithOp::Shl => (Name::new_symbol_root(sym::shl), LangItem::Shl), - ArithOp::Shr => (Name::new_symbol_root(sym::shr), LangItem::Shr), - ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor), LangItem::BitXor), - ArithOp::BitOr => (Name::new_symbol_root(sym::bitor), LangItem::BitOr), - ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand), LangItem::BitAnd), + ArithOp::Add => (sym::add, LangItem::Add), + ArithOp::Mul => (sym::mul, LangItem::Mul), + ArithOp::Sub => (sym::sub, LangItem::Sub), + ArithOp::Div => (sym::div, LangItem::Div), + ArithOp::Rem => (sym::rem, LangItem::Rem), + ArithOp::Shl => (sym::shl, LangItem::Shl), + ArithOp::Shr => (sym::shr, LangItem::Shr), + ArithOp::BitXor => (sym::bitxor, LangItem::BitXor), + ArithOp::BitOr => (sym::bitor, LangItem::BitOr), + ArithOp::BitAnd => (sym::bitand, LangItem::BitAnd), }, BinaryOp::Assignment { op: Some(aop) } => match aop { - ArithOp::Add => (Name::new_symbol_root(sym::add_assign), LangItem::AddAssign), - ArithOp::Mul => 
(Name::new_symbol_root(sym::mul_assign), LangItem::MulAssign), - ArithOp::Sub => (Name::new_symbol_root(sym::sub_assign), LangItem::SubAssign), - ArithOp::Div => (Name::new_symbol_root(sym::div_assign), LangItem::DivAssign), - ArithOp::Rem => (Name::new_symbol_root(sym::rem_assign), LangItem::RemAssign), - ArithOp::Shl => (Name::new_symbol_root(sym::shl_assign), LangItem::ShlAssign), - ArithOp::Shr => (Name::new_symbol_root(sym::shr_assign), LangItem::ShrAssign), - ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor_assign), LangItem::BitXorAssign), - ArithOp::BitOr => (Name::new_symbol_root(sym::bitor_assign), LangItem::BitOrAssign), - ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand_assign), LangItem::BitAndAssign), + ArithOp::Add => (sym::add_assign, LangItem::AddAssign), + ArithOp::Mul => (sym::mul_assign, LangItem::MulAssign), + ArithOp::Sub => (sym::sub_assign, LangItem::SubAssign), + ArithOp::Div => (sym::div_assign, LangItem::DivAssign), + ArithOp::Rem => (sym::rem_assign, LangItem::RemAssign), + ArithOp::Shl => (sym::shl_assign, LangItem::ShlAssign), + ArithOp::Shr => (sym::shr_assign, LangItem::ShrAssign), + ArithOp::BitXor => (sym::bitxor_assign, LangItem::BitXorAssign), + ArithOp::BitOr => (sym::bitor_assign, LangItem::BitOrAssign), + ArithOp::BitAnd => (sym::bitand_assign, LangItem::BitAndAssign), }, BinaryOp::CmpOp(cop) => match cop { - CmpOp::Eq { negated: false } => (Name::new_symbol_root(sym::eq), LangItem::PartialEq), - CmpOp::Eq { negated: true } => (Name::new_symbol_root(sym::ne), LangItem::PartialEq), + CmpOp::Eq { negated: false } => (sym::eq, LangItem::PartialEq), + CmpOp::Eq { negated: true } => (sym::ne, LangItem::PartialEq), CmpOp::Ord { ordering: Ordering::Less, strict: false } => { - (Name::new_symbol_root(sym::le), LangItem::PartialOrd) + (sym::le, LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Less, strict: true } => { - (Name::new_symbol_root(sym::lt), LangItem::PartialOrd) + (sym::lt, LangItem::PartialOrd) } 
CmpOp::Ord { ordering: Ordering::Greater, strict: false } => { - (Name::new_symbol_root(sym::ge), LangItem::PartialOrd) + (sym::ge, LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Greater, strict: true } => { - (Name::new_symbol_root(sym::gt), LangItem::PartialOrd) + (sym::gt, LangItem::PartialOrd) } }, BinaryOp::Assignment { op: None } => return None,
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index fdacc1d..b29c7d2 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs
@@ -89,13 +89,12 @@ could_coerce, could_unify, could_unify_deeply, }; pub use lower::{ - LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId, + GenericPredicates, ImplTraits, LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId, associated_type_shorthand_candidates, diagnostics::*, }; -pub use method_resolution::check_orphan_rules; pub use next_solver::interner::{attach_db, attach_db_allow_change, with_attached_db}; pub use target_feature::TargetFeatures; -pub use traits::TraitEnvironment; +pub use traits::{TraitEnvironment, check_orphan_rules}; pub use utils::{ TargetFeatureIsSafeInTarget, Unsafety, all_super_traits, direct_super_traits, is_fn_unsafe_to_call, target_feature_is_safe_in_target,
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index a181ae0..a20c299 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs
@@ -8,12 +8,9 @@ pub(crate) mod diagnostics; pub(crate) mod path; -use std::{ - cell::OnceCell, - iter, mem, - ops::{self, Deref, Not as _}, -}; +use std::{cell::OnceCell, iter, mem}; +use arrayvec::ArrayVec; use base_db::Crate; use either::Either; use hir_def::{ @@ -45,7 +42,7 @@ AliasTyKind, BoundVarIndexKind, ConstKind, DebruijnIndex, ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, FnSig, OutlivesPredicate, TyKind::{self}, - TypeVisitableExt, + TypeVisitableExt, Upcast, inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _}, }; use salsa::plumbing::AsId; @@ -56,13 +53,13 @@ use crate::{ FnAbi, ImplTraitId, TraitEnvironment, TyLoweringDiagnostic, TyLoweringDiagnosticKind, consteval::intern_const_ref, - db::HirDatabase, + db::{HirDatabase, InternedOpaqueTyId}, generics::{Generics, generics, trait_self_param_idx}, next_solver::{ - AliasTy, Binder, BoundExistentialPredicates, Clause, Clauses, Const, DbInterner, - EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs, ParamConst, - ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef, Ty, Tys, - UnevaluatedConst, abi::Safety, + AliasTy, Binder, BoundExistentialPredicates, Clause, ClauseKind, Clauses, Const, + DbInterner, EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs, + ParamConst, ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef, + Ty, Tys, UnevaluatedConst, abi::Safety, }, }; @@ -75,7 +72,7 @@ #[derive(PartialEq, Eq, Debug, Hash)] pub struct ImplTrait<'db> { - pub(crate) predicates: Vec<Clause<'db>>, + pub(crate) predicates: Box<[Clause<'db>]>, } pub type ImplTraitIdx<'db> = Idx<ImplTrait<'db>>; @@ -473,7 +470,7 @@ let idx = self .impl_trait_mode .opaque_type_data - .alloc(ImplTrait { predicates: Vec::default() }); + .alloc(ImplTrait { predicates: Box::default() }); let impl_trait_id = origin.either( |f| ImplTraitId::ReturnTypeImplTrait(f, idx), @@ -916,8 +913,7 
@@ }); predicates.extend(sized_clause); } - predicates.shrink_to_fit(); - predicates + predicates.into_boxed_slice() }); ImplTrait { predicates } } @@ -982,50 +978,89 @@ Some((trait_ref, create_diagnostics(ctx.diagnostics))) } -pub(crate) fn return_type_impl_traits<'db>( - db: &'db dyn HirDatabase, - def: hir_def::FunctionId, -) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>> { - // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe - let data = db.function_signature(def); - let resolver = def.resolver(db); - let mut ctx_ret = - TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - if let Some(ret_type) = data.ret_type { - let _ret = ctx_ret.lower_ty(ret_type); - } - let return_type_impl_traits = - ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data }; - if return_type_impl_traits.impl_traits.is_empty() { - None - } else { - Some(Arc::new(EarlyBinder::bind(return_type_impl_traits))) +impl<'db> ImplTraitId<'db> { + #[inline] + pub fn predicates(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> { + let (impl_traits, idx) = match self { + ImplTraitId::ReturnTypeImplTrait(owner, idx) => { + (ImplTraits::return_type_impl_traits(db, owner), idx) + } + ImplTraitId::TypeAliasImplTrait(owner, idx) => { + (ImplTraits::type_alias_impl_traits(db, owner), idx) + } + }; + impl_traits + .as_deref() + .expect("owner should have opaque type") + .as_ref() + .map_bound(|it| &*it.impl_traits[idx].predicates) } } -pub(crate) fn type_alias_impl_traits<'db>( - db: &'db dyn HirDatabase, - def: hir_def::TypeAliasId, -) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>> { - let data = db.type_alias_signature(def); - let resolver = def.resolver(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &data.store, - def.into(), - LifetimeElisionKind::AnonymousReportError, - ) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - 
if let Some(type_ref) = data.ty { - let _ty = ctx.lower_ty(type_ref); +impl InternedOpaqueTyId { + #[inline] + pub fn predicates<'db>(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> { + self.loc(db).predicates(db) } - let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data }; - if type_alias_impl_traits.impl_traits.is_empty() { - None - } else { - Some(Arc::new(EarlyBinder::bind(type_alias_impl_traits))) +} + +#[salsa::tracked] +impl<'db> ImplTraits<'db> { + #[salsa::tracked(returns(ref), unsafe(non_update_return_type))] + pub(crate) fn return_type_impl_traits( + db: &'db dyn HirDatabase, + def: hir_def::FunctionId, + ) -> Option<Box<EarlyBinder<'db, ImplTraits<'db>>>> { + // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe + let data = db.function_signature(def); + let resolver = def.resolver(db); + let mut ctx_ret = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::Infer, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + if let Some(ret_type) = data.ret_type { + let _ret = ctx_ret.lower_ty(ret_type); + } + let mut return_type_impl_traits = + ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data }; + if return_type_impl_traits.impl_traits.is_empty() { + None + } else { + return_type_impl_traits.impl_traits.shrink_to_fit(); + Some(Box::new(EarlyBinder::bind(return_type_impl_traits))) + } + } + + #[salsa::tracked(returns(ref), unsafe(non_update_return_type))] + pub(crate) fn type_alias_impl_traits( + db: &'db dyn HirDatabase, + def: hir_def::TypeAliasId, + ) -> Option<Box<EarlyBinder<'db, ImplTraits<'db>>>> { + let data = db.type_alias_signature(def); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + if let Some(type_ref) = data.ty { + let 
_ty = ctx.lower_ty(type_ref); + } + let mut type_alias_impl_traits = + ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data }; + if type_alias_impl_traits.impl_traits.is_empty() { + None + } else { + type_alias_impl_traits.impl_traits.shrink_to_fit(); + Some(Box::new(EarlyBinder::bind(type_alias_impl_traits))) + } } } @@ -1331,12 +1366,13 @@ /// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but /// these are fine: `T: Foo<U::Item>, U: Foo<()>`. #[tracing::instrument(skip(db), ret)] -pub(crate) fn generic_predicates_for_param_query<'db>( +#[salsa::tracked(returns(ref), unsafe(non_update_return_type), cycle_result = generic_predicates_for_param_cycle_result)] +pub(crate) fn generic_predicates_for_param<'db>( db: &'db dyn HirDatabase, def: GenericDefId, param_id: TypeOrConstParamId, assoc_name: Option<Name>, -) -> GenericPredicates<'db> { +) -> EarlyBinder<'db, Box<[Clause<'db>]>> { let generics = generics(db, def); let interner = DbInterner::new_with(db, None, None); let resolver = def.resolver(db); @@ -1436,44 +1472,140 @@ predicates.extend(implicitly_sized_predicates); }; } - GenericPredicates(predicates.is_empty().not().then(|| predicates.into())) + EarlyBinder::bind(predicates.into_boxed_slice()) } -pub(crate) fn generic_predicates_for_param_cycle_result( - _db: &dyn HirDatabase, +pub(crate) fn generic_predicates_for_param_cycle_result<'db>( + _db: &'db dyn HirDatabase, _def: GenericDefId, _param_id: TypeOrConstParamId, _assoc_name: Option<Name>, -) -> GenericPredicates<'_> { - GenericPredicates(None) +) -> EarlyBinder<'db, Box<[Clause<'db>]>> { + EarlyBinder::bind(Box::new([])) +} + +#[inline] +pub(crate) fn type_alias_bounds<'db>( + db: &'db dyn HirDatabase, + type_alias: TypeAliasId, +) -> EarlyBinder<'db, &'db [Clause<'db>]> { + type_alias_bounds_with_diagnostics(db, type_alias).0.as_ref().map_bound(|it| &**it) +} + +#[salsa::tracked(returns(ref), unsafe(non_update_return_type))] +pub fn 
type_alias_bounds_with_diagnostics<'db>( + db: &'db dyn HirDatabase, + type_alias: TypeAliasId, +) -> (EarlyBinder<'db, Box<[Clause<'db>]>>, Diagnostics) { + let type_alias_data = db.type_alias_signature(type_alias); + let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + type_alias.into(), + LifetimeElisionKind::AnonymousReportError, + ); + let interner = ctx.interner; + let def_id = type_alias.into(); + + let item_args = GenericArgs::identity_for_item(interner, def_id); + let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args); + + let mut bounds = Vec::new(); + for bound in &type_alias_data.bounds { + ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| { + bounds.push(pred); + }); + } + + if !ctx.unsized_types.contains(&interner_ty) { + let sized_trait = LangItem::Sized + .resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate")); + if let Some(sized_trait) = sized_trait { + let trait_ref = TraitRef::new_from_args( + interner, + sized_trait.into(), + GenericArgs::new_from_iter(interner, [interner_ty.into()]), + ); + bounds.push(trait_ref.upcast(interner)); + }; + } + + (EarlyBinder::bind(bounds.into_boxed_slice()), create_diagnostics(ctx.diagnostics)) } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericPredicates<'db>(Option<Arc<[Clause<'db>]>>); +pub struct GenericPredicates<'db> { + // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait predicate for the + // parent. Then come the explicit predicates for the parent, then the explicit trait predicate for the child, + // then the implicit trait predicate for the child, if `is_trait` is `true`. 
+ predicates: EarlyBinder<'db, Box<[Clause<'db>]>>, + own_predicates_start: u32, + is_trait: bool, + parent_is_trait: bool, +} +#[salsa::tracked] impl<'db> GenericPredicates<'db> { - #[inline] - pub fn instantiate( - &self, - interner: DbInterner<'db>, - args: GenericArgs<'db>, - ) -> Option<impl Iterator<Item = Clause<'db>>> { - self.0 - .as_ref() - .map(|it| EarlyBinder::bind(it.iter().copied()).iter_instantiated(interner, args)) - } - - #[inline] - pub fn instantiate_identity(&self) -> Option<impl Iterator<Item = Clause<'db>>> { - self.0.as_ref().map(|it| it.iter().copied()) + /// Resolve the where clause(s) of an item with generics. + /// + /// Diagnostics are computed only for this item's predicates, not for parents. + #[salsa::tracked(returns(ref), unsafe(non_update_return_type))] + pub fn query_with_diagnostics( + db: &'db dyn HirDatabase, + def: GenericDefId, + ) -> (GenericPredicates<'db>, Diagnostics) { + generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true) } } -impl<'db> ops::Deref for GenericPredicates<'db> { - type Target = [Clause<'db>]; +impl<'db> GenericPredicates<'db> { + #[inline] + pub fn query(db: &'db dyn HirDatabase, def: GenericDefId) -> &'db GenericPredicates<'db> { + &Self::query_with_diagnostics(db, def).0 + } - fn deref(&self) -> &Self::Target { - self.0.as_deref().unwrap_or(&[]) + #[inline] + pub fn query_all( + db: &'db dyn HirDatabase, + def: GenericDefId, + ) -> EarlyBinder<'db, &'db [Clause<'db>]> { + Self::query(db, def).all_predicates() + } + + #[inline] + pub fn query_own( + db: &'db dyn HirDatabase, + def: GenericDefId, + ) -> EarlyBinder<'db, &'db [Clause<'db>]> { + Self::query(db, def).own_predicates() + } + + #[inline] + pub fn query_explicit( + db: &'db dyn HirDatabase, + def: GenericDefId, + ) -> EarlyBinder<'db, &'db [Clause<'db>]> { + Self::query(db, def).explicit_predicates() + } + + #[inline] + pub fn all_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { + self.predicates.as_ref().map_bound(|it| 
&**it) + } + + #[inline] + pub fn own_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { + self.predicates.as_ref().map_bound(|it| &it[self.own_predicates_start as usize..]) + } + + /// Returns the predicates, minus the implicit `Self: Trait` predicate for a trait. + #[inline] + pub fn explicit_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { + self.predicates.as_ref().map_bound(|it| { + &it[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)] + }) } } @@ -1492,136 +1624,31 @@ db: &'db dyn HirDatabase, def: GenericDefId, ) -> Arc<TraitEnvironment<'db>> { - let generics = generics(db, def); - if generics.has_no_predicates() && generics.is_empty() { - return TraitEnvironment::empty(def.krate(db)); - } - - let resolver = def.resolver(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - generics.store(), - def, - LifetimeElisionKind::AnonymousReportError, - ); - let mut traits_in_scope = Vec::new(); - let mut clauses = Vec::new(); - for maybe_parent_generics in - std::iter::successors(Some(&generics), |generics| generics.parent_generics()) - { - ctx.store = maybe_parent_generics.store(); - for pred in maybe_parent_generics.where_predicates() { - for pred in ctx.lower_where_predicate(pred, false, &generics, PredicateFilter::All) { - if let rustc_type_ir::ClauseKind::Trait(tr) = pred.kind().skip_binder() { - traits_in_scope.push((tr.self_ty(), tr.def_id().0)); - } - clauses.push(pred); - } - } - } - - if let Some(trait_id) = def.assoc_trait_container(db) { - // add `Self: Trait<T1, T2, ...>` to the environment in trait - // function default implementations (and speculative code - // inside consts or type aliases) - cov_mark::hit!(trait_self_implements_self); - let trait_ref = TraitRef::identity(ctx.interner, trait_id.into()); - let clause = Clause(Predicate::new( - ctx.interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Trait( - TraitPredicate { trait_ref, polarity: 
rustc_type_ir::PredicatePolarity::Positive }, - ))), - )); - clauses.push(clause); - } - - let explicitly_unsized_tys = ctx.unsized_types; - - let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); - if let Some(sized_trait) = sized_trait { - let (mut generics, mut def_id) = - (crate::next_solver::generics::generics(db, def.into()), def); - loop { - let self_idx = trait_self_param_idx(db, def_id); - for (idx, p) in generics.own_params.iter().enumerate() { - if let Some(self_idx) = self_idx - && p.index() as usize == self_idx - { - continue; - } - let GenericParamId::TypeParamId(param_id) = p.id else { - continue; - }; - let idx = idx as u32 + generics.parent_count as u32; - let param_ty = Ty::new_param(ctx.interner, param_id, idx); - if explicitly_unsized_tys.contains(¶m_ty) { - continue; - } - let trait_ref = TraitRef::new_from_args( - ctx.interner, - sized_trait.into(), - GenericArgs::new_from_iter(ctx.interner, [param_ty.into()]), - ); - let clause = Clause(Predicate::new( - ctx.interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - )); - clauses.push(clause); - } - - if let Some(g) = generics.parent { - generics = crate::next_solver::generics::generics(db, g.into()); - def_id = g; - } else { - break; - } - } - } - - let clauses = rustc_type_ir::elaborate::elaborate(ctx.interner, clauses); - let clauses = Clauses::new_from_iter(ctx.interner, clauses); + let module = def.module(db); + let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block()); + let predicates = GenericPredicates::query_all(db, def); + let traits_in_scope = predicates + .iter_identity_copied() + .filter_map(|pred| match pred.kind().skip_binder() { + ClauseKind::Trait(tr) => Some((tr.self_ty(), tr.def_id().0)), + _ => None, + }) + .collect(); + let clauses = rustc_type_ir::elaborate::elaborate(interner, 
predicates.iter_identity_copied()); + let clauses = Clauses::new_from_iter(interner, clauses); let env = ParamEnv { clauses }; - TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env) + // FIXME: We should normalize projections here, like rustc does. + + TraitEnvironment::new(module.krate(), module.containing_block(), traits_in_scope, env) } -#[derive(Copy, Clone, Debug)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub(crate) enum PredicateFilter { SelfTrait, All, } -/// Resolve the where clause(s) of an item with generics. -#[tracing::instrument(skip(db))] -pub(crate) fn generic_predicates_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> GenericPredicates<'db> { - generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true).0 -} - -pub(crate) fn generic_predicates_without_parent_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> GenericPredicates<'db> { - generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def).0 -} - -/// Resolve the where clause(s) of an item with generics, -/// except the ones inherited from the parent -pub(crate) fn generic_predicates_without_parent_with_diagnostics_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> (GenericPredicates<'db>, Diagnostics) { - generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def) -} - /// Resolve the where clause(s) of an item with generics, /// with a given filter #[tracing::instrument(skip(db, filter), ret)] @@ -1644,15 +1671,35 @@ def, LifetimeElisionKind::AnonymousReportError, ); + let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); let mut predicates = Vec::new(); - for maybe_parent_generics in + let all_generics = std::iter::successors(Some(&generics), |generics| generics.parent_generics()) - { - ctx.store = maybe_parent_generics.store(); - for pred in maybe_parent_generics.where_predicates() { - tracing::debug!(?pred); - if 
filter(maybe_parent_generics.def()) { + .collect::<ArrayVec<_, 2>>(); + let mut is_trait = false; + let mut parent_is_trait = false; + if all_generics.len() > 1 { + add_implicit_trait_predicate( + interner, + all_generics.last().unwrap().def(), + predicate_filter, + &mut predicates, + &mut parent_is_trait, + ); + } + // We need to lower parent predicates first - see the comment below lowering of implicit `Sized` predicates + // for why. + let mut own_predicates_start = 0; + for &maybe_parent_generics in all_generics.iter().rev() { + let current_def_predicates_start = predicates.len(); + // Collect only diagnostics from the child, not including parents. + ctx.diagnostics.clear(); + + if filter(maybe_parent_generics.def()) { + ctx.store = maybe_parent_generics.store(); + for pred in maybe_parent_generics.where_predicates() { + tracing::debug!(?pred); predicates.extend(ctx.lower_where_predicate( pred, false, @@ -1660,66 +1707,132 @@ predicate_filter, )); } + + push_const_arg_has_type_predicates(db, &mut predicates, maybe_parent_generics); + + if let Some(sized_trait) = sized_trait { + let mut add_sized_clause = |param_idx, param_id, param_data| { + let ( + GenericParamId::TypeParamId(param_id), + GenericParamDataRef::TypeParamData(param_data), + ) = (param_id, param_data) + else { + return; + }; + + if param_data.provenance == TypeParamProvenance::TraitSelf { + return; + } + + let param_ty = Ty::new_param(interner, param_id, param_idx); + if ctx.unsized_types.contains(¶m_ty) { + return; + } + let trait_ref = TraitRef::new_from_args( + interner, + sized_trait.into(), + GenericArgs::new_from_iter(interner, [param_ty.into()]), + ); + let clause = Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )); + predicates.push(clause); + }; + let parent_params_len = maybe_parent_generics.len_parent(); + 
maybe_parent_generics.iter_self().enumerate().for_each( + |(param_idx, (param_id, param_data))| { + add_sized_clause( + (param_idx + parent_params_len) as u32, + param_id, + param_data, + ); + }, + ); + } + + // We do not clear `ctx.unsized_types`, as the `?Sized` clause of a child (e.g. an associated type) can + // be declared on the parent (e.g. the trait). It is nevertheless fine to register the implicit `Sized` + // predicates before lowering the child, as a child cannot define a `?Sized` predicate for its parent. + // But we do have to lower the parent first. + } + + if maybe_parent_generics.def() == def { + own_predicates_start = current_def_predicates_start as u32; } } - let explicitly_unsized_tys = ctx.unsized_types; + add_implicit_trait_predicate(interner, def, predicate_filter, &mut predicates, &mut is_trait); - let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); - if let Some(sized_trait) = sized_trait { - let mut add_sized_clause = |param_idx, param_id, param_data| { - let ( - GenericParamId::TypeParamId(param_id), - GenericParamDataRef::TypeParamData(param_data), - ) = (param_id, param_data) - else { - return; - }; + let diagnostics = create_diagnostics(ctx.diagnostics); + let predicates = GenericPredicates { + own_predicates_start, + is_trait, + parent_is_trait, + predicates: EarlyBinder::bind(predicates.into_boxed_slice()), + }; + return (predicates, diagnostics); - if param_data.provenance == TypeParamProvenance::TraitSelf { - return; - } - - let param_ty = Ty::new_param(interner, param_id, param_idx); - if explicitly_unsized_tys.contains(¶m_ty) { - return; - } - let trait_ref = TraitRef::new_from_args( - interner, - sized_trait.into(), - GenericArgs::new_from_iter(interner, [param_ty.into()]), - ); - let clause = Clause(Predicate::new( - interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - 
)), - )); - predicates.push(clause); - }; - if generics.parent_generics().is_some_and(|parent| filter(parent.def())) { - generics.iter_parent().enumerate().for_each(|(param_idx, (param_id, param_data))| { - add_sized_clause(param_idx as u32, param_id, param_data); - }); - } - if filter(def) { - let parent_params_len = generics.len_parent(); - generics.iter_self().enumerate().for_each(|(param_idx, (param_id, param_data))| { - add_sized_clause((param_idx + parent_params_len) as u32, param_id, param_data); - }); + fn add_implicit_trait_predicate<'db>( + interner: DbInterner<'db>, + def: GenericDefId, + predicate_filter: PredicateFilter, + predicates: &mut Vec<Clause<'db>>, + set_is_trait: &mut bool, + ) { + // For traits, add `Self: Trait` predicate. This is + // not part of the predicates that a user writes, but it + // is something that one must prove in order to invoke a + // method or project an associated type. + // + // In the chalk setup, this predicate is not part of the + // "predicates" for a trait item. But it is useful in + // rustc because if you directly (e.g.) invoke a trait + // method like `Trait::method(...)`, you must naturally + // prove that the trait applies to the types that were + // used, and adding the predicate into this list ensures + // that this is done. + if let GenericDefId::TraitId(def_id) = def + && predicate_filter == PredicateFilter::All + { + *set_is_trait = true; + predicates.push(TraitRef::identity(interner, def_id.into()).upcast(interner)); } } +} - // FIXME: rustc gathers more predicates by recursing through resulting trait predicates. 
- // See https://github.com/rust-lang/rust/blob/76c5ed2847cdb26ef2822a3a165d710f6b772217/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L689-L715 +fn push_const_arg_has_type_predicates<'db>( + db: &'db dyn HirDatabase, + predicates: &mut Vec<Clause<'db>>, + generics: &Generics, +) { + let interner = DbInterner::new_with(db, None, None); + let const_params_offset = generics.len_parent() + generics.len_lifetimes_self(); + for (param_index, (param_idx, param_data)) in generics.iter_self_type_or_consts().enumerate() { + if !matches!(param_data, TypeOrConstParamData::ConstParamData(_)) { + continue; + } - ( - GenericPredicates(predicates.is_empty().not().then(|| predicates.into())), - create_diagnostics(ctx.diagnostics), - ) + let param_id = ConstParamId::from_unchecked(TypeOrConstParamId { + parent: generics.def(), + local_id: param_idx, + }); + predicates.push(Clause( + ClauseKind::ConstArgHasType( + Const::new_param( + interner, + ParamConst { id: param_id, index: (param_index + const_params_offset) as u32 }, + ), + db.const_param_ty_ns(param_id), + ) + .upcast(interner), + )); + } } /// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound. @@ -2112,7 +2225,8 @@ |pred| pred != def && pred == GenericDefId::TraitId(trait_ref.def_id.0), ) .0 - .deref() + .predicates + .instantiate_identity() { tracing::debug!(?pred); let sup_trait_ref = match pred.kind().skip_binder() { @@ -2158,10 +2272,11 @@ } let predicates = - db.generic_predicates_for_param(def, param_id.into(), assoc_name.clone()); + generic_predicates_for_param(db, def, param_id.into(), assoc_name.clone()); predicates - .iter() - .find_map(|pred| match (*pred).kind().skip_binder() { + .as_ref() + .iter_identity_copied() + .find_map(|pred| match pred.kind().skip_binder() { rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate), _ => None, })
diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs index 9ba0da6..6d3ce74 100644 --- a/crates/hir-ty/src/lower/path.rs +++ b/crates/hir-ty/src/lower/path.rs
@@ -774,7 +774,7 @@ } } - fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db> { + fn parent_arg(&mut self, _param_idx: u32, param_id: GenericParamId) -> GenericArg<'db> { match param_id { GenericParamId::TypeParamId(_) => { Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into() @@ -992,7 +992,7 @@ preceding_args: &[GenericArg<'db>], ) -> GenericArg<'db>; - fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db>; + fn parent_arg(&mut self, param_idx: u32, param_id: GenericParamId) -> GenericArg<'db>; } /// Returns true if there was an error. @@ -1129,7 +1129,9 @@ let mut substs = Vec::with_capacity(def_generics.len()); - substs.extend(def_generics.iter_parent_id().map(|id| ctx.parent_arg(id))); + substs.extend( + def_generics.iter_parent_id().enumerate().map(|(idx, id)| ctx.parent_arg(idx as u32, id)), + ); let mut args = args_slice.iter().enumerate().peekable(); let mut params = def_generics.iter_self().peekable();
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 1e30897..799bfb3 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs
@@ -2,683 +2,349 @@ //! For details about how this works in rustc, see the method lookup page in the //! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html) //! and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs. -use std::ops::ControlFlow; + +mod confirm; +mod probe; + +use either::Either; +use hir_expand::name::Name; +use span::Edition; +use tracing::{debug, instrument}; use base_db::Crate; use hir_def::{ - AdtId, AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, - ModuleId, TraitId, TypeAliasId, + AssocItemId, BlockId, ConstId, FunctionId, GenericParamId, HasModule, ImplId, ItemContainerId, + ModuleId, TraitId, + expr_store::path::GenericArgs as HirGenericArgs, + hir::ExprId, nameres::{DefMap, block_def_map, crate_def_map}, - signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags}, + resolver::Resolver, }; -use hir_expand::name::Name; -use intern::sym; -use rustc_ast_ir::Mutability; +use intern::{Symbol, sym}; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ - FloatTy, IntTy, TypeVisitableExt, UintTy, - inherent::{ - AdtDef, BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Ty as _, - }, + TypeVisitableExt, + fast_reject::{TreatParams, simplify_type}, + inherent::{BoundExistentialPredicates, IntoKind, SliceLike}, }; -use smallvec::{SmallVec, smallvec}; -use stdx::never; +use stdx::impl_from; use triomphe::Arc; use crate::{ - TraitEnvironment, - autoderef::{self, AutoderefKind}, + TraitEnvironment, all_super_traits, db::HirDatabase, - infer::{Adjust, Adjustment, OverloadedDeref, PointerCast, unify::InferenceTable}, - lang_items::is_box, + infer::{InferenceContext, unify::InferenceTable}, + lower::GenericPredicates, next_solver::{ - Canonical, DbInterner, ErrorGuaranteed, GenericArgs, Goal, Predicate, Region, SolverDefId, - TraitRef, Ty, TyKind, TypingMode, + Binder, ClauseKind, DbInterner, FnSig, GenericArgs, 
PredicateKind, SimplifiedType, + SolverDefId, TraitRef, Ty, TyKind, TypingMode, infer::{ - DbInternerInferExt, InferCtxt, + BoundRegionConversionTime, DbInternerInferExt, InferCtxt, InferOk, select::ImplSource, - traits::{Obligation, ObligationCause, PredicateObligation}, + traits::{Obligation, ObligationCause, PredicateObligations}, }, obligation_ctxt::ObligationCtxt, + util::clauses_as_obligations, }, - traits::next_trait_solve_canonical_in_ctxt, - utils::all_super_traits, }; -/// This is used as a key for indexing impls. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub enum TyFingerprint { - // These are lang item impls: - Str, - Slice, - Array, - Never, - Ref(Mutability), - RawPtr(Mutability), - Bool, - Char, - Int(IntTy), - Uint(UintTy), - Float(FloatTy), - // These can have user-defined impls: - Adt(hir_def::AdtId), - Dyn(TraitId), - ForeignType(TypeAliasId), - // These only exist for trait impls - Unit, - Unnameable, - Function(u32), -} - -impl TyFingerprint { - /// Creates a TyFingerprint for looking up an inherent impl. Only certain - /// types can have inherent impls: if we have some `struct S`, we can have - /// an `impl S`, but not `impl &S`. Hence, this will return `None` for - /// reference types and such. - pub fn for_inherent_impl<'db>(ty: Ty<'db>) -> Option<TyFingerprint> { - let fp = match ty.kind() { - TyKind::Str => TyFingerprint::Str, - TyKind::Never => TyFingerprint::Never, - TyKind::Slice(..) => TyFingerprint::Slice, - TyKind::Array(..) => TyFingerprint::Array, - TyKind::Bool => TyFingerprint::Bool, - TyKind::Char => TyFingerprint::Char, - TyKind::Int(int) => TyFingerprint::Int(int), - TyKind::Uint(int) => TyFingerprint::Uint(int), - TyKind::Float(float) => TyFingerprint::Float(float), - TyKind::Adt(adt_def, _) => TyFingerprint::Adt(adt_def.def_id().0), - TyKind::RawPtr(_, mutability) => TyFingerprint::RawPtr(mutability), - TyKind::Foreign(alias_id, ..) 
=> TyFingerprint::ForeignType(alias_id.0), - TyKind::Dynamic(bounds, _) => { - bounds.principal_def_id().map(|trait_| TyFingerprint::Dyn(trait_.0))? - } - _ => return None, - }; - Some(fp) - } - - /// Creates a TyFingerprint for looking up a trait impl. - pub fn for_trait_impl<'db>(ty: Ty<'db>) -> Option<TyFingerprint> { - let fp = match ty.kind() { - TyKind::Str => TyFingerprint::Str, - TyKind::Never => TyFingerprint::Never, - TyKind::Slice(..) => TyFingerprint::Slice, - TyKind::Array(..) => TyFingerprint::Array, - TyKind::Bool => TyFingerprint::Bool, - TyKind::Char => TyFingerprint::Char, - TyKind::Int(int) => TyFingerprint::Int(int), - TyKind::Uint(int) => TyFingerprint::Uint(int), - TyKind::Float(float) => TyFingerprint::Float(float), - TyKind::Adt(adt_def, _) => TyFingerprint::Adt(adt_def.def_id().0), - TyKind::RawPtr(_, mutability) => TyFingerprint::RawPtr(mutability), - TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(alias_id.0), - TyKind::Dynamic(bounds, _) => { - bounds.principal_def_id().map(|trait_| TyFingerprint::Dyn(trait_.0))? - } - TyKind::Ref(_, _, mutability) => TyFingerprint::Ref(mutability), - TyKind::Tuple(subst) => { - let first_ty = subst.as_slice().first(); - match first_ty { - Some(ty) => return TyFingerprint::for_trait_impl(*ty), - None => TyFingerprint::Unit, - } - } - // FIXME(next-solver): Putting `Alias` here is *probably* incorrect, AFAIK it should return `None`. But this breaks - // flyimport, which uses an incorrect but fast method resolution algorithm. Therefore we put it here, - // because this function is only called by flyimport, and anyway we should get rid of `TyFingerprint` - // and switch to `rustc_type_ir`'s `SimplifiedType`. - TyKind::Alias(..) - | TyKind::FnDef(_, _) - | TyKind::Closure(_, _) - | TyKind::Coroutine(..) - | TyKind::CoroutineClosure(..) - | TyKind::CoroutineWitness(..) 
=> TyFingerprint::Unnameable, - TyKind::FnPtr(sig, _) => { - TyFingerprint::Function(sig.skip_binder().inputs_and_output.inner().len() as u32) - } - TyKind::Param(_) - | TyKind::Bound(..) - | TyKind::Placeholder(..) - | TyKind::Infer(_) - | TyKind::Error(_) - | TyKind::Pat(..) - | TyKind::UnsafeBinder(..) => return None, - }; - Some(fp) - } -} - -pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [ - TyFingerprint::Int(IntTy::I8), - TyFingerprint::Int(IntTy::I16), - TyFingerprint::Int(IntTy::I32), - TyFingerprint::Int(IntTy::I64), - TyFingerprint::Int(IntTy::I128), - TyFingerprint::Int(IntTy::Isize), - TyFingerprint::Uint(UintTy::U8), - TyFingerprint::Uint(UintTy::U16), - TyFingerprint::Uint(UintTy::U32), - TyFingerprint::Uint(UintTy::U64), - TyFingerprint::Uint(UintTy::U128), - TyFingerprint::Uint(UintTy::Usize), -]; - -pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 4] = [ - TyFingerprint::Float(FloatTy::F16), - TyFingerprint::Float(FloatTy::F32), - TyFingerprint::Float(FloatTy::F64), - TyFingerprint::Float(FloatTy::F128), -]; - -type TraitFpMap = FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Box<[ImplId]>>>; -type TraitFpMapCollector = FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>; - -/// Trait impls defined or available in some crate. -#[derive(Debug, Eq, PartialEq)] -pub struct TraitImpls { - // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type. 
- map: TraitFpMap, -} - -impl TraitImpls { - pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: Crate) -> Arc<Self> { - let _p = tracing::info_span!("trait_impls_in_crate_query", ?krate).entered(); - let mut impls = FxHashMap::default(); - - Self::collect_def_map(db, &mut impls, crate_def_map(db, krate)); - - Arc::new(Self::finish(impls)) - } - - pub(crate) fn trait_impls_in_block_query( - db: &dyn HirDatabase, - block: BlockId, - ) -> Option<Arc<Self>> { - let _p = tracing::info_span!("trait_impls_in_block_query").entered(); - let mut impls = FxHashMap::default(); - - Self::collect_def_map(db, &mut impls, block_def_map(db, block)); - - if impls.is_empty() { None } else { Some(Arc::new(Self::finish(impls))) } - } - - pub(crate) fn trait_impls_in_deps_query( - db: &dyn HirDatabase, - krate: Crate, - ) -> Arc<[Arc<Self>]> { - let _p = tracing::info_span!("trait_impls_in_deps_query", ?krate).entered(); - Arc::from_iter( - db.transitive_deps(krate).into_iter().map(|krate| db.trait_impls_in_crate(krate)), - ) - } - - fn finish(map: TraitFpMapCollector) -> TraitImpls { - TraitImpls { - map: map - .into_iter() - .map(|(k, v)| (k, v.into_iter().map(|(k, v)| (k, v.into_boxed_slice())).collect())) - .collect(), - } - } - - fn collect_def_map(db: &dyn HirDatabase, map: &mut TraitFpMapCollector, def_map: &DefMap) { - for (_module_id, module_data) in def_map.modules() { - for impl_id in module_data.scope.impls() { - // Reservation impls should be ignored during trait resolution, so we never need - // them during type analysis. See rust-lang/rust#64631 for details. - // - // FIXME: Reservation impls should be considered during coherence checks. If we are - // (ever) to implement coherence checks, this filtering should be done by the trait - // solver. 
- if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() { - continue; - } - let target_trait = match db.impl_trait(impl_id) { - Some(tr) => tr.skip_binder().def_id.0, - None => continue, - }; - let self_ty = db.impl_self_ty(impl_id); - let self_ty_fp = TyFingerprint::for_trait_impl(self_ty.instantiate_identity()); - map.entry(target_trait).or_default().entry(self_ty_fp).or_default().push(impl_id); - } - - // To better support custom derives, collect impls in all unnamed const items. - // const _: () = { ... }; - for konst in module_data.scope.unnamed_consts() { - let body = db.body(konst.into()); - for (_, block_def_map) in body.blocks(db) { - Self::collect_def_map(db, map, block_def_map); - } - } - } - } - - /// Queries all trait impls for the given type. - pub fn for_self_ty_without_blanket_impls( - &self, - fp: TyFingerprint, - ) -> impl Iterator<Item = ImplId> + '_ { - self.map - .values() - .flat_map(move |impls| impls.get(&Some(fp)).into_iter()) - .flat_map(|it| it.iter().copied()) - } - - /// Queries all impls of the given trait. - pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ { - self.map - .get(&trait_) - .into_iter() - .flat_map(|map| map.values().flat_map(|v| v.iter().copied())) - } - - /// Queries all impls of `trait_` that may apply to `self_ty`. - pub fn for_trait_and_self_ty( - &self, - trait_: TraitId, - self_ty: TyFingerprint, - ) -> impl Iterator<Item = ImplId> + '_ { - self.map - .get(&trait_) - .into_iter() - .flat_map(move |map| map.get(&Some(self_ty)).into_iter().chain(map.get(&None))) - .flat_map(|v| v.iter().copied()) - } - - /// Queries whether `self_ty` has potentially applicable implementations of `trait_`. 
- pub fn has_impls_for_trait_and_self_ty(&self, trait_: TraitId, self_ty: TyFingerprint) -> bool { - self.for_trait_and_self_ty(trait_, self_ty).next().is_some() - } - - pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ { - self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied())) - } -} - -/// Inherent impls defined in some crate. -/// -/// Inherent impls can only be defined in the crate that also defines the self type of the impl -/// (note that some primitives are considered to be defined by both libcore and liballoc). -/// -/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a -/// single crate. -#[derive(Debug, Eq, PartialEq)] -pub struct InherentImpls { - map: FxHashMap<TyFingerprint, Vec<ImplId>>, - invalid_impls: Vec<ImplId>, -} - -impl InherentImpls { - pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: Crate) -> Arc<Self> { - let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered(); - let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; - - let crate_def_map = crate_def_map(db, krate); - impls.collect_def_map(db, crate_def_map); - impls.shrink_to_fit(); - - Arc::new(impls) - } - - pub(crate) fn inherent_impls_in_block_query( - db: &dyn HirDatabase, - block: BlockId, - ) -> Option<Arc<Self>> { - let _p = tracing::info_span!("inherent_impls_in_block_query").entered(); - let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; - - let block_def_map = block_def_map(db, block); - impls.collect_def_map(db, block_def_map); - impls.shrink_to_fit(); - - if impls.map.is_empty() && impls.invalid_impls.is_empty() { - None - } else { - Some(Arc::new(impls)) - } - } - - fn shrink_to_fit(&mut self) { - self.map.values_mut().for_each(Vec::shrink_to_fit); - self.map.shrink_to_fit(); - } - - fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) { - for (_module_id, module_data) in 
def_map.modules() { - for impl_id in module_data.scope.impls() { - let data = db.impl_signature(impl_id); - if data.target_trait.is_some() { - continue; - } - - let self_ty = db.impl_self_ty(impl_id); - let self_ty = self_ty.instantiate_identity(); - - match is_inherent_impl_coherent(db, def_map, impl_id, self_ty) { - true => { - // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution) - if let Some(fp) = TyFingerprint::for_inherent_impl(self_ty) { - self.map.entry(fp).or_default().push(impl_id); - } - } - false => self.invalid_impls.push(impl_id), - } - } - - // To better support custom derives, collect impls in all unnamed const items. - // const _: () = { ... }; - for konst in module_data.scope.unnamed_consts() { - let body = db.body(konst.into()); - for (_, block_def_map) in body.blocks(db) { - self.collect_def_map(db, block_def_map); - } - } - } - } - - pub fn for_self_ty<'db>(&self, self_ty: Ty<'db>) -> &[ImplId] { - match TyFingerprint::for_inherent_impl(self_ty) { - Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]), - None => &[], - } - } - - pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ { - self.map.values().flat_map(|v| v.iter().copied()) - } - - pub fn invalid_impls(&self) -> &[ImplId] { - &self.invalid_impls - } -} - -pub(crate) fn incoherent_inherent_impl_crates( - db: &dyn HirDatabase, - krate: Crate, - fp: TyFingerprint, -) -> SmallVec<[Crate; 2]> { - let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered(); - let mut res = SmallVec::new(); - - // should pass crate for finger print and do reverse deps - - for krate in db.transitive_deps(krate) { - let impls = db.inherent_impls_in_crate(krate); - if impls.map.get(&fp).is_some_and(|v| !v.is_empty()) { - res.push(krate); - } - } - - res -} - -pub fn def_crates<'db>( - db: &'db dyn HirDatabase, - ty: Ty<'db>, - cur_crate: Crate, -) -> Option<SmallVec<[Crate; 2]>> { - match ty.kind() { - TyKind::Adt(adt_def, _) => 
{ - let def_id = adt_def.def_id().0; - let rustc_has_incoherent_inherent_impls = match def_id { - hir_def::AdtId::StructId(id) => db - .struct_signature(id) - .flags - .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), - hir_def::AdtId::UnionId(id) => db - .union_signature(id) - .flags - .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), - hir_def::AdtId::EnumId(id) => db - .enum_signature(id) - .flags - .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), - }; - Some(if rustc_has_incoherent_inherent_impls { - db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Adt(def_id)) - } else { - smallvec![def_id.module(db).krate()] - }) - } - TyKind::Foreign(alias) => { - let alias = alias.0; - Some( - if db - .type_alias_signature(alias) - .flags - .contains(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL) - { - db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(alias)) - } else { - smallvec![alias.module(db).krate()] - }, - ) - } - TyKind::Dynamic(bounds, _) => { - let trait_id = bounds.principal_def_id()?.0; - Some( - if db - .trait_signature(trait_id) - .flags - .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) - { - db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Dyn(trait_id)) - } else { - smallvec![trait_id.module(db).krate()] - }, - ) - } - // for primitives, there may be impls in various places (core and alloc - // mostly). We just check the whole crate graph for crates with impls - // (cached behind a query). - TyKind::Bool - | TyKind::Char - | TyKind::Int(_) - | TyKind::Uint(_) - | TyKind::Float(_) - | TyKind::Str - | TyKind::Slice(_) - | TyKind::Array(..) - | TyKind::RawPtr(..) => Some(db.incoherent_inherent_impl_crates( - cur_crate, - TyFingerprint::for_inherent_impl(ty).expect("fingerprint for primitive"), - )), - _ => None, - } -} - -/// Look up the method with the given name. 
-pub(crate) fn lookup_method<'db>( - ty: &Canonical<'db, Ty<'db>>, - table: &mut InferenceTable<'db>, - traits_in_scope: &FxHashSet<TraitId>, - visible_from_module: VisibleFromModule, - name: &Name, -) -> Option<(ReceiverAdjustments, FunctionId, bool)> { - let mut not_visible = None; - let res = iterate_method_candidates( - ty, - table, - traits_in_scope, - visible_from_module, - Some(name), - LookupMode::MethodCall, - |adjustments, f, visible| match f { - AssocItemId::FunctionId(f) if visible => Some((adjustments, f, true)), - AssocItemId::FunctionId(f) if not_visible.is_none() => { - not_visible = Some((adjustments, f, false)); - None - } - _ => None, - }, - ); - res.or(not_visible) -} - -/// Whether we're looking up a dotted method call (like `v.len()`) or a path -/// (like `Vec::new`). -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum LookupMode { - /// Looking up a method call like `v.len()`: We only consider candidates - /// that have a `self` parameter, and do autoderef. - MethodCall, - /// Looking up a path like `Vec::new` or `Vec::default`: We consider all - /// candidates including associated constants, but don't do autoderef. - Path, -} - -#[derive(Clone, Copy)] -pub enum VisibleFromModule { - /// Filter for results that are visible from the given module - Filter(ModuleId), - /// Include impls from the given block. 
- IncludeBlock(BlockId), - /// Do nothing special in regards visibility - None, -} - -impl From<Option<ModuleId>> for VisibleFromModule { - fn from(module: Option<ModuleId>) -> Self { - match module { - Some(module) => Self::Filter(module), - None => Self::None, - } - } -} - -impl From<Option<BlockId>> for VisibleFromModule { - fn from(block: Option<BlockId>) -> Self { - match block { - Some(block) => Self::IncludeBlock(block), - None => Self::None, - } - } -} +pub use self::probe::{ + Candidate, CandidateKind, CandidateStep, CandidateWithPrivate, Mode, Pick, PickKind, +}; #[derive(Debug, Clone)] -pub enum AutorefOrPtrAdjustment { - Autoref(Mutability), - ToConstPtr, +pub struct MethodResolutionUnstableFeatures { + arbitrary_self_types: bool, + arbitrary_self_types_pointers: bool, + supertrait_item_shadowing: bool, } -#[derive(Debug, Clone, Default)] -pub struct ReceiverAdjustments { - autoref: Option<AutorefOrPtrAdjustment>, - autoderefs: usize, - unsize_array: bool, -} - -impl ReceiverAdjustments { - pub(crate) fn apply<'db>( - &self, - table: &mut InferenceTable<'db>, - mut ty: Ty<'db>, - ) -> (Ty<'db>, Vec<Adjustment<'db>>) { - let mut adjust = Vec::new(); - let mut autoderef = table.autoderef(ty); - autoderef.next(); - for _ in 0..self.autoderefs { - match autoderef.next() { - None => { - never!("autoderef not possible for {:?}", ty); - ty = Ty::new_error(table.interner(), ErrorGuaranteed); - break; - } - Some((new_ty, _)) => { - ty = new_ty; - let mutbl = match self.autoref { - Some(AutorefOrPtrAdjustment::Autoref(m)) => Some(m), - Some(AutorefOrPtrAdjustment::ToConstPtr) => Some(Mutability::Not), - // FIXME should we know the mutability here, when autoref is `None`? 
- None => None, - }; - adjust.push(Adjustment { - kind: Adjust::Deref(match autoderef.steps().last().unwrap().1 { - AutoderefKind::Overloaded => Some(OverloadedDeref(mutbl)), - AutoderefKind::Builtin => None, - }), - target: ty, - }); - } - } +impl MethodResolutionUnstableFeatures { + pub fn from_def_map(def_map: &DefMap) -> Self { + Self { + arbitrary_self_types: def_map.is_unstable_feature_enabled(&sym::arbitrary_self_types), + arbitrary_self_types_pointers: def_map + .is_unstable_feature_enabled(&sym::arbitrary_self_types_pointers), + supertrait_item_shadowing: def_map + .is_unstable_feature_enabled(&sym::supertrait_item_shadowing), } - if let Some(autoref) = &self.autoref { - let lt = table.next_region_var(); - match autoref { - AutorefOrPtrAdjustment::Autoref(m) => { - let a = Adjustment::borrow(table.interner(), *m, ty, lt); - ty = a.target; - adjust.push(a); + } +} + +pub struct MethodResolutionContext<'a, 'db> { + pub infcx: &'a InferCtxt<'db>, + pub resolver: &'a Resolver<'db>, + pub env: &'a TraitEnvironment<'db>, + pub traits_in_scope: &'a FxHashSet<TraitId>, + pub edition: Edition, + pub unstable_features: &'a MethodResolutionUnstableFeatures, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum CandidateId { + FunctionId(FunctionId), + ConstId(ConstId), +} +impl_from!(FunctionId, ConstId for CandidateId); + +impl CandidateId { + fn container(self, db: &dyn HirDatabase) -> ItemContainerId { + match self { + CandidateId::FunctionId(id) => id.loc(db).container, + CandidateId::ConstId(id) => id.loc(db).container, + } + } +} + +#[derive(Clone, Copy, Debug)] +pub(crate) struct MethodCallee<'db> { + /// Impl method ID, for inherent methods, or trait method ID, otherwise. + pub def_id: FunctionId, + pub args: GenericArgs<'db>, + + /// Instantiated method signature, i.e., it has been + /// instantiated, normalized, and has had late-bound + /// lifetimes replaced with inference variables. 
+ pub sig: FnSig<'db>, +} + +#[derive(Debug)] +pub enum MethodError<'db> { + /// Did not find an applicable method. + NoMatch, + + /// Multiple methods might apply. + Ambiguity(Vec<CandidateSource>), + + /// Found an applicable method, but it is not visible. + PrivateMatch(Pick<'db>), + + /// Found a `Self: Sized` bound where `Self` is a trait object. + IllegalSizedBound { candidates: Vec<FunctionId>, needs_mut: bool }, + + /// Error has already been emitted, no need to emit another one. + ErrorReported, +} + +// A pared down enum describing just the places from which a method +// candidate can arise. Used for error reporting only. +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub enum CandidateSource { + Impl(ImplId), + Trait(TraitId), +} + +impl<'a, 'db> InferenceContext<'a, 'db> { + /// Performs method lookup. If lookup is successful, it will return the callee + /// and store an appropriate adjustment for the self-expr. In some cases it may + /// report an error (e.g., invoking the `drop` method). + #[instrument(level = "debug", skip(self))] + pub(crate) fn lookup_method_including_private( + &mut self, + self_ty: Ty<'db>, + name: Name, + generic_args: Option<&HirGenericArgs>, + receiver: ExprId, + call_expr: ExprId, + ) -> Result<(MethodCallee<'db>, bool), MethodError<'db>> { + let (pick, is_visible) = match self.lookup_probe(name, self_ty) { + Ok(it) => (it, true), + Err(MethodError::PrivateMatch(it)) => { + // FIXME: Report error. + (it, false) + } + Err(err) => return Err(err), + }; + + let result = self.confirm_method(&pick, self_ty, call_expr, generic_args); + debug!("result = {:?}", result); + + if result.illegal_sized_bound { + // FIXME: Report an error. 
+ } + + self.write_expr_adj(receiver, result.adjustments); + self.write_method_resolution(call_expr, result.callee.def_id, result.callee.args); + + Ok((result.callee, is_visible)) + } + + #[instrument(level = "debug", skip(self))] + pub(crate) fn lookup_probe( + &self, + method_name: Name, + self_ty: Ty<'db>, + ) -> probe::PickResult<'db> { + self.with_method_resolution(|ctx| { + let pick = ctx.probe_for_name(probe::Mode::MethodCall, method_name, self_ty)?; + Ok(pick) + }) + } + + pub(crate) fn with_method_resolution<R>( + &self, + f: impl FnOnce(&MethodResolutionContext<'_, 'db>) -> R, + ) -> R { + let traits_in_scope = self.get_traits_in_scope(); + let traits_in_scope = match &traits_in_scope { + Either::Left(it) => it, + Either::Right(it) => *it, + }; + let ctx = MethodResolutionContext { + infcx: &self.table.infer_ctxt, + resolver: &self.resolver, + env: &self.table.trait_env, + traits_in_scope, + edition: self.edition, + unstable_features: &self.unstable_features, + }; + f(&ctx) + } +} + +/// Used by [FnCtxt::lookup_method_for_operator] with `-Znext-solver`. +/// +/// With `AsRigid` we error on `impl Opaque: NotInItemBounds` while +/// `AsInfer` just treats it as ambiguous and succeeds. This is necessary +/// as we want [FnCtxt::check_expr_call] to treat not-yet-defined opaque +/// types as rigid to support `impl Deref<Target = impl FnOnce()>` and +/// `Box<impl FnOnce()>`. +/// +/// We only want to treat opaque types as rigid if we need to eagerly choose +/// between multiple candidates. We otherwise treat them as ordinary inference +/// variable to avoid rejecting otherwise correct code. +#[derive(Debug)] +#[expect(dead_code)] +pub(super) enum TreatNotYetDefinedOpaques { + AsInfer, + AsRigid, +} + +impl<'db> InferenceTable<'db> { + /// `lookup_method_in_trait` is used for overloaded operators. + /// It does a very narrow slice of what the normal probe/confirm path does. 
+ /// In particular, it doesn't really do any probing: it simply constructs + /// an obligation for a particular trait with the given self type and checks + /// whether that trait is implemented. + #[instrument(level = "debug", skip(self))] + pub(super) fn lookup_method_for_operator( + &self, + cause: ObligationCause, + method_name: Symbol, + trait_def_id: TraitId, + self_ty: Ty<'db>, + opt_rhs_ty: Option<Ty<'db>>, + treat_opaques: TreatNotYetDefinedOpaques, + ) -> Option<InferOk<'db, MethodCallee<'db>>> { + // Construct a trait-reference `self_ty : Trait<input_tys>` + let args = GenericArgs::for_item( + self.interner(), + trait_def_id.into(), + |param_idx, param_id, _| match param_id { + GenericParamId::LifetimeParamId(_) | GenericParamId::ConstParamId(_) => { + unreachable!("did not expect operator trait to have lifetime/const") } - AutorefOrPtrAdjustment::ToConstPtr => { - if let TyKind::RawPtr(pointee, Mutability::Mut) = ty.kind() { - let a = Adjustment { - kind: Adjust::Pointer(PointerCast::MutToConstPointer), - target: Ty::new_ptr(table.interner(), pointee, Mutability::Not), - }; - ty = a.target; - adjust.push(a); + GenericParamId::TypeParamId(_) => { + if param_idx == 0 { + self_ty.into() + } else if let Some(rhs_ty) = opt_rhs_ty { + assert_eq!(param_idx, 1, "did not expect >1 param on operator trait"); + rhs_ty.into() } else { - never!("`ToConstPtr` target is not a raw mutable pointer"); + // FIXME: We should stop passing `None` for the failure case + // when probing for call exprs. I.e. `opt_rhs_ty` should always + // be set when it needs to be. + self.next_var_for_param(param_id) } } - }; - } - if self.unsize_array { - ty = 'it: { - if let TyKind::Ref(l, inner, m) = ty.kind() - && let TyKind::Array(inner, _) = inner.kind() - { - break 'it Ty::new_ref( - table.interner(), - l, - Ty::new_slice(table.interner(), inner), - m, - ); - } - // FIXME: report diagnostic if array unsizing happens without indirection. 
- ty - }; - adjust.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: ty }); - } - (ty, adjust) - } + }, + ); - fn with_autoref(&self, a: AutorefOrPtrAdjustment) -> ReceiverAdjustments { - Self { autoref: Some(a), ..*self } - } -} + let obligation = Obligation::new( + self.interner(), + cause, + self.trait_env.env, + TraitRef::new_from_args(self.interner(), trait_def_id.into(), args), + ); -// This would be nicer if it just returned an iterator, but that runs into -// lifetime problems, because we need to borrow temp `CrateImplDefs`. -// FIXME add a context type here? -pub(crate) fn iterate_method_candidates<'db, T>( - ty: &Canonical<'db, Ty<'db>>, - table: &mut InferenceTable<'db>, - traits_in_scope: &FxHashSet<TraitId>, - visible_from_module: VisibleFromModule, - name: Option<&Name>, - mode: LookupMode, - mut callback: impl FnMut(ReceiverAdjustments, AssocItemId, bool) -> Option<T>, -) -> Option<T> { - let mut slot = None; - _ = iterate_method_candidates_dyn_impl( - ty, - table, - traits_in_scope, - visible_from_module, - name, - mode, - &mut |adj, item, visible| { - assert!(slot.is_none()); - if let Some(it) = callback(adj, item, visible) { - slot = Some(it); - return ControlFlow::Break(()); + // Now we want to know if this can be matched + let matches_trait = match treat_opaques { + TreatNotYetDefinedOpaques::AsInfer => self.infer_ctxt.predicate_may_hold(&obligation), + TreatNotYetDefinedOpaques::AsRigid => { + self.infer_ctxt.predicate_may_hold_opaque_types_jank(&obligation) } - ControlFlow::Continue(()) - }, - ); - slot + }; + + if !matches_trait { + debug!("--> Cannot match obligation"); + // Cannot be matched, no such method resolution is possible. + return None; + } + + // Trait must have a method named `m_name` and it should not have + // type parameters or early-bound regions. 
+ let interner = self.interner(); + // We use `Ident::with_dummy_span` since no built-in operator methods have + // any macro-specific hygiene, so the span's context doesn't really matter. + let Some(method_item) = + trait_def_id.trait_items(self.db).method_by_name(&Name::new_symbol_root(method_name)) + else { + panic!("expected associated item for operator trait") + }; + + let def_id = method_item; + + debug!("lookup_in_trait_adjusted: method_item={:?}", method_item); + let mut obligations = PredicateObligations::new(); + + // Instantiate late-bound regions and instantiate the trait + // parameters into the method type to get the actual method type. + // + // N.B., instantiate late-bound regions before normalizing the + // function signature so that normalization does not need to deal + // with bound regions. + let fn_sig = + self.db.callable_item_signature(method_item.into()).instantiate(interner, args); + let fn_sig = self + .infer_ctxt + .instantiate_binder_with_fresh_vars(BoundRegionConversionTime::FnCall, fn_sig); + + // Register obligations for the parameters. This will include the + // `Self` parameter, which in turn has a bound of the main trait, + // so this also effectively registers `obligation` as well. (We + // used to register `obligation` explicitly, but that resulted in + // double error messages being reported.) + // + // Note that as the method comes from a trait, it should not have + // any late-bound regions appearing in its bounds. + let bounds = GenericPredicates::query_all(self.db, method_item.into()); + let bounds = clauses_as_obligations( + bounds.iter_instantiated_copied(interner, args.as_slice()), + ObligationCause::new(), + self.trait_env.env, + ); + + obligations.extend(bounds); + + // Also add an obligation for the method type being well-formed. 
+ debug!( + "lookup_method_in_trait: matched method fn_sig={:?} obligation={:?}", + fn_sig, obligation + ); + for ty in fn_sig.inputs_and_output { + obligations.push(Obligation::new( + interner, + obligation.cause.clone(), + self.trait_env.env, + Binder::dummy(PredicateKind::Clause(ClauseKind::WellFormed(ty.into()))), + )); + } + + let callee = MethodCallee { def_id, args, sig: fn_sig }; + debug!("callee = {:?}", callee); + + Some(InferOk { obligations, value: callee }) + } } pub fn lookup_impl_const<'db>( @@ -690,7 +356,7 @@ let interner = infcx.interner; let db = interner.db; - let trait_id = match const_id.lookup(db).container { + let trait_id = match const_id.loc(db).container { ItemContainerId::TraitId(id) => id, _ => return (const_id, subs), }; @@ -719,7 +385,7 @@ ) -> Option<usize> { let db = interner.db; - let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else { + let ItemContainerId::TraitId(trait_id) = func.loc(db).container else { return None; }; let trait_params = db.generic_params(trait_id.into()).len(); @@ -755,7 +421,7 @@ let interner = DbInterner::new_with(db, Some(env.krate), env.block); let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis); - let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else { + let ItemContainerId::TraitId(trait_id) = func.loc(db).container else { return (func, fn_subst); }; let trait_params = db.generic_params(trait_id.into()).len(); @@ -833,981 +499,337 @@ } } -fn is_inherent_impl_coherent<'db>( - db: &'db dyn HirDatabase, - def_map: &DefMap, - impl_id: ImplId, - self_ty: Ty<'db>, -) -> bool { - let self_ty = self_ty.kind(); - let impl_allowed = match self_ty { - TyKind::Tuple(_) - | TyKind::FnDef(_, _) - | TyKind::Array(_, _) - | TyKind::Never - | TyKind::RawPtr(_, _) - | TyKind::Ref(_, _, _) - | TyKind::Slice(_) - | TyKind::Str - | TyKind::Bool - | TyKind::Char - | TyKind::Int(_) - | TyKind::Uint(_) - | TyKind::Float(_) => def_map.is_rustc_coherence_is_core(), 
+#[salsa::tracked(returns(ref))] +fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase) -> Box<[Crate]> { + // We assume that only sysroot crates contain `#[rustc_has_incoherent_inherent_impls]` + // impls, since this is an internal feature and only std uses it. + db.all_crates().iter().copied().filter(|krate| krate.data(db).origin.is_lang()).collect() +} - TyKind::Adt(adt_def, _) => adt_def.def_id().0.module(db).krate() == def_map.krate(), - TyKind::Dynamic(it, _) => it - .principal_def_id() - .is_some_and(|trait_id| trait_id.0.module(db).krate() == def_map.krate()), - +pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) -> &[ImplId] { + let has_incoherent_impls = match self_ty.def() { + Some(def_id) => match def_id.try_into() { + Ok(def_id) => { + db.attrs(def_id).by_key(sym::rustc_has_incoherent_inherent_impls).exists() + } + Err(()) => true, + }, _ => true, }; - impl_allowed || { - let rustc_has_incoherent_inherent_impls = match self_ty { - TyKind::Tuple(_) - | TyKind::FnDef(_, _) - | TyKind::Array(_, _) - | TyKind::Never - | TyKind::RawPtr(_, _) - | TyKind::Ref(_, _, _) - | TyKind::Slice(_) - | TyKind::Str - | TyKind::Bool - | TyKind::Char - | TyKind::Int(_) - | TyKind::Uint(_) - | TyKind::Float(_) => true, + return if !has_incoherent_impls { + &[] + } else { + incoherent_inherent_impls_query(db, (), self_ty) + }; - TyKind::Adt(adt_def, _) => match adt_def.def_id().0 { - hir_def::AdtId::StructId(id) => db - .struct_signature(id) - .flags - .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), - hir_def::AdtId::UnionId(id) => db - .union_signature(id) - .flags - .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), - hir_def::AdtId::EnumId(it) => db - .enum_signature(it) - .flags - .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), - }, - TyKind::Dynamic(it, _) => it.principal_def_id().is_some_and(|trait_id| { - db.trait_signature(trait_id.0) - .flags - 
.contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) - }), + #[salsa::tracked(returns(ref))] + fn incoherent_inherent_impls_query( + db: &dyn HirDatabase, + _force_query_input_to_be_interned: (), + self_ty: SimplifiedType, + ) -> Box<[ImplId]> { + let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered(); - _ => false, - }; - let items = impl_id.impl_items(db); - rustc_has_incoherent_inherent_impls - && !items.items.is_empty() - && items.items.iter().all(|&(_, assoc)| match assoc { - AssocItemId::FunctionId(it) => { - db.function_signature(it).flags.contains(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL) - } - AssocItemId::ConstId(it) => { - db.const_signature(it).flags.contains(ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL) - } - AssocItemId::TypeAliasId(it) => db - .type_alias_signature(it) - .flags - .contains(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL), - }) + let mut result = Vec::new(); + for &krate in crates_containing_incoherent_inherent_impls(db) { + let impls = InherentImpls::for_crate(db, krate); + result.extend_from_slice(impls.for_self_ty(&self_ty)); + } + result.into_boxed_slice() } } -/// Checks whether the impl satisfies the orphan rules. -/// -/// Given `impl<P1..=Pn> Trait<T1..=Tn> for T0`, an `impl`` is valid only if at least one of the following is true: -/// - Trait is a local trait -/// - All of -/// - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type. -/// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`) -pub fn check_orphan_rules<'db>(db: &'db dyn HirDatabase, impl_: ImplId) -> bool { - let Some(impl_trait) = db.impl_trait(impl_) else { - // not a trait impl - return true; - }; +pub fn simplified_type_module(db: &dyn HirDatabase, ty: &SimplifiedType) -> Option<ModuleId> { + match ty.def()? 
{ + SolverDefId::AdtId(id) => Some(id.module(db)), + SolverDefId::TypeAliasId(id) => Some(id.module(db)), + SolverDefId::TraitId(id) => Some(id.module(db)), + _ => None, + } +} - let local_crate = impl_.lookup(db).container.krate(); - let is_local = |tgt_crate| tgt_crate == local_crate; +#[derive(Debug, PartialEq, Eq)] +pub struct InherentImpls { + map: FxHashMap<SimplifiedType, Box<[ImplId]>>, +} - let trait_ref = impl_trait.instantiate_identity(); - let trait_id = trait_ref.def_id.0; - if is_local(trait_id.module(db).krate()) { - // trait to be implemented is local - return true; +#[salsa::tracked] +impl InherentImpls { + #[salsa::tracked(returns(ref))] + pub fn for_crate(db: &dyn HirDatabase, krate: Crate) -> Self { + let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered(); + + let crate_def_map = crate_def_map(db, krate); + + Self::collect_def_map(db, crate_def_map) } - let unwrap_fundamental = |mut ty: Ty<'db>| { - // Unwrap all layers of fundamental types with a loop. 
- loop { - match ty.kind() { - TyKind::Ref(_, referenced, _) => ty = referenced, - TyKind::Adt(adt_def, subs) => { - let AdtId::StructId(s) = adt_def.def_id().0 else { - break ty; + #[salsa::tracked(returns(ref))] + pub fn for_block(db: &dyn HirDatabase, block: BlockId) -> Option<Box<Self>> { + let _p = tracing::info_span!("inherent_impls_in_block_query").entered(); + + let block_def_map = block_def_map(db, block); + let result = Self::collect_def_map(db, block_def_map); + if result.map.is_empty() { None } else { Some(Box::new(result)) } + } +} + +impl InherentImpls { + fn collect_def_map(db: &dyn HirDatabase, def_map: &DefMap) -> Self { + let mut map = FxHashMap::default(); + collect(db, def_map, &mut map); + let mut map = map + .into_iter() + .map(|(self_ty, impls)| (self_ty, impls.into_boxed_slice())) + .collect::<FxHashMap<_, _>>(); + map.shrink_to_fit(); + return Self { map }; + + fn collect( + db: &dyn HirDatabase, + def_map: &DefMap, + map: &mut FxHashMap<SimplifiedType, Vec<ImplId>>, + ) { + for (_module_id, module_data) in def_map.modules() { + for impl_id in module_data.scope.impls() { + let data = db.impl_signature(impl_id); + if data.target_trait.is_some() { + continue; + } + + let interner = DbInterner::new_with(db, None, None); + let self_ty = db.impl_self_ty(impl_id); + let self_ty = self_ty.instantiate_identity(); + if let Some(self_ty) = + simplify_type(interner, self_ty, TreatParams::InstantiateWithInfer) + { + map.entry(self_ty).or_default().push(impl_id); + } + } + + // To better support custom derives, collect impls in all unnamed const items. + // const _: () = { ... 
}; + for konst in module_data.scope.unnamed_consts() { + let body = db.body(konst.into()); + for (_, block_def_map) in body.blocks(db) { + collect(db, block_def_map, map); + } + } + } + } + } + + pub fn for_self_ty(&self, self_ty: &SimplifiedType) -> &[ImplId] { + self.map.get(self_ty).map(|it| &**it).unwrap_or_default() + } + + pub fn for_each_crate_and_block( + db: &dyn HirDatabase, + krate: Crate, + block: Option<BlockId>, + for_each: &mut dyn FnMut(&InherentImpls), + ) { + let blocks = std::iter::successors(block, |block| block.loc(db).module.containing_block()); + blocks.filter_map(|block| Self::for_block(db, block).as_deref()).for_each(&mut *for_each); + for_each(Self::for_crate(db, krate)); + } +} + +#[derive(Debug, PartialEq)] +struct OneTraitImpls { + non_blanket_impls: FxHashMap<SimplifiedType, Box<[ImplId]>>, + blanket_impls: Box<[ImplId]>, +} + +#[derive(Default)] +struct OneTraitImplsBuilder { + non_blanket_impls: FxHashMap<SimplifiedType, Vec<ImplId>>, + blanket_impls: Vec<ImplId>, +} + +impl OneTraitImplsBuilder { + fn finish(self) -> OneTraitImpls { + let mut non_blanket_impls = self + .non_blanket_impls + .into_iter() + .map(|(self_ty, impls)| (self_ty, impls.into_boxed_slice())) + .collect::<FxHashMap<_, _>>(); + non_blanket_impls.shrink_to_fit(); + let blanket_impls = self.blanket_impls.into_boxed_slice(); + OneTraitImpls { non_blanket_impls, blanket_impls } + } +} + +#[derive(Debug, PartialEq)] +pub struct TraitImpls { + map: FxHashMap<TraitId, OneTraitImpls>, +} + +#[salsa::tracked] +impl TraitImpls { + #[salsa::tracked(returns(ref))] + pub fn for_crate(db: &dyn HirDatabase, krate: Crate) -> Arc<Self> { + let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered(); + + let crate_def_map = crate_def_map(db, krate); + let result = Self::collect_def_map(db, crate_def_map); + Arc::new(result) + } + + #[salsa::tracked(returns(ref))] + pub fn for_block(db: &dyn HirDatabase, block: BlockId) -> Option<Box<Self>> { + let _p = 
tracing::info_span!("inherent_impls_in_block_query").entered(); + + let block_def_map = block_def_map(db, block); + let result = Self::collect_def_map(db, block_def_map); + if result.map.is_empty() { None } else { Some(Box::new(result)) } + } + + #[salsa::tracked(returns(ref))] + pub fn for_crate_and_deps(db: &dyn HirDatabase, krate: Crate) -> Box<[Arc<Self>]> { + db.transitive_deps(krate).iter().map(|&dep| Self::for_crate(db, dep).clone()).collect() + } +} + +impl TraitImpls { + fn collect_def_map(db: &dyn HirDatabase, def_map: &DefMap) -> Self { + let mut map = FxHashMap::default(); + collect(db, def_map, &mut map); + let mut map = map + .into_iter() + .map(|(trait_id, trait_map)| (trait_id, trait_map.finish())) + .collect::<FxHashMap<_, _>>(); + map.shrink_to_fit(); + return Self { map }; + + fn collect( + db: &dyn HirDatabase, + def_map: &DefMap, + map: &mut FxHashMap<TraitId, OneTraitImplsBuilder>, + ) { + for (_module_id, module_data) in def_map.modules() { + for impl_id in module_data.scope.impls() { + // Reservation impls should be ignored during trait resolution, so we never need + // them during type analysis. See rust-lang/rust#64631 for details. + // + // FIXME: Reservation impls should be considered during coherence checks. If we are + // (ever) to implement coherence checks, this filtering should be done by the trait + // solver. 
+ if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() { + continue; + } + let trait_ref = match db.impl_trait(impl_id) { + Some(tr) => tr.instantiate_identity(), + None => continue, }; - let struct_signature = db.struct_signature(s); - if struct_signature.flags.contains(StructFlags::FUNDAMENTAL) { - let next = subs.types().next(); - match next { - Some(it) => ty = it, - None => break ty, + let self_ty = trait_ref.self_ty(); + let interner = DbInterner::new_with(db, None, None); + let entry = map.entry(trait_ref.def_id.0).or_default(); + match simplify_type(interner, self_ty, TreatParams::InstantiateWithInfer) { + Some(self_ty) => { + entry.non_blanket_impls.entry(self_ty).or_default().push(impl_id) } - } else { - break ty; + None => entry.blanket_impls.push(impl_id), } } - _ => break ty, + + // To better support custom derives, collect impls in all unnamed const items. + // const _: () = { ... }; + for konst in module_data.scope.unnamed_consts() { + let body = db.body(konst.into()); + for (_, block_def_map) in body.blocks(db) { + collect(db, block_def_map, map); + } + } } } - }; - // - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type. + } - // FIXME: param coverage - // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`) - let is_not_orphan = trait_ref.args.types().any(|ty| match unwrap_fundamental(ty).kind() { - TyKind::Adt(adt_def, _) => is_local(adt_def.def_id().0.module(db).krate()), - TyKind::Error(_) => true, - TyKind::Dynamic(it, _) => { - it.principal_def_id().is_some_and(|trait_id| is_local(trait_id.0.module(db).krate())) + pub fn blanket_impls(&self, for_trait: TraitId) -> &[ImplId] { + self.map.get(&for_trait).map(|it| &*it.blanket_impls).unwrap_or_default() + } + + /// Queries whether `self_ty` has potentially applicable implementations of `trait_`. 
+ pub fn has_impls_for_trait_and_self_ty( + &self, + trait_: TraitId, + self_ty: &SimplifiedType, + ) -> bool { + self.map.get(&trait_).is_some_and(|trait_impls| { + trait_impls.non_blanket_impls.contains_key(self_ty) + || !trait_impls.blanket_impls.is_empty() + }) + } + + pub fn for_trait_and_self_ty(&self, trait_: TraitId, self_ty: &SimplifiedType) -> &[ImplId] { + self.map + .get(&trait_) + .and_then(|map| map.non_blanket_impls.get(self_ty)) + .map(|it| &**it) + .unwrap_or_default() + } + + pub fn for_trait(&self, trait_: TraitId, mut callback: impl FnMut(&[ImplId])) { + if let Some(impls) = self.map.get(&trait_) { + callback(&impls.blanket_impls); + for impls in impls.non_blanket_impls.values() { + callback(impls); + } } - _ => false, - }); - #[allow(clippy::let_and_return)] - is_not_orphan -} + } -/// To be used from `hir` only. -pub fn iterate_path_candidates<'db>( - ty: &Canonical<'db, Ty<'db>>, - db: &'db dyn HirDatabase, - env: Arc<TraitEnvironment<'db>>, - traits_in_scope: &FxHashSet<TraitId>, - visible_from_module: VisibleFromModule, - name: Option<&Name>, - callback: &mut dyn MethodCandidateCallback, -) -> ControlFlow<()> { - iterate_method_candidates_dyn( - ty, - db, - env, - traits_in_scope, - visible_from_module, - name, - LookupMode::Path, - // the adjustments are not relevant for path lookup - callback, - ) -} + pub fn for_self_ty(&self, self_ty: &SimplifiedType, mut callback: impl FnMut(&[ImplId])) { + for for_trait in self.map.values() { + if let Some(for_ty) = for_trait.non_blanket_impls.get(self_ty) { + callback(for_ty); + } + } + } -/// To be used from `hir` only. 
-pub fn iterate_method_candidates_dyn<'db>( - ty: &Canonical<'db, Ty<'db>>, - db: &'db dyn HirDatabase, - env: Arc<TraitEnvironment<'db>>, - traits_in_scope: &FxHashSet<TraitId>, - visible_from_module: VisibleFromModule, - name: Option<&Name>, - mode: LookupMode, - callback: &mut dyn MethodCandidateCallback, -) -> ControlFlow<()> { - iterate_method_candidates_dyn_impl( - ty, - &mut InferenceTable::new(db, env, None), - traits_in_scope, - visible_from_module, - name, - mode, - callback, - ) -} + pub fn for_each_crate_and_block( + db: &dyn HirDatabase, + krate: Crate, + block: Option<BlockId>, + for_each: &mut dyn FnMut(&TraitImpls), + ) { + let blocks = std::iter::successors(block, |block| block.loc(db).module.containing_block()); + blocks.filter_map(|block| Self::for_block(db, block).as_deref()).for_each(&mut *for_each); + Self::for_crate_and_deps(db, krate).iter().map(|it| &**it).for_each(for_each); + } -fn iterate_method_candidates_dyn_impl<'db>( - ty: &Canonical<'db, Ty<'db>>, - table: &mut InferenceTable<'db>, - traits_in_scope: &FxHashSet<TraitId>, - visible_from_module: VisibleFromModule, - name: Option<&Name>, - mode: LookupMode, - callback: &mut dyn MethodCandidateCallback, -) -> ControlFlow<()> { - let _p = tracing::info_span!( - "iterate_method_candidates_dyn", - ?mode, - ?name, - traits_in_scope_len = traits_in_scope.len() - ) - .entered(); + /// Like [`Self::for_each_crate_and_block()`], but takes in account two blocks, one for a trait and one for a self type. + pub fn for_each_crate_and_block_trait_and_type( + db: &dyn HirDatabase, + krate: Crate, + type_block: Option<BlockId>, + trait_block: Option<BlockId>, + for_each: &mut dyn FnMut(&TraitImpls), + ) { + let in_self_and_deps = TraitImpls::for_crate_and_deps(db, krate); + in_self_and_deps.iter().for_each(|impls| for_each(impls)); - match mode { - LookupMode::MethodCall => { - // For method calls, rust first does any number of autoderef, and - // then one autoref (i.e. 
when the method takes &self or &mut self). - // Note that when we've got a receiver like &S, even if the method - // we find in the end takes &self, we still do the autoderef step - // (just as rustc does an autoderef and then autoref again). - - // We have to be careful about the order we're looking at candidates - // in here. Consider the case where we're resolving `it.clone()` - // where `it: &Vec<_>`. This resolves to the clone method with self - // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where - // the receiver type exactly matches before cases where we have to - // do autoref. But in the autoderef steps, the `&_` self type comes - // up *before* the `Vec<_>` self type. - // - // On the other hand, we don't want to just pick any by-value method - // before any by-autoref method; it's just that we need to consider - // the methods by autoderef order of *receiver types*, not *self - // types*. - - table.run_in_snapshot(|table| { - let ty = table.instantiate_canonical(*ty); - let deref_chain = autoderef_method_receiver(table, ty); - - deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| { - iterate_method_candidates_with_autoref( - table, - receiver_ty, - adj, - traits_in_scope, - visible_from_module, - name, - callback, - ) + // We must not provide duplicate impls to the solver. Therefore we work with the following strategy: + // start from each block, and walk ancestors until you meet the other block. If they never meet, + // that means there can't be duplicate impls; if they meet, we stop the search of the deeper block. + // This breaks when they are equal (both will stop immediately), therefore we handle this case + // specifically. 
+ let blocks_iter = |block: Option<BlockId>| { + std::iter::successors(block, |block| block.loc(db).module.containing_block()) + }; + let for_each_block = |current_block: Option<BlockId>, other_block: Option<BlockId>| { + blocks_iter(current_block) + .take_while(move |&block| { + other_block.is_none_or(|other_block| other_block != block) }) - }) - } - LookupMode::Path => { - // No autoderef for path lookups - iterate_method_candidates_for_self_ty( - ty, - table, - traits_in_scope, - visible_from_module, - name, - callback, - ) - } - } -} - -#[tracing::instrument(skip_all, fields(name = ?name))] -fn iterate_method_candidates_with_autoref<'db>( - table: &mut InferenceTable<'db>, - receiver_ty: Canonical<'db, Ty<'db>>, - first_adjustment: ReceiverAdjustments, - traits_in_scope: &FxHashSet<TraitId>, - visible_from_module: VisibleFromModule, - name: Option<&Name>, - callback: &mut dyn MethodCandidateCallback, -) -> ControlFlow<()> { - let interner = table.interner(); - - let mut iterate_method_candidates_by_receiver = move |receiver_ty, first_adjustment| { - iterate_method_candidates_by_receiver( - table, - receiver_ty, - first_adjustment, - traits_in_scope, - visible_from_module, - name, - callback, - ) - }; - - let mut maybe_reborrowed = first_adjustment.clone(); - if let TyKind::Ref(_, _, m) = receiver_ty.value.kind() { - // Prefer reborrow of references to move - maybe_reborrowed.autoref = Some(AutorefOrPtrAdjustment::Autoref(m)); - maybe_reborrowed.autoderefs += 1; - } - - iterate_method_candidates_by_receiver(receiver_ty, maybe_reborrowed)?; - - let refed = Canonical { - max_universe: receiver_ty.max_universe, - variables: receiver_ty.variables, - value: Ty::new_ref(interner, Region::error(interner), receiver_ty.value, Mutability::Not), - }; - - iterate_method_candidates_by_receiver( - refed, - first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Not)), - )?; - - let ref_muted = Canonical { - max_universe: receiver_ty.max_universe, - 
variables: receiver_ty.variables, - value: Ty::new_ref(interner, Region::error(interner), receiver_ty.value, Mutability::Mut), - }; - - iterate_method_candidates_by_receiver( - ref_muted, - first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Mut)), - )?; - - if let TyKind::RawPtr(ty, Mutability::Mut) = receiver_ty.value.kind() { - let const_ptr_ty = rustc_type_ir::Canonical { - max_universe: rustc_type_ir::UniverseIndex::ZERO, - value: Ty::new_ptr(interner, ty, Mutability::Not), - variables: receiver_ty.variables, + .filter_map(move |block| TraitImpls::for_block(db, block).as_deref()) }; - iterate_method_candidates_by_receiver( - const_ptr_ty, - first_adjustment.with_autoref(AutorefOrPtrAdjustment::ToConstPtr), - )?; - } - - ControlFlow::Continue(()) -} - -pub trait MethodCandidateCallback { - fn on_inherent_method( - &mut self, - adjustments: ReceiverAdjustments, - item: AssocItemId, - is_visible: bool, - ) -> ControlFlow<()>; - - fn on_trait_method( - &mut self, - adjustments: ReceiverAdjustments, - item: AssocItemId, - is_visible: bool, - ) -> ControlFlow<()>; -} - -impl<F> MethodCandidateCallback for F -where - F: FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, -{ - fn on_inherent_method( - &mut self, - adjustments: ReceiverAdjustments, - item: AssocItemId, - is_visible: bool, - ) -> ControlFlow<()> { - self(adjustments, item, is_visible) - } - - fn on_trait_method( - &mut self, - adjustments: ReceiverAdjustments, - item: AssocItemId, - is_visible: bool, - ) -> ControlFlow<()> { - self(adjustments, item, is_visible) - } -} - -#[tracing::instrument(skip_all, fields(name = ?name))] -fn iterate_method_candidates_by_receiver<'db>( - table: &mut InferenceTable<'db>, - receiver_ty: Canonical<'db, Ty<'db>>, - receiver_adjustments: ReceiverAdjustments, - traits_in_scope: &FxHashSet<TraitId>, - visible_from_module: VisibleFromModule, - name: Option<&Name>, - callback: &mut dyn MethodCandidateCallback, -) -> ControlFlow<()> { - let 
receiver_ty = table.instantiate_canonical(receiver_ty); - // We're looking for methods with *receiver* type receiver_ty. These could - // be found in any of the derefs of receiver_ty, so we have to go through - // that, including raw derefs. - table.run_in_snapshot(|table| { - let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty) - .include_raw_pointers() - .use_receiver_trait(); - while let Some((self_ty, _)) = autoderef.next() { - iterate_inherent_methods( - self_ty, - autoderef.table, - name, - Some(receiver_ty), - Some(receiver_adjustments.clone()), - visible_from_module, - LookupMode::MethodCall, - &mut |adjustments, item, is_visible| { - callback.on_inherent_method(adjustments, item, is_visible) - }, - )? - } - ControlFlow::Continue(()) - })?; - table.run_in_snapshot(|table| { - let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty) - .include_raw_pointers() - .use_receiver_trait(); - while let Some((self_ty, _)) = autoderef.next() { - if matches!(self_ty.kind(), TyKind::Infer(rustc_type_ir::TyVar(_))) { - // don't try to resolve methods on unknown types - return ControlFlow::Continue(()); - } - - iterate_trait_method_candidates( - self_ty, - autoderef.table, - traits_in_scope, - name, - Some(receiver_ty), - Some(receiver_adjustments.clone()), - LookupMode::MethodCall, - &mut |adjustments, item, is_visible| { - callback.on_trait_method(adjustments, item, is_visible) - }, - )? 
- } - ControlFlow::Continue(()) - }) -} - -#[tracing::instrument(skip_all, fields(name = ?name))] -fn iterate_method_candidates_for_self_ty<'db>( - self_ty: &Canonical<'db, Ty<'db>>, - table: &mut InferenceTable<'db>, - traits_in_scope: &FxHashSet<TraitId>, - visible_from_module: VisibleFromModule, - name: Option<&Name>, - callback: &mut dyn MethodCandidateCallback, -) -> ControlFlow<()> { - table.run_in_snapshot(|table| { - let self_ty = table.instantiate_canonical(*self_ty); - iterate_inherent_methods( - self_ty, - table, - name, - None, - None, - visible_from_module, - LookupMode::Path, - &mut |adjustments, item, is_visible| { - callback.on_inherent_method(adjustments, item, is_visible) - }, - )?; - iterate_trait_method_candidates( - self_ty, - table, - traits_in_scope, - name, - None, - None, - LookupMode::Path, - &mut |adjustments, item, is_visible| { - callback.on_trait_method(adjustments, item, is_visible) - }, - ) - }) -} - -#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))] -fn iterate_trait_method_candidates<'db>( - self_ty: Ty<'db>, - table: &mut InferenceTable<'db>, - traits_in_scope: &FxHashSet<TraitId>, - name: Option<&Name>, - receiver_ty: Option<Ty<'db>>, - receiver_adjustments: Option<ReceiverAdjustments>, - mode: LookupMode, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, -) -> ControlFlow<()> { - let db = table.db; - - let canonical_self_ty = table.canonicalize(self_ty); - let krate = table.trait_env.krate; - - 'traits: for &t in traits_in_scope { - let data = db.trait_signature(t); - - // Traits annotated with `#[rustc_skip_during_method_dispatch]` are skipped during - // method resolution, if the receiver is an array, and we're compiling for editions before - // 2021. - // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for - // arrays. 
- if data.flags.contains(TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH) - && matches!(self_ty.kind(), TyKind::Array(..)) - { - // FIXME: this should really be using the edition of the method name's span, in case it - // comes from a macro - if !krate.data(db).edition.at_least_2021() { - continue; - } - } - if data.flags.contains(TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) - && matches!( - self_ty.kind(), TyKind::Adt(adt_def, subst) - if is_box(table.db, adt_def.def_id().0) - && matches!(subst.type_at(0).kind(), TyKind::Slice(..)) - ) - { - // FIXME: this should really be using the edition of the method name's span, in case it - // comes from a macro - if !krate.data(db).edition.at_least_2024() { - continue; - } - } - - // we'll be lazy about checking whether the type implements the - // trait, but if we find out it doesn't, we'll skip the rest of the - // iteration - let mut known_implemented = false; - for &(_, item) in t.trait_items(db).items.iter() { - // Don't pass a `visible_from_module` down to `is_valid_candidate`, - // since only inherent methods should be included into visibility checking. 
- let visible = match is_valid_trait_method_candidate( - table, - t, - name, - receiver_ty, - item, - self_ty, - mode, - ) { - IsValidCandidate::Yes => true, - IsValidCandidate::NotVisible => false, - IsValidCandidate::No => continue, - }; - if !known_implemented { - let goal = generic_implements_goal_ns(table, t, canonical_self_ty); - if next_trait_solve_canonical_in_ctxt(&table.infer_ctxt, goal).no_solution() { - continue 'traits; - } - } - known_implemented = true; - callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; - } - } - ControlFlow::Continue(()) -} - -#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))] -fn iterate_inherent_methods<'db>( - self_ty: Ty<'db>, - table: &mut InferenceTable<'db>, - name: Option<&Name>, - receiver_ty: Option<Ty<'db>>, - receiver_adjustments: Option<ReceiverAdjustments>, - visible_from_module: VisibleFromModule, - mode: LookupMode, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, -) -> ControlFlow<()> { - let db = table.db; - let env = table.trait_env.clone(); - - // For trait object types and placeholder types with trait bounds, the methods of the trait and - // its super traits are considered inherent methods. This matters because these methods have - // higher priority than the other traits' methods, which would be considered in - // `iterate_trait_method_candidates()` only after this function. 
- match self_ty.kind() { - TyKind::Param(_) => { - let env = table.trait_env.clone(); - let traits = - env.traits_in_scope_from_clauses(self_ty).flat_map(|t| all_super_traits(db, t)); - iterate_inherent_trait_methods( - self_ty, - table, - name, - receiver_ty, - receiver_adjustments.clone(), - callback, - traits, - mode, - )?; - } - TyKind::Dynamic(bounds, _) => { - if let Some(principal_trait) = bounds.principal_def_id() { - let traits = all_super_traits(db, principal_trait.0); - iterate_inherent_trait_methods( - self_ty, - table, - name, - receiver_ty, - receiver_adjustments.clone(), - callback, - traits.into_iter(), - mode, - )?; - } - } - _ => {} - } - - let def_crates = match def_crates(db, self_ty, env.krate) { - Some(k) => k, - None => return ControlFlow::Continue(()), - }; - - let (module, mut block) = match visible_from_module { - VisibleFromModule::Filter(module) => (Some(module), module.containing_block()), - VisibleFromModule::IncludeBlock(block) => (None, Some(block)), - VisibleFromModule::None => (None, None), - }; - - while let Some(block_id) = block { - if let Some(impls) = db.inherent_impls_in_block(block_id) { - impls_for_self_ty( - &impls, - self_ty, - table, - name, - receiver_ty, - receiver_adjustments.clone(), - module, - callback, - )?; - } - - block = block_def_map(db, block_id).parent().and_then(|module| module.containing_block()); - } - - for krate in def_crates { - let impls = db.inherent_impls_in_crate(krate); - impls_for_self_ty( - &impls, - self_ty, - table, - name, - receiver_ty, - receiver_adjustments.clone(), - module, - callback, - )?; - } - return ControlFlow::Continue(()); - - #[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))] - fn iterate_inherent_trait_methods<'db>( - self_ty: Ty<'db>, - table: &mut InferenceTable<'db>, - name: Option<&Name>, - receiver_ty: Option<Ty<'db>>, - receiver_adjustments: Option<ReceiverAdjustments>, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, 
bool) -> ControlFlow<()>, - traits: impl Iterator<Item = TraitId>, - mode: LookupMode, - ) -> ControlFlow<()> { - let db = table.db; - for t in traits { - let data = t.trait_items(db); - for &(_, item) in data.items.iter() { - // We don't pass `visible_from_module` as all trait items should be visible. - let visible = match is_valid_trait_method_candidate( - table, - t, - name, - receiver_ty, - item, - self_ty, - mode, - ) { - IsValidCandidate::Yes => true, - IsValidCandidate::NotVisible => false, - IsValidCandidate::No => continue, - }; - callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; - } - } - ControlFlow::Continue(()) - } - - #[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))] - fn impls_for_self_ty<'db>( - impls: &InherentImpls, - self_ty: Ty<'db>, - table: &mut InferenceTable<'db>, - name: Option<&Name>, - receiver_ty: Option<Ty<'db>>, - receiver_adjustments: Option<ReceiverAdjustments>, - visible_from_module: Option<ModuleId>, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, - ) -> ControlFlow<()> { - for &impl_id in impls.for_self_ty(self_ty) { - for &(ref item_name, item) in impl_id.impl_items(table.db).items.iter() { - let visible = match is_valid_impl_method_candidate( - table, - self_ty, - receiver_ty, - visible_from_module, - name, - impl_id, - item, - item_name, - ) { - IsValidCandidate::Yes => true, - IsValidCandidate::NotVisible => false, - IsValidCandidate::No => continue, - }; - callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; - } - } - ControlFlow::Continue(()) - } -} - -/// Returns the receiver type for the index trait call. 
-pub(crate) fn resolve_indexing_op<'db>( - table: &mut InferenceTable<'db>, - ty: Canonical<'db, Ty<'db>>, - index_trait: TraitId, -) -> Option<ReceiverAdjustments> { - let ty = table.instantiate_canonical(ty); - let deref_chain = autoderef_method_receiver(table, ty); - for (ty, adj) in deref_chain { - let goal = generic_implements_goal_ns(table, index_trait, ty); - if !next_trait_solve_canonical_in_ctxt(&table.infer_ctxt, goal).no_solution() { - return Some(adj); - } - } - None -} - -// FIXME: Replace this with a `Try` impl once stable -macro_rules! check_that { - ($cond:expr) => { - if !$cond { - return IsValidCandidate::No; - } - }; -} - -#[derive(Debug)] -enum IsValidCandidate { - Yes, - No, - NotVisible, -} - -#[tracing::instrument(skip_all, fields(name))] -fn is_valid_impl_method_candidate<'db>( - table: &mut InferenceTable<'db>, - self_ty: Ty<'db>, - receiver_ty: Option<Ty<'db>>, - visible_from_module: Option<ModuleId>, - name: Option<&Name>, - impl_id: ImplId, - item: AssocItemId, - item_name: &Name, -) -> IsValidCandidate { - match item { - AssocItemId::FunctionId(f) => is_valid_impl_fn_candidate( - table, - impl_id, - f, - name, - receiver_ty, - self_ty, - visible_from_module, - item_name, - ), - AssocItemId::ConstId(c) => { - let db = table.db; - check_that!(receiver_ty.is_none()); - check_that!(name.is_none_or(|n| n == item_name)); - - if let Some(from_module) = visible_from_module - && !db.assoc_visibility(c.into()).is_visible_from(db, from_module) - { - cov_mark::hit!(const_candidate_not_visible); - return IsValidCandidate::NotVisible; - } - let self_ty_matches = table.run_in_snapshot(|table| { - let impl_args = table.fresh_args_for_item(impl_id.into()); - let expected_self_ty = - db.impl_self_ty(impl_id).instantiate(table.interner(), impl_args); - table.unify(expected_self_ty, self_ty) - }); - if !self_ty_matches { - cov_mark::hit!(const_candidate_self_type_mismatch); - return IsValidCandidate::No; - } - IsValidCandidate::Yes - } - _ => 
IsValidCandidate::No, - } -} - -/// Checks whether a given `AssocItemId` is applicable for `receiver_ty`. -#[tracing::instrument(skip_all, fields(name))] -fn is_valid_trait_method_candidate<'db>( - table: &mut InferenceTable<'db>, - trait_id: TraitId, - name: Option<&Name>, - receiver_ty: Option<Ty<'db>>, - item: AssocItemId, - self_ty: Ty<'db>, - mode: LookupMode, -) -> IsValidCandidate { - let db = table.db; - match item { - AssocItemId::FunctionId(fn_id) => { - let data = db.function_signature(fn_id); - - check_that!(name.is_none_or(|n| n == &data.name)); - - table.run_in_snapshot(|table| { - let impl_subst = table.fresh_args_for_item(trait_id.into()); - let expect_self_ty = impl_subst.type_at(0); - - check_that!(table.unify(expect_self_ty, self_ty)); - - if let Some(receiver_ty) = receiver_ty { - check_that!(data.has_self_param()); - - let args = table.fill_rest_fresh_args(fn_id.into(), impl_subst); - - let sig = db.callable_item_signature(fn_id.into()); - let expected_receiver = sig - .map_bound(|s| s.skip_binder().inputs_and_output.as_slice()[0]) - .instantiate(table.interner(), args); - - // FIXME: Clean up this mess with some context struct like rustc's `ProbeContext` - let variance = match mode { - LookupMode::MethodCall => rustc_type_ir::Variance::Covariant, - LookupMode::Path => rustc_type_ir::Variance::Invariant, - }; - let res = table - .infer_ctxt - .at(&ObligationCause::dummy(), table.trait_env.env) - .relate(expected_receiver, variance, receiver_ty); - let Ok(infer_ok) = res else { - return IsValidCandidate::No; - }; - - if !infer_ok.obligations.is_empty() { - let mut ctxt = ObligationCtxt::new(&table.infer_ctxt); - ctxt.register_obligations(infer_ok.into_obligations()); - // FIXME: Are we doing this correctly? Probably better to follow rustc more closely. 
- check_that!(ctxt.try_evaluate_obligations().is_empty()); - } - - check_that!(table.unify(receiver_ty, expected_receiver)); - } - - IsValidCandidate::Yes - }) - } - AssocItemId::ConstId(c) => { - check_that!(receiver_ty.is_none()); - check_that!(name.is_none_or(|n| db.const_signature(c).name.as_ref() == Some(n))); - - IsValidCandidate::Yes - } - _ => IsValidCandidate::No, - } -} - -#[tracing::instrument(skip_all, fields(name))] -fn is_valid_impl_fn_candidate<'db>( - table: &mut InferenceTable<'db>, - impl_id: ImplId, - fn_id: FunctionId, - name: Option<&Name>, - receiver_ty: Option<Ty<'db>>, - self_ty: Ty<'db>, - visible_from_module: Option<ModuleId>, - item_name: &Name, -) -> IsValidCandidate { - check_that!(name.is_none_or(|n| n == item_name)); - - let db = table.db; - let data = db.function_signature(fn_id); - - if let Some(from_module) = visible_from_module - && !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module) - { - cov_mark::hit!(autoderef_candidate_not_visible); - return IsValidCandidate::NotVisible; - } - table.run_in_snapshot(|table| { - let _p = tracing::info_span!("subst_for_def").entered(); - let impl_subst = table.infer_ctxt.fresh_args_for_item(impl_id.into()); - let expect_self_ty = db.impl_self_ty(impl_id).instantiate(table.interner(), &impl_subst); - - check_that!(table.unify(expect_self_ty, self_ty)); - - if let Some(receiver_ty) = receiver_ty { - let _p = tracing::info_span!("check_receiver_ty").entered(); - check_that!(data.has_self_param()); - - let args = table.infer_ctxt.fresh_args_for_item(fn_id.into()); - - let sig = db.callable_item_signature(fn_id.into()); - let expected_receiver = sig - .map_bound(|s| s.skip_binder().inputs_and_output.as_slice()[0]) - .instantiate(table.interner(), args); - - check_that!(table.unify(receiver_ty, expected_receiver)); - } - - // We need to consider the bounds on the impl to distinguish functions of the same name - // for a type. 
- let predicates = db.generic_predicates(impl_id.into()); - let Some(predicates) = predicates.instantiate(table.interner(), impl_subst) else { - return IsValidCandidate::Yes; - }; - - let mut ctxt = ObligationCtxt::new(&table.infer_ctxt); - - ctxt.register_obligations(predicates.into_iter().map(|p| { - PredicateObligation::new( - table.interner(), - ObligationCause::new(), - table.trait_env.env, - p.0, - ) - })); - - if ctxt.try_evaluate_obligations().is_empty() { - IsValidCandidate::Yes + if trait_block == type_block { + blocks_iter(trait_block) + .filter_map(|block| TraitImpls::for_block(db, block).as_deref()) + .for_each(for_each); } else { - IsValidCandidate::No + for_each_block(trait_block, type_block).for_each(&mut *for_each); + for_each_block(type_block, trait_block).for_each(for_each); } - }) -} - -/// This creates Substs for a trait with the given Self type and type variables -/// for all other parameters, to query the trait solver with it. -#[tracing::instrument(skip_all)] -fn generic_implements_goal_ns<'db>( - table: &mut InferenceTable<'db>, - trait_: TraitId, - self_ty: Canonical<'db, Ty<'db>>, -) -> Canonical<'db, Goal<'db, Predicate<'db>>> { - let args = table.infer_ctxt.fresh_args_for_item(SolverDefId::TraitId(trait_)); - let self_ty = table.instantiate_canonical(self_ty); - let trait_ref = - rustc_type_ir::TraitRef::new_from_args(table.infer_ctxt.interner, trait_.into(), args) - .with_replaced_self_ty(table.infer_ctxt.interner, self_ty); - let goal = Goal::new(table.infer_ctxt.interner, table.trait_env.env, trait_ref); - - table.canonicalize(goal) -} - -fn autoderef_method_receiver<'db>( - table: &mut InferenceTable<'db>, - ty: Ty<'db>, -) -> Vec<(Canonical<'db, Ty<'db>>, ReceiverAdjustments)> { - let interner = table.interner(); - let mut deref_chain = Vec::new(); - let mut autoderef = autoderef::Autoderef::new_no_tracking(table, ty).use_receiver_trait(); - while let Some((ty, derefs)) = autoderef.next() { - deref_chain.push(( - 
autoderef.table.canonicalize(ty), - ReceiverAdjustments { autoref: None, autoderefs: derefs, unsize_array: false }, - )); } - // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!) - if let Some((rustc_type_ir::Array(parameters, _), variables, max_universe, adj)) = - deref_chain.last().map(|d| (d.0.value.kind(), d.0.variables, d.0.max_universe, d.1.clone())) - { - let unsized_ty = Ty::new_slice(interner, parameters); - deref_chain.push(( - Canonical { max_universe, value: unsized_ty, variables }, - ReceiverAdjustments { unsize_array: true, ..adj.clone() }, - )); - } - deref_chain }
diff --git a/crates/hir-ty/src/method_resolution/confirm.rs b/crates/hir-ty/src/method_resolution/confirm.rs new file mode 100644 index 0000000..9e8791e --- /dev/null +++ b/crates/hir-ty/src/method_resolution/confirm.rs
@@ -0,0 +1,616 @@ +//! Confirmation step of method selection, meaning ensuring the selected candidate +//! is valid and registering all obligations. + +use hir_def::{ + FunctionId, GenericDefId, GenericParamId, ItemContainerId, TraitId, + expr_store::path::{GenericArg as HirGenericArg, GenericArgs as HirGenericArgs}, + hir::{ExprId, generics::GenericParamDataRef}, + lang_item::LangItem, +}; +use rustc_type_ir::{ + TypeFoldable, + elaborate::elaborate, + inherent::{BoundExistentialPredicates, IntoKind, SliceLike, Ty as _}, +}; +use tracing::debug; + +use crate::{ + Adjust, Adjustment, AutoBorrow, IncorrectGenericsLenKind, InferenceDiagnostic, + LifetimeElisionKind, PointerCast, + db::HirDatabase, + infer::{AllowTwoPhase, AutoBorrowMutability, InferenceContext, TypeMismatch}, + lower::{ + GenericPredicates, + path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings}, + }, + method_resolution::{CandidateId, MethodCallee, probe}, + next_solver::{ + Binder, Clause, ClauseKind, Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, FnSig, + GenericArg, GenericArgs, ParamConst, PolyExistentialTraitRef, PolyTraitRef, Region, + TraitRef, Ty, TyKind, + infer::{ + BoundRegionConversionTime, InferCtxt, + traits::{ObligationCause, PredicateObligation}, + }, + util::{clauses_as_obligations, upcast_choices}, + }, +}; + +struct ConfirmContext<'a, 'b, 'db> { + ctx: &'a mut InferenceContext<'b, 'db>, + candidate: FunctionId, + expr: ExprId, +} + +#[derive(Debug)] +pub(crate) struct ConfirmResult<'db> { + pub(crate) callee: MethodCallee<'db>, + pub(crate) illegal_sized_bound: bool, + pub(crate) adjustments: Box<[Adjustment<'db>]>, +} + +impl<'a, 'db> InferenceContext<'a, 'db> { + pub(crate) fn confirm_method( + &mut self, + pick: &probe::Pick<'db>, + unadjusted_self_ty: Ty<'db>, + expr: ExprId, + generic_args: Option<&HirGenericArgs>, + ) -> ConfirmResult<'db> { + debug!( + "confirm(unadjusted_self_ty={:?}, pick={:?}, generic_args={:?})", + unadjusted_self_ty, pick, 
generic_args, + ); + + let CandidateId::FunctionId(candidate) = pick.item else { + panic!("confirmation is only done for method calls, not path lookups"); + }; + let mut confirm_cx = ConfirmContext::new(self, candidate, expr); + confirm_cx.confirm(unadjusted_self_ty, pick, generic_args) + } +} + +impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { + fn new( + ctx: &'a mut InferenceContext<'b, 'db>, + candidate: FunctionId, + expr: ExprId, + ) -> ConfirmContext<'a, 'b, 'db> { + ConfirmContext { ctx, candidate, expr } + } + + #[inline] + fn db(&self) -> &'db dyn HirDatabase { + self.ctx.table.infer_ctxt.interner.db + } + + #[inline] + fn interner(&self) -> DbInterner<'db> { + self.ctx.table.infer_ctxt.interner + } + + #[inline] + fn infcx(&self) -> &InferCtxt<'db> { + &self.ctx.table.infer_ctxt + } + + fn confirm( + &mut self, + unadjusted_self_ty: Ty<'db>, + pick: &probe::Pick<'db>, + generic_args: Option<&HirGenericArgs>, + ) -> ConfirmResult<'db> { + // Adjust the self expression the user provided and obtain the adjusted type. + let (self_ty, adjustments) = self.adjust_self_ty(unadjusted_self_ty, pick); + + // Create generic args for the method's type parameters. + let rcvr_args = self.fresh_receiver_args(self_ty, pick); + let all_args = self.instantiate_method_args(generic_args, rcvr_args); + + debug!("rcvr_args={rcvr_args:?}, all_args={all_args:?}"); + + // Create the final signature for the method, replacing late-bound regions. + let (method_sig, method_predicates) = + self.instantiate_method_sig(pick, all_args.as_slice()); + + // If there is a `Self: Sized` bound and `Self` is a trait object, it is possible that + // something which derefs to `Self` actually implements the trait and the caller + // wanted to make a static dispatch on it but forgot to import the trait. + // See test `tests/ui/issues/issue-35976.rs`. 
+ // + // In that case, we'll error anyway, but we'll also re-run the search with all traits + // in scope, and if we find another method which can be used, we'll output an + // appropriate hint suggesting to import the trait. + let filler_args = GenericArgs::fill_rest( + self.interner(), + self.candidate.into(), + rcvr_args, + |index, id, _| match id { + GenericParamId::TypeParamId(id) => Ty::new_param(self.interner(), id, index).into(), + GenericParamId::ConstParamId(id) => { + Const::new_param(self.interner(), ParamConst { id, index }).into() + } + GenericParamId::LifetimeParamId(id) => { + Region::new_early_param(self.interner(), EarlyParamRegion { id, index }).into() + } + }, + ); + let illegal_sized_bound = self.predicates_require_illegal_sized_bound( + GenericPredicates::query_all(self.db(), self.candidate.into()) + .iter_instantiated_copied(self.interner(), filler_args.as_slice()), + ); + + // Unify the (adjusted) self type with what the method expects. + // + // SUBTLE: if we want good error messages, because of "guessing" while matching + // traits, no trait system method can be called before this point because they + // could alter our Self-type, except for normalizing the receiver from the + // signature (which is also done during probing). + let method_sig_rcvr = method_sig.inputs().as_slice()[0]; + debug!( + "confirm: self_ty={:?} method_sig_rcvr={:?} method_sig={:?}", + self_ty, method_sig_rcvr, method_sig + ); + self.unify_receivers(self_ty, method_sig_rcvr, pick); + + // Make sure nobody calls `drop()` explicitly. + self.check_for_illegal_method_calls(); + + // Lint when an item is shadowing a supertrait item. + self.lint_shadowed_supertrait_items(pick); + + // Add any trait/regions obligations specified on the method's type parameters. + // We won't add these if we encountered an illegal sized bound, so that we can use + // a custom error in that case. 
+ if !illegal_sized_bound { + self.add_obligations(method_sig, all_args, method_predicates); + } + + // Create the final `MethodCallee`. + let callee = MethodCallee { def_id: self.candidate, args: all_args, sig: method_sig }; + ConfirmResult { callee, illegal_sized_bound, adjustments } + } + + /////////////////////////////////////////////////////////////////////////// + // ADJUSTMENTS + + fn adjust_self_ty( + &mut self, + unadjusted_self_ty: Ty<'db>, + pick: &probe::Pick<'db>, + ) -> (Ty<'db>, Box<[Adjustment<'db>]>) { + // Commit the autoderefs by calling `autoderef` again, but this + // time writing the results into the various typeck results. + let mut autoderef = self.ctx.table.autoderef_with_tracking(unadjusted_self_ty); + let Some((mut target, n)) = autoderef.nth(pick.autoderefs) else { + return (Ty::new_error(self.interner(), ErrorGuaranteed), Box::new([])); + }; + assert_eq!(n, pick.autoderefs); + + let mut adjustments = + self.ctx.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok()); + match pick.autoref_or_ptr_adjustment { + Some(probe::AutorefOrPtrAdjustment::Autoref { mutbl, unsize }) => { + let region = self.infcx().next_region_var(); + // Type we're wrapping in a reference, used later for unsizing + let base_ty = target; + + target = Ty::new_ref(self.interner(), region, target, mutbl); + + // Method call receivers are the primary use case + // for two-phase borrows. 
+ let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes); + + adjustments + .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target }); + + if unsize { + let unsized_ty = if let TyKind::Array(elem_ty, _) = base_ty.kind() { + Ty::new_slice(self.interner(), elem_ty) + } else { + panic!( + "AutorefOrPtrAdjustment's unsize flag should only be set for array ty, found {:?}", + base_ty + ) + }; + target = Ty::new_ref(self.interner(), region, unsized_ty, mutbl.into()); + adjustments + .push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target }); + } + } + Some(probe::AutorefOrPtrAdjustment::ToConstPtr) => { + target = match target.kind() { + TyKind::RawPtr(ty, mutbl) => { + assert!(mutbl.is_mut()); + Ty::new_imm_ptr(self.interner(), ty) + } + other => panic!("Cannot adjust receiver type {other:?} to const ptr"), + }; + + adjustments.push(Adjustment { + kind: Adjust::Pointer(PointerCast::MutToConstPointer), + target, + }); + } + None => {} + } + + (target, adjustments.into_boxed_slice()) + } + + /// Returns a set of generic parameters for the method *receiver* where all type and region + /// parameters are instantiated with fresh variables. This generic parameters does not include any + /// parameters declared on the method itself. + /// + /// Note that this generic parameters may include late-bound regions from the impl level. If so, + /// these are instantiated later in the `instantiate_method_sig` routine. + fn fresh_receiver_args( + &mut self, + self_ty: Ty<'db>, + pick: &probe::Pick<'db>, + ) -> GenericArgs<'db> { + match pick.kind { + probe::InherentImplPick(impl_def_id) => { + self.infcx().fresh_args_for_item(impl_def_id.into()) + } + + probe::ObjectPick(trait_def_id) => { + // If the trait is not object safe (specifically, we care about when + // the receiver is not valid), then there's a chance that we will not + // actually be able to recover the object by derefing the receiver like + // we should if it were valid. 
+ if self.db().dyn_compatibility_of_trait(trait_def_id).is_some() { + return GenericArgs::error_for_item(self.interner(), trait_def_id.into()); + } + + self.extract_existential_trait_ref(self_ty, |this, object_ty, principal| { + // The object data has no entry for the Self + // Type. For the purposes of this method call, we + // instantiate the object type itself. This + // wouldn't be a sound instantiation in all cases, + // since each instance of the object type is a + // different existential and hence could match + // distinct types (e.g., if `Self` appeared as an + // argument type), but those cases have already + // been ruled out when we deemed the trait to be + // "dyn-compatible". + let original_poly_trait_ref = + principal.with_self_ty(this.interner(), object_ty); + let upcast_poly_trait_ref = this.upcast(original_poly_trait_ref, trait_def_id); + let upcast_trait_ref = + this.instantiate_binder_with_fresh_vars(upcast_poly_trait_ref); + debug!( + "original_poly_trait_ref={:?} upcast_trait_ref={:?} target_trait={:?}", + original_poly_trait_ref, upcast_trait_ref, trait_def_id + ); + upcast_trait_ref.args + }) + } + + probe::TraitPick(trait_def_id) => { + // Make a trait reference `$0 : Trait<$1...$n>` + // consisting entirely of type variables. Later on in + // the process we will unify the transformed-self-type + // of the method with the actual type in order to + // unify some of these variables. + self.infcx().fresh_args_for_item(trait_def_id.into()) + } + + probe::WhereClausePick(poly_trait_ref) => { + // Where clauses can have bound regions in them. We need to instantiate + // those to convert from a poly-trait-ref to a trait-ref. 
+ self.instantiate_binder_with_fresh_vars(poly_trait_ref).args + } + } + } + + fn extract_existential_trait_ref<R, F>(&self, self_ty: Ty<'db>, mut closure: F) -> R + where + F: FnMut(&ConfirmContext<'a, 'b, 'db>, Ty<'db>, PolyExistentialTraitRef<'db>) -> R, + { + // If we specified that this is an object method, then the + // self-type ought to be something that can be dereferenced to + // yield an object-type (e.g., `&Object` or `Box<Object>` + // etc). + + let mut autoderef = self.ctx.table.autoderef(self_ty); + + // We don't need to gate this behind arbitrary self types + // per se, but it does make things a bit more gated. + if self.ctx.unstable_features.arbitrary_self_types + || self.ctx.unstable_features.arbitrary_self_types_pointers + { + autoderef = autoderef.use_receiver_trait(); + } + + autoderef + .include_raw_pointers() + .find_map(|(ty, _)| match ty.kind() { + TyKind::Dynamic(data, ..) => Some(closure( + self, + ty, + data.principal().expect("calling trait method on empty object?"), + )), + _ => None, + }) + .unwrap_or_else(|| { + panic!("self-type `{:?}` for ObjectPick never dereferenced to an object", self_ty) + }) + } + + fn instantiate_method_args( + &mut self, + generic_args: Option<&HirGenericArgs>, + parent_args: GenericArgs<'db>, + ) -> GenericArgs<'db> { + struct LowererCtx<'a, 'b, 'db> { + ctx: &'a mut InferenceContext<'b, 'db>, + expr: ExprId, + parent_args: &'a [GenericArg<'db>], + } + + impl<'db> GenericArgsLowerer<'db> for LowererCtx<'_, '_, 'db> { + fn report_len_mismatch( + &mut self, + def: GenericDefId, + provided_count: u32, + expected_count: u32, + kind: IncorrectGenericsLenKind, + ) { + self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsLen { + expr: self.expr, + provided_count, + expected_count, + kind, + def, + }); + } + + fn report_arg_mismatch( + &mut self, + param_id: GenericParamId, + arg_idx: u32, + has_self_arg: bool, + ) { + 
self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsOrder { + expr: self.expr, + param_id, + arg_idx, + has_self_arg, + }); + } + + fn provided_kind( + &mut self, + param_id: GenericParamId, + param: GenericParamDataRef<'_>, + arg: &HirGenericArg, + ) -> GenericArg<'db> { + match (param, arg) { + ( + GenericParamDataRef::LifetimeParamData(_), + HirGenericArg::Lifetime(lifetime), + ) => self.ctx.make_body_lifetime(*lifetime).into(), + (GenericParamDataRef::TypeParamData(_), HirGenericArg::Type(type_ref)) => { + self.ctx.make_body_ty(*type_ref).into() + } + (GenericParamDataRef::ConstParamData(_), HirGenericArg::Const(konst)) => { + let GenericParamId::ConstParamId(const_id) = param_id else { + unreachable!("non-const param ID for const param"); + }; + let const_ty = self.ctx.db.const_param_ty_ns(const_id); + self.ctx.make_body_const(*konst, const_ty).into() + } + _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"), + } + } + + fn provided_type_like_const( + &mut self, + const_ty: Ty<'db>, + arg: TypeLikeConst<'_>, + ) -> Const<'db> { + match arg { + TypeLikeConst::Path(path) => self.ctx.make_path_as_body_const(path, const_ty), + TypeLikeConst::Infer => self.ctx.table.next_const_var(), + } + } + + fn inferred_kind( + &mut self, + _def: GenericDefId, + param_id: GenericParamId, + _param: GenericParamDataRef<'_>, + _infer_args: bool, + _preceding_args: &[GenericArg<'db>], + ) -> GenericArg<'db> { + // Always create an inference var, even when `infer_args == false`. This helps with diagnostics, + // and I think it's also required in the presence of `impl Trait` (that must be inferred). 
+ self.ctx.table.next_var_for_param(param_id) + } + + fn parent_arg(&mut self, param_idx: u32, _param_id: GenericParamId) -> GenericArg<'db> { + self.parent_args[param_idx as usize] + } + + fn report_elided_lifetimes_in_path( + &mut self, + _def: GenericDefId, + _expected_count: u32, + _hard_error: bool, + ) { + unreachable!("we set `LifetimeElisionKind::Infer`") + } + + fn report_elision_failure(&mut self, _def: GenericDefId, _expected_count: u32) { + unreachable!("we set `LifetimeElisionKind::Infer`") + } + + fn report_missing_lifetime(&mut self, _def: GenericDefId, _expected_count: u32) { + unreachable!("we set `LifetimeElisionKind::Infer`") + } + } + + substs_from_args_and_bindings( + self.db(), + self.ctx.body, + generic_args, + self.candidate.into(), + true, + LifetimeElisionKind::Infer, + false, + None, + &mut LowererCtx { ctx: self.ctx, expr: self.expr, parent_args: parent_args.as_slice() }, + ) + } + + fn unify_receivers( + &mut self, + self_ty: Ty<'db>, + method_self_ty: Ty<'db>, + pick: &probe::Pick<'db>, + ) { + debug!( + "unify_receivers: self_ty={:?} method_self_ty={:?} pick={:?}", + self_ty, method_self_ty, pick + ); + let cause = ObligationCause::new(); + match self.ctx.table.at(&cause).sup(method_self_ty, self_ty) { + Ok(infer_ok) => { + self.ctx.table.register_infer_ok(infer_ok); + } + Err(_) => { + if self.ctx.unstable_features.arbitrary_self_types { + self.ctx.result.type_mismatches.insert( + self.expr.into(), + TypeMismatch { expected: method_self_ty, actual: self_ty }, + ); + } + } + } + } + + // NOTE: this returns the *unnormalized* predicates and method sig. Because of + // inference guessing, the predicates and method signature can't be normalized + // until we unify the `Self` type. 
+ fn instantiate_method_sig<'c>( + &mut self, + pick: &probe::Pick<'db>, + all_args: &'c [GenericArg<'db>], + ) -> (FnSig<'db>, impl Iterator<Item = PredicateObligation<'db>> + use<'c, 'db>) { + debug!("instantiate_method_sig(pick={:?}, all_args={:?})", pick, all_args); + + // Instantiate the bounds on the method with the + // type/early-bound-regions instantiations performed. There can + // be no late-bound regions appearing here. + let def_id = self.candidate; + let method_predicates = clauses_as_obligations( + GenericPredicates::query_all(self.db(), def_id.into()) + .iter_instantiated_copied(self.interner(), all_args), + ObligationCause::new(), + self.ctx.table.trait_env.env, + ); + + let sig = + self.db().callable_item_signature(def_id.into()).instantiate(self.interner(), all_args); + debug!("type scheme instantiated, sig={:?}", sig); + + let sig = self.instantiate_binder_with_fresh_vars(sig); + debug!("late-bound lifetimes from method instantiated, sig={:?}", sig); + + (sig, method_predicates) + } + + fn add_obligations( + &mut self, + sig: FnSig<'db>, + all_args: GenericArgs<'db>, + method_predicates: impl Iterator<Item = PredicateObligation<'db>>, + ) { + debug!("add_obligations: sig={:?} all_args={:?}", sig, all_args); + + self.ctx.table.register_predicates(method_predicates); + + // this is a projection from a trait reference, so we have to + // make sure that the trait reference inputs are well-formed. + self.ctx.table.add_wf_bounds(all_args); + + // the function type must also be well-formed (this is not + // implied by the args being well-formed because of inherent + // impls and late-bound regions - see issue #28609). 
+ for ty in sig.inputs_and_output { + self.ctx.table.register_wf_obligation(ty.into(), ObligationCause::new()); + } + } + + /////////////////////////////////////////////////////////////////////////// + // MISCELLANY + + fn predicates_require_illegal_sized_bound( + &self, + predicates: impl Iterator<Item = Clause<'db>>, + ) -> bool { + let Some(sized_def_id) = + LangItem::Sized.resolve_trait(self.db(), self.ctx.resolver.krate()) + else { + return false; + }; + + elaborate(self.interner(), predicates) + // We don't care about regions here. + .filter_map(|pred| match pred.kind().skip_binder() { + ClauseKind::Trait(trait_pred) if trait_pred.def_id().0 == sized_def_id => { + Some(trait_pred) + } + _ => None, + }) + .any(|trait_pred| matches!(trait_pred.self_ty().kind(), TyKind::Dynamic(..))) + } + + fn check_for_illegal_method_calls(&self) { + // Disallow calls to the method `drop` defined in the `Drop` trait. + if let ItemContainerId::TraitId(trait_def_id) = self.candidate.loc(self.db()).container + && LangItem::Drop + .resolve_trait(self.db(), self.ctx.resolver.krate()) + .is_some_and(|drop_trait| drop_trait == trait_def_id) + { + // FIXME: Report an error. + } + } + + #[expect(clippy::needless_return)] + fn lint_shadowed_supertrait_items(&self, pick: &probe::Pick<'_>) { + if pick.shadowed_candidates.is_empty() { + return; + } + + // FIXME: Emit the lint. 
+ } + + fn upcast( + &self, + source_trait_ref: PolyTraitRef<'db>, + target_trait_def_id: TraitId, + ) -> PolyTraitRef<'db> { + let upcast_trait_refs = + upcast_choices(self.interner(), source_trait_ref, target_trait_def_id); + + // must be exactly one trait ref or we'd get an ambig error etc + if let &[upcast_trait_ref] = upcast_trait_refs.as_slice() { + upcast_trait_ref + } else { + Binder::dummy(TraitRef::new_from_args( + self.interner(), + target_trait_def_id.into(), + GenericArgs::error_for_item(self.interner(), target_trait_def_id.into()), + )) + } + } + + fn instantiate_binder_with_fresh_vars<T>(&self, value: Binder<'db, T>) -> T + where + T: TypeFoldable<DbInterner<'db>> + Copy, + { + self.infcx().instantiate_binder_with_fresh_vars(BoundRegionConversionTime::FnCall, value) + } +}
diff --git a/crates/hir-ty/src/method_resolution/probe.rs b/crates/hir-ty/src/method_resolution/probe.rs new file mode 100644 index 0000000..adc144c --- /dev/null +++ b/crates/hir-ty/src/method_resolution/probe.rs
@@ -0,0 +1,2077 @@ +//! Candidate assembly and selection in method resolution - where we enumerate all candidates +//! and choose the best one (or, in some IDE scenarios, just enumerate them all). + +use std::{cell::RefCell, convert::Infallible, ops::ControlFlow}; + +use hir_def::{ + AssocItemId, FunctionId, GenericParamId, ImplId, ItemContainerId, TraitId, + signatures::TraitFlags, +}; +use hir_expand::name::Name; +use rustc_ast_ir::Mutability; +use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_type_ir::{ + InferTy, TypeVisitableExt, Upcast, Variance, + elaborate::{self, supertrait_def_ids}, + fast_reject::{DeepRejectCtxt, TreatParams, simplify_type}, + inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _}, +}; +use smallvec::{SmallVec, smallvec}; +use tracing::{debug, instrument}; + +use self::CandidateKind::*; +pub(super) use self::PickKind::*; +use crate::{ + autoderef::Autoderef, + db::HirDatabase, + lower::GenericPredicates, + method_resolution::{ + CandidateId, CandidateSource, InherentImpls, MethodError, MethodResolutionContext, + incoherent_inherent_impls, simplified_type_module, + }, + next_solver::{ + Binder, Canonical, ClauseKind, DbInterner, FnSig, GenericArg, GenericArgs, Goal, ParamEnv, + PolyTraitRef, Predicate, Region, SimplifiedType, TraitRef, Ty, TyKind, + infer::{ + BoundRegionConversionTime, InferCtxt, InferOk, + canonical::{QueryResponse, canonicalizer::OriginalQueryValues}, + select::{ImplSource, Selection, SelectionResult}, + traits::{Obligation, ObligationCause, PredicateObligation}, + }, + obligation_ctxt::ObligationCtxt, + util::clauses_as_obligations, + }, +}; + +struct ProbeContext<'a, 'db, Choice> { + ctx: &'a MethodResolutionContext<'a, 'db>, + mode: Mode, + + /// This is the OriginalQueryValues for the steps queries + /// that are answered in steps. 
+ orig_steps_var_values: &'a OriginalQueryValues<'db>, + steps: &'a [CandidateStep<'db>], + + inherent_candidates: Vec<Candidate<'db>>, + extension_candidates: Vec<Candidate<'db>>, + impl_dups: FxHashSet<ImplId>, + + /// List of potential private candidates. Will be trimmed to ones that + /// actually apply and then the result inserted into `private_candidate` + private_candidates: Vec<Candidate<'db>>, + + /// Collects near misses when the candidate functions are missing a `self` keyword and is only + /// used for error reporting + static_candidates: Vec<CandidateSource>, + + choice: Choice, +} + +#[derive(Debug)] +pub struct CandidateWithPrivate<'db> { + pub candidate: Candidate<'db>, + pub is_visible: bool, +} + +#[derive(Debug, Clone)] +pub struct Candidate<'db> { + pub item: CandidateId, + pub kind: CandidateKind<'db>, +} + +#[derive(Debug, Clone)] +pub enum CandidateKind<'db> { + InherentImplCandidate { impl_def_id: ImplId, receiver_steps: usize }, + ObjectCandidate(PolyTraitRef<'db>), + TraitCandidate(PolyTraitRef<'db>), + WhereClauseCandidate(PolyTraitRef<'db>), +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +enum ProbeResult { + NoMatch, + Match, +} + +/// When adjusting a receiver we often want to do one of +/// +/// - Add a `&` (or `&mut`), converting the receiver from `T` to `&T` (or `&mut T`) +/// - If the receiver has type `*mut T`, convert it to `*const T` +/// +/// This type tells us which one to do. +/// +/// Note that in principle we could do both at the same time. For example, when the receiver has +/// type `T`, we could autoref it to `&T`, then convert to `*const T`. Or, when it has type `*mut +/// T`, we could convert it to `*const T`, then autoref to `&*const T`. However, currently we do +/// (at most) one of these. Either the receiver has type `T` and we convert it to `&T` (or with +/// `mut`), or it has type `*mut T` and we convert it to `*const T`. 
+#[derive(Debug, PartialEq, Copy, Clone)]
+pub enum AutorefOrPtrAdjustment {
+    /// Receiver has type `T`, add `&` or `&mut` (if `T` is `mut`), and maybe also "unsize" it.
+    /// Unsizing is used to convert a `[T; N]` to `[T]`, which only makes sense when autorefing.
+    Autoref {
+        mutbl: Mutability,
+
+        /// Indicates that the source expression should be "unsized" to a target type.
+        /// This is special-cased for just arrays unsizing to slices.
+        unsize: bool,
+    },
+    /// Receiver has type `*mut T`, convert to `*const T`
+    ToConstPtr,
+}
+
+impl AutorefOrPtrAdjustment {
+    fn get_unsize(&self) -> bool {
+        match self {
+            AutorefOrPtrAdjustment::Autoref { mutbl: _, unsize } => *unsize,
+            AutorefOrPtrAdjustment::ToConstPtr => false,
+        }
+    }
+}
+
+/// Criteria to apply when searching for a given Pick. This is used during
+/// the search for potentially shadowed methods to ensure we don't search
+/// more candidates than strictly necessary.
+#[derive(Debug)]
+struct PickConstraintsForShadowed {
+    autoderefs: usize,
+    receiver_steps: Option<usize>,
+    def_id: CandidateId,
+}
+
+impl PickConstraintsForShadowed {
+    fn may_shadow_based_on_autoderefs(&self, autoderefs: usize) -> bool {
+        autoderefs == self.autoderefs
+    }
+
+    fn candidate_may_shadow(&self, candidate: &Candidate<'_>) -> bool {
+        // An item never shadows itself
+        candidate.item != self.def_id
+            // and we're only concerned about inherent impls doing the shadowing.
+            // Shadowing can only occur if the shadower is further along
+            // the Receiver dereferencing chain than the shadowed.
+            && match candidate.kind {
+                CandidateKind::InherentImplCandidate { receiver_steps, ..
} => match self.receiver_steps { + Some(shadowed_receiver_steps) => receiver_steps > shadowed_receiver_steps, + _ => false + }, + _ => false + } + } +} + +#[derive(Debug, Clone)] +pub struct Pick<'db> { + pub item: CandidateId, + pub kind: PickKind<'db>, + + /// Indicates that the source expression should be autoderef'd N times + /// ```ignore (not-rust) + /// A = expr | *expr | **expr | ... + /// ``` + pub autoderefs: usize, + + /// Indicates that we want to add an autoref (and maybe also unsize it), or if the receiver is + /// `*mut T`, convert it to `*const T`. + pub autoref_or_ptr_adjustment: Option<AutorefOrPtrAdjustment>, + pub self_ty: Ty<'db>, + + /// Number of jumps along the `Receiver::Target` chain we followed + /// to identify this method. Used only for deshadowing errors. + /// Only applies for inherent impls. + pub receiver_steps: Option<usize>, + + /// Candidates that were shadowed by supertraits. + pub shadowed_candidates: Vec<CandidateId>, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum PickKind<'db> { + InherentImplPick(ImplId), + ObjectPick(TraitId), + TraitPick(TraitId), + WhereClausePick( + // Trait + PolyTraitRef<'db>, + ), +} + +pub(crate) type PickResult<'db> = Result<Pick<'db>, MethodError<'db>>; + +#[derive(PartialEq, Eq, Copy, Clone, Debug)] +pub enum Mode { + // An expression of the form `receiver.method_name(...)`. + // Autoderefs are performed on `receiver`, lookup is done based on the + // `self` argument of the method, and static methods aren't considered. + MethodCall, + // An expression of the form `Type::item` or `<T>::item`. + // No autoderefs are performed, lookup is done based on the type each + // implementation is for, and static methods are included. + Path, +} + +#[derive(Debug, Clone)] +pub struct CandidateStep<'db> { + pub self_ty: Canonical<'db, QueryResponse<'db, Ty<'db>>>, + pub self_ty_is_opaque: bool, + pub autoderefs: usize, + /// `true` if the type results from a dereference of a raw pointer. 
+    /// when assembling candidates, we include these steps, but not when
+    /// picking methods. This is so that if we have `foo: *const Foo` and `Foo` has methods
+    /// `fn by_raw_ptr(self: *const Self)` and `fn by_ref(&self)`, then
+    /// `foo.by_raw_ptr()` will work and `foo.by_ref()` won't.
+    pub from_unsafe_deref: bool,
+    pub unsize: bool,
+    /// We will generate CandidateSteps which are reachable via a chain
+    /// of following `Receiver`. The first 'n' of those will be reachable
+    /// by following a chain of 'Deref' instead (since there's a blanket
+    /// implementation of Receiver for Deref).
+    /// We use the entire set of steps when identifying method candidates
+    /// (e.g. identifying relevant `impl` blocks) but only those that are
+    /// reachable via Deref when examining what the receiver type can
+    /// be converted into by autodereffing.
+    pub reachable_via_deref: bool,
+}
+
+#[derive(Clone, Debug)]
+struct MethodAutoderefStepsResult<'db> {
+    /// The valid autoderef steps that could be found by following a chain
+    /// of `Receiver<Target=T>` or `Deref<Target=T>` trait implementations.
+    pub steps: SmallVec<[CandidateStep<'db>; 3]>,
+    /// If Some(T), a type autoderef reported an error on.
+    pub opt_bad_ty: Option<MethodAutoderefBadTy<'db>>,
+    /// If `true`, `steps` has been truncated due to reaching the
+    /// recursion limit.
+ pub reached_recursion_limit: bool, +} + +#[derive(Debug, Clone)] +struct MethodAutoderefBadTy<'db> { + pub reached_raw_pointer: bool, + pub ty: Canonical<'db, QueryResponse<'db, Ty<'db>>>, +} + +impl<'a, 'db> MethodResolutionContext<'a, 'db> { + #[instrument(level = "debug", skip(self))] + pub fn probe_for_name(&self, mode: Mode, item_name: Name, self_ty: Ty<'db>) -> PickResult<'db> { + self.probe_op(mode, self_ty, ProbeForNameChoice { private_candidate: None, item_name }) + } + + #[instrument(level = "debug", skip(self))] + pub fn probe_all( + &self, + mode: Mode, + self_ty: Ty<'db>, + ) -> impl Iterator<Item = CandidateWithPrivate<'db>> { + self.probe_op(mode, self_ty, ProbeAllChoice::new()).candidates.into_inner().into_values() + } + + fn probe_op<Choice: ProbeChoice<'db>>( + &self, + mode: Mode, + self_ty: Ty<'db>, + choice: Choice, + ) -> Choice::FinalChoice { + let mut orig_values = OriginalQueryValues::default(); + let query_input = self.infcx.canonicalize_query(self_ty, &mut orig_values); + let steps = match mode { + Mode::MethodCall => self.method_autoderef_steps(&query_input), + Mode::Path => self.infcx.probe(|_| { + // Mode::Path - the deref steps is "trivial". This turns + // our CanonicalQuery into a "trivial" QueryResponse. This + // is a bit inefficient, but I don't think that writing + // special handling for this "trivial case" is a good idea. + + let infcx = self.infcx; + let (self_ty, var_values) = infcx.instantiate_canonical(&query_input); + debug!(?self_ty, ?query_input, "probe_op: Mode::Path"); + MethodAutoderefStepsResult { + steps: smallvec![CandidateStep { + self_ty: self + .infcx + .make_query_response_ignoring_pending_obligations(var_values, self_ty), + self_ty_is_opaque: false, + autoderefs: 0, + from_unsafe_deref: false, + unsize: false, + reachable_via_deref: true, + }], + opt_bad_ty: None, + reached_recursion_limit: false, + } + }), + }; + + if steps.reached_recursion_limit { + // FIXME: Report an error. 
+ } + + // If we encountered an `_` type or an error type during autoderef, this is + // ambiguous. + if let Some(bad_ty) = &steps.opt_bad_ty { + if bad_ty.reached_raw_pointer + && !self.unstable_features.arbitrary_self_types_pointers + && self.edition.at_least_2018() + { + // this case used to be allowed by the compiler, + // so we do a future-compat lint here for the 2015 edition + // (see https://github.com/rust-lang/rust/issues/46906) + // FIXME: Emit the lint. + // self.tcx.node_span_lint( + // lint::builtin::TYVAR_BEHIND_RAW_POINTER, + // scope_expr_id, + // span, + // |lint| { + // lint.primary_message("type annotations needed"); + // }, + // ); + } else { + // Ended up encountering a type variable when doing autoderef, + // but it may not be a type variable after processing obligations + // in our local `FnCtxt`, so don't call `structurally_resolve_type`. + let ty = &bad_ty.ty; + let ty = self + .infcx + .instantiate_query_response_and_region_obligations( + &ObligationCause::new(), + self.env.env, + &orig_values, + ty, + ) + .unwrap_or_else(|_| panic!("instantiating {:?} failed?", ty)); + let ty = self.infcx.resolve_vars_if_possible(ty.value); + match ty.kind() { + TyKind::Infer(InferTy::TyVar(_)) => { + // FIXME: Report "type annotations needed" error. 
+ } + TyKind::Error(_) => {} + _ => panic!("unexpected bad final type in method autoderef"), + }; + return Choice::final_choice_from_err(MethodError::ErrorReported); + } + } + + debug!("ProbeContext: steps for self_ty={:?} are {:?}", self_ty, steps); + + // this creates one big transaction so that all type variables etc + // that we create during the probe process are removed later + self.infcx.probe(|_| { + let mut probe_cx = ProbeContext::new(self, mode, &orig_values, &steps.steps, choice); + + probe_cx.assemble_inherent_candidates(); + probe_cx.assemble_extension_candidates_for_traits_in_scope(); + Choice::choose(probe_cx) + }) + } + + fn method_autoderef_steps( + &self, + self_ty: &Canonical<'db, Ty<'db>>, + ) -> MethodAutoderefStepsResult<'db> { + self.infcx.probe(|_| { + debug!("method_autoderef_steps({:?})", self_ty); + + // We accept not-yet-defined opaque types in the autoderef + // chain to support recursive calls. We do error if the final + // infer var is not an opaque. + let infcx = self.infcx; + let (self_ty, inference_vars) = infcx.instantiate_canonical(self_ty); + let self_ty_is_opaque = |ty: Ty<'_>| { + if let TyKind::Infer(InferTy::TyVar(vid)) = ty.kind() { + infcx.has_opaques_with_sub_unified_hidden_type(vid) + } else { + false + } + }; + + // If arbitrary self types is not enabled, we follow the chain of + // `Deref<Target=T>`. If arbitrary self types is enabled, we instead + // follow the chain of `Receiver<Target=T>`, but we also record whether + // such types are reachable by following the (potentially shorter) + // chain of `Deref<Target=T>`. We will use the first list when finding + // potentially relevant function implementations (e.g. relevant impl blocks) + // but the second list when determining types that the receiver may be + // converted to, in order to find out which of those methods might actually + // be callable. 
+ let mut autoderef_via_deref = + Autoderef::new(infcx, self.env, self_ty).include_raw_pointers(); + + let mut reached_raw_pointer = false; + let arbitrary_self_types_enabled = self.unstable_features.arbitrary_self_types + || self.unstable_features.arbitrary_self_types_pointers; + let (mut steps, reached_recursion_limit) = if arbitrary_self_types_enabled { + let reachable_via_deref = + autoderef_via_deref.by_ref().map(|_| true).chain(std::iter::repeat(false)); + + let mut autoderef_via_receiver = Autoderef::new(infcx, self.env, self_ty) + .include_raw_pointers() + .use_receiver_trait(); + let steps = autoderef_via_receiver + .by_ref() + .zip(reachable_via_deref) + .map(|((ty, d), reachable_via_deref)| { + let step = CandidateStep { + self_ty: infcx.make_query_response_ignoring_pending_obligations( + inference_vars, + ty, + ), + self_ty_is_opaque: self_ty_is_opaque(ty), + autoderefs: d, + from_unsafe_deref: reached_raw_pointer, + unsize: false, + reachable_via_deref, + }; + if ty.is_raw_ptr() { + // all the subsequent steps will be from_unsafe_deref + reached_raw_pointer = true; + } + step + }) + .collect::<SmallVec<[_; _]>>(); + (steps, autoderef_via_receiver.reached_recursion_limit()) + } else { + let steps = autoderef_via_deref + .by_ref() + .map(|(ty, d)| { + let step = CandidateStep { + self_ty: infcx.make_query_response_ignoring_pending_obligations( + inference_vars, + ty, + ), + self_ty_is_opaque: self_ty_is_opaque(ty), + autoderefs: d, + from_unsafe_deref: reached_raw_pointer, + unsize: false, + reachable_via_deref: true, + }; + if ty.is_raw_ptr() { + // all the subsequent steps will be from_unsafe_deref + reached_raw_pointer = true; + } + step + }) + .collect(); + (steps, autoderef_via_deref.reached_recursion_limit()) + }; + let final_ty = autoderef_via_deref.final_ty(); + let opt_bad_ty = match final_ty.kind() { + TyKind::Infer(InferTy::TyVar(_)) if !self_ty_is_opaque(final_ty) => { + Some(MethodAutoderefBadTy { + reached_raw_pointer, + ty: 
infcx.make_query_response_ignoring_pending_obligations( + inference_vars, + final_ty, + ), + }) + } + TyKind::Error(_) => Some(MethodAutoderefBadTy { + reached_raw_pointer, + ty: infcx + .make_query_response_ignoring_pending_obligations(inference_vars, final_ty), + }), + TyKind::Array(elem_ty, _) => { + let autoderefs = steps.iter().filter(|s| s.reachable_via_deref).count() - 1; + steps.push(CandidateStep { + self_ty: infcx.make_query_response_ignoring_pending_obligations( + inference_vars, + Ty::new_slice(infcx.interner, elem_ty), + ), + self_ty_is_opaque: false, + autoderefs, + // this could be from an unsafe deref if we had + // a *mut/const [T; N] + from_unsafe_deref: reached_raw_pointer, + unsize: true, + reachable_via_deref: true, // this is always the final type from + // autoderef_via_deref + }); + + None + } + _ => None, + }; + + debug!("method_autoderef_steps: steps={:?} opt_bad_ty={:?}", steps, opt_bad_ty); + MethodAutoderefStepsResult { steps, opt_bad_ty, reached_recursion_limit } + }) + } +} + +trait ProbeChoice<'db>: Sized { + type Choice; + type FinalChoice; + + /// Finds the method with the appropriate name (or return type, as the case may be). + // The length of the returned iterator is nearly always 0 or 1 and this + // method is fairly hot. 
+ fn with_impl_or_trait_item<'a>( + this: &mut ProbeContext<'a, 'db, Self>, + items: &[(Name, AssocItemId)], + callback: impl FnMut(&mut ProbeContext<'a, 'db, Self>, CandidateId), + ); + + fn consider_candidates( + this: &ProbeContext<'_, 'db, Self>, + self_ty: Ty<'db>, + candidates: Vec<&Candidate<'db>>, + ) -> ControlFlow<Self::Choice>; + + fn consider_private_candidates( + this: &mut ProbeContext<'_, 'db, Self>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ); + + fn map_choice_pick( + choice: Self::Choice, + f: impl FnOnce(Pick<'db>) -> Pick<'db>, + ) -> Self::Choice; + + fn check_by_value_method_shadowing( + this: &mut ProbeContext<'_, 'db, Self>, + by_value_pick: &Self::Choice, + step: &CandidateStep<'db>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) -> ControlFlow<Self::Choice>; + + fn check_autorefed_method_shadowing( + this: &mut ProbeContext<'_, 'db, Self>, + autoref_pick: &Self::Choice, + step: &CandidateStep<'db>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) -> ControlFlow<Self::Choice>; + + fn final_choice_from_err(err: MethodError<'db>) -> Self::FinalChoice; + + fn choose(this: ProbeContext<'_, 'db, Self>) -> Self::FinalChoice; +} + +#[derive(Debug)] +struct ProbeForNameChoice<'db> { + item_name: Name, + + /// Some(candidate) if there is a private candidate + private_candidate: Option<Pick<'db>>, +} + +impl<'db> ProbeChoice<'db> for ProbeForNameChoice<'db> { + type Choice = PickResult<'db>; + type FinalChoice = PickResult<'db>; + + fn with_impl_or_trait_item<'a>( + this: &mut ProbeContext<'a, 'db, Self>, + items: &[(Name, AssocItemId)], + mut callback: impl FnMut(&mut ProbeContext<'a, 'db, Self>, CandidateId), + ) { + let item = items + .iter() + .filter_map(|(name, id)| { + let id = match *id { + AssocItemId::FunctionId(id) => id.into(), + AssocItemId::ConstId(id) => id.into(), + AssocItemId::TypeAliasId(_) => return None, + 
}; + Some((name, id)) + }) + .find(|(name, _)| **name == this.choice.item_name) + .map(|(_, id)| id) + .filter(|id| this.mode == Mode::Path || matches!(id, CandidateId::FunctionId(_))); + if let Some(item) = item { + callback(this, item); + } + } + + fn consider_candidates( + this: &ProbeContext<'_, 'db, Self>, + self_ty: Ty<'db>, + mut applicable_candidates: Vec<&Candidate<'db>>, + ) -> ControlFlow<Self::Choice> { + if applicable_candidates.len() > 1 + && let Some(pick) = + this.collapse_candidates_to_trait_pick(self_ty, &applicable_candidates) + { + return ControlFlow::Break(Ok(pick)); + } + + if applicable_candidates.len() > 1 { + // We collapse to a subtrait pick *after* filtering unstable candidates + // to make sure we don't prefer a unstable subtrait method over a stable + // supertrait method. + if this.ctx.unstable_features.supertrait_item_shadowing + && let Some(pick) = + this.collapse_candidates_to_subtrait_pick(self_ty, &applicable_candidates) + { + return ControlFlow::Break(Ok(pick)); + } + + let sources = + applicable_candidates.iter().map(|p| this.candidate_source(p, self_ty)).collect(); + return ControlFlow::Break(Err(MethodError::Ambiguity(sources))); + } + + match applicable_candidates.pop() { + Some(probe) => ControlFlow::Break(Ok(probe.to_unadjusted_pick(self_ty))), + None => ControlFlow::Continue(()), + } + } + + fn consider_private_candidates( + this: &mut ProbeContext<'_, 'db, Self>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) { + if this.choice.private_candidate.is_none() + && let ControlFlow::Break(Ok(pick)) = this.consider_candidates( + self_ty, + instantiate_self_ty_obligations, + &this.private_candidates, + None, + ) + { + this.choice.private_candidate = Some(pick); + } + } + + fn map_choice_pick( + choice: Self::Choice, + f: impl FnOnce(Pick<'db>) -> Pick<'db>, + ) -> Self::Choice { + choice.map(f) + } + + fn check_by_value_method_shadowing( + this: &mut ProbeContext<'_, 'db, Self>, + 
by_value_pick: &Self::Choice, + step: &CandidateStep<'db>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) -> ControlFlow<Self::Choice> { + if let Ok(by_value_pick) = by_value_pick + && matches!(by_value_pick.kind, PickKind::InherentImplPick(_)) + { + for mutbl in [Mutability::Not, Mutability::Mut] { + if let Err(e) = this.check_for_shadowed_autorefd_method( + by_value_pick, + step, + self_ty, + instantiate_self_ty_obligations, + mutbl, + ) { + return ControlFlow::Break(Err(e)); + } + } + } + ControlFlow::Continue(()) + } + + fn check_autorefed_method_shadowing( + this: &mut ProbeContext<'_, 'db, Self>, + autoref_pick: &Self::Choice, + step: &CandidateStep<'db>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) -> ControlFlow<Self::Choice> { + if let Ok(autoref_pick) = autoref_pick.as_ref() { + // Check we're not shadowing others + if matches!(autoref_pick.kind, PickKind::InherentImplPick(_)) + && let Err(e) = this.check_for_shadowed_autorefd_method( + autoref_pick, + step, + self_ty, + instantiate_self_ty_obligations, + Mutability::Mut, + ) + { + return ControlFlow::Break(Err(e)); + } + } + ControlFlow::Continue(()) + } + + fn final_choice_from_err(err: MethodError<'db>) -> Self::FinalChoice { + Err(err) + } + + fn choose(this: ProbeContext<'_, 'db, Self>) -> Self::FinalChoice { + this.pick() + } +} + +#[derive(Debug)] +struct ProbeAllChoice<'db> { + candidates: RefCell<FxHashMap<CandidateId, CandidateWithPrivate<'db>>>, + considering_visible_candidates: bool, +} + +impl ProbeAllChoice<'_> { + fn new() -> Self { + Self { candidates: RefCell::default(), considering_visible_candidates: true } + } +} + +impl<'db> ProbeChoice<'db> for ProbeAllChoice<'db> { + type Choice = Infallible; + type FinalChoice = Self; + + fn with_impl_or_trait_item<'a>( + this: &mut ProbeContext<'a, 'db, Self>, + items: &[(Name, AssocItemId)], + mut callback: impl FnMut(&mut ProbeContext<'a, 'db, Self>, 
CandidateId), + ) { + let mode = this.mode; + items + .iter() + .filter_map(|(_, id)| is_relevant_kind_for_mode(mode, *id)) + .for_each(|id| callback(this, id)); + } + + fn consider_candidates( + this: &ProbeContext<'_, 'db, Self>, + _self_ty: Ty<'db>, + candidates: Vec<&Candidate<'db>>, + ) -> ControlFlow<Self::Choice> { + let is_visible = this.choice.considering_visible_candidates; + let mut all_candidates = this.choice.candidates.borrow_mut(); + for candidate in candidates { + // We should not override existing entries, because inherent methods of trait objects (from the principal) + // are also visited as trait methods, and we want to consider them inherent. + all_candidates + .entry(candidate.item) + .or_insert(CandidateWithPrivate { candidate: candidate.clone(), is_visible }); + } + ControlFlow::Continue(()) + } + + fn consider_private_candidates( + this: &mut ProbeContext<'_, 'db, Self>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) { + this.choice.considering_visible_candidates = false; + let ControlFlow::Continue(()) = this.consider_candidates( + self_ty, + instantiate_self_ty_obligations, + &this.private_candidates, + None, + ); + this.choice.considering_visible_candidates = true; + } + + fn map_choice_pick( + choice: Self::Choice, + _f: impl FnOnce(Pick<'db>) -> Pick<'db>, + ) -> Self::Choice { + choice + } + + fn check_by_value_method_shadowing( + _this: &mut ProbeContext<'_, 'db, Self>, + _by_value_pick: &Self::Choice, + _step: &CandidateStep<'db>, + _self_ty: Ty<'db>, + _instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) -> ControlFlow<Self::Choice> { + ControlFlow::Continue(()) + } + + fn check_autorefed_method_shadowing( + _this: &mut ProbeContext<'_, 'db, Self>, + _autoref_pick: &Self::Choice, + _step: &CandidateStep<'db>, + _self_ty: Ty<'db>, + _instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) -> ControlFlow<Self::Choice> { + ControlFlow::Continue(()) + } + + fn 
final_choice_from_err(_err: MethodError<'db>) -> Self::FinalChoice { + Self::new() + } + + fn choose(mut this: ProbeContext<'_, 'db, Self>) -> Self::FinalChoice { + let ControlFlow::Continue(()) = this.pick_all_method(); + this.choice + } +} + +impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> { + fn new( + ctx: &'a MethodResolutionContext<'a, 'db>, + mode: Mode, + orig_steps_var_values: &'a OriginalQueryValues<'db>, + steps: &'a [CandidateStep<'db>], + choice: Choice, + ) -> ProbeContext<'a, 'db, Choice> { + ProbeContext { + ctx, + mode, + inherent_candidates: Vec::new(), + extension_candidates: Vec::new(), + impl_dups: FxHashSet::default(), + orig_steps_var_values, + steps, + private_candidates: Vec::new(), + static_candidates: Vec::new(), + choice, + } + } + + #[inline] + fn db(&self) -> &'db dyn HirDatabase { + self.ctx.infcx.interner.db + } + + #[inline] + fn interner(&self) -> DbInterner<'db> { + self.ctx.infcx.interner + } + + #[inline] + fn infcx(&self) -> &'a InferCtxt<'db> { + self.ctx.infcx + } + + #[inline] + fn param_env(&self) -> ParamEnv<'db> { + self.ctx.env.env + } + + /// When we're looking up a method by path (UFCS), we relate the receiver + /// types invariantly. When we are looking up a method by the `.` operator, + /// we relate them covariantly. 
+ fn variance(&self) -> Variance { + match self.mode { + Mode::MethodCall => Variance::Covariant, + Mode::Path => Variance::Invariant, + } + } + + /////////////////////////////////////////////////////////////////////////// + // CANDIDATE ASSEMBLY + + fn push_candidate(&mut self, candidate: Candidate<'db>, is_inherent: bool) { + let is_accessible = if is_inherent { + let candidate_id = match candidate.item { + CandidateId::FunctionId(id) => id.into(), + CandidateId::ConstId(id) => id.into(), + }; + let visibility = self.db().assoc_visibility(candidate_id); + self.ctx.resolver.is_visible(self.db(), visibility) + } else { + true + }; + if is_accessible { + if is_inherent { + self.inherent_candidates.push(candidate); + } else { + self.extension_candidates.push(candidate); + } + } else { + self.private_candidates.push(candidate); + } + } + + fn assemble_inherent_candidates(&mut self) { + for step in self.steps.iter() { + self.assemble_probe(&step.self_ty, step.autoderefs); + } + } + + #[instrument(level = "debug", skip(self))] + fn assemble_probe( + &mut self, + self_ty: &Canonical<'db, QueryResponse<'db, Ty<'db>>>, + receiver_steps: usize, + ) { + let raw_self_ty = self_ty.value.value; + match raw_self_ty.kind() { + TyKind::Dynamic(data, ..) => { + if let Some(p) = data.principal() { + // Subtle: we can't use `instantiate_query_response` here: using it will + // commit to all of the type equalities assumed by inference going through + // autoderef (see the `method-probe-no-guessing` test). + // + // However, in this code, it is OK if we end up with an object type that is + // "more general" than the object type that we are evaluating. 
For *every* + // object type `MY_OBJECT`, a function call that goes through a trait-ref + // of the form `<MY_OBJECT as SuperTraitOf(MY_OBJECT)>::func` is a valid + // `ObjectCandidate`, and it should be discoverable "exactly" through one + // of the iterations in the autoderef loop, so there is no problem with it + // being discoverable in another one of these iterations. + // + // Using `instantiate_canonical` on our + // `Canonical<QueryResponse<Ty<'db>>>` and then *throwing away* the + // `CanonicalVarValues` will exactly give us such a generalization - it + // will still match the original object type, but it won't pollute our + // type variables in any form, so just do that! + let (QueryResponse { value: generalized_self_ty, .. }, _ignored_var_values) = + self.infcx().instantiate_canonical(self_ty); + + self.assemble_inherent_candidates_from_object(generalized_self_ty); + self.assemble_inherent_impl_candidates_for_type( + &SimplifiedType::Trait(p.def_id().0.into()), + receiver_steps, + ); + self.assemble_inherent_candidates_for_incoherent_ty( + raw_self_ty, + receiver_steps, + ); + } + } + TyKind::Adt(def, _) => { + let def_id = def.def_id().0; + self.assemble_inherent_impl_candidates_for_type( + &SimplifiedType::Adt(def_id.into()), + receiver_steps, + ); + self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps); + } + TyKind::Foreign(did) => { + self.assemble_inherent_impl_candidates_for_type( + &SimplifiedType::Foreign(did.0.into()), + receiver_steps, + ); + self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps); + } + TyKind::Param(_) => { + self.assemble_inherent_candidates_from_param(raw_self_ty); + } + TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Str + | TyKind::Array(..) + | TyKind::Slice(_) + | TyKind::RawPtr(_, _) + | TyKind::Ref(..) + | TyKind::Never + | TyKind::Tuple(..) 
=> { + self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps) + } + _ => {} + } + } + + fn assemble_inherent_candidates_for_incoherent_ty( + &mut self, + self_ty: Ty<'db>, + receiver_steps: usize, + ) { + let Some(simp) = simplify_type(self.interner(), self_ty, TreatParams::InstantiateWithInfer) + else { + panic!("unexpected incoherent type: {:?}", self_ty) + }; + for &impl_def_id in incoherent_inherent_impls(self.db(), simp) { + self.assemble_inherent_impl_probe(impl_def_id, receiver_steps); + } + } + + fn assemble_inherent_impl_candidates_for_type( + &mut self, + self_ty: &SimplifiedType, + receiver_steps: usize, + ) { + let Some(module) = simplified_type_module(self.db(), self_ty) else { + return; + }; + InherentImpls::for_each_crate_and_block( + self.db(), + module.krate(), + module.containing_block(), + &mut |impls| { + for &impl_def_id in impls.for_self_ty(self_ty) { + self.assemble_inherent_impl_probe(impl_def_id, receiver_steps); + } + }, + ); + } + + #[instrument(level = "debug", skip(self))] + fn assemble_inherent_impl_probe(&mut self, impl_def_id: ImplId, receiver_steps: usize) { + if !self.impl_dups.insert(impl_def_id) { + return; // already visited + } + + self.with_impl_item(impl_def_id, |this, item| { + if !this.has_applicable_self(item) { + // No receiver declared. Not a candidate. + this.record_static_candidate(CandidateSource::Impl(impl_def_id)); + return; + } + this.push_candidate( + Candidate { item, kind: InherentImplCandidate { impl_def_id, receiver_steps } }, + true, + ); + }); + } + + #[instrument(level = "debug", skip(self))] + fn assemble_inherent_candidates_from_object(&mut self, self_ty: Ty<'db>) { + let principal = match self_ty.kind() { + TyKind::Dynamic(data, ..) 
=> Some(data), + _ => None, + } + .and_then(|data| data.principal()) + .unwrap_or_else(|| { + panic!("non-object {:?} in assemble_inherent_candidates_from_object", self_ty) + }); + + // It is illegal to invoke a method on a trait instance that refers to + // the `Self` type. An [`DynCompatibilityViolation::SupertraitSelf`] error + // will be reported by `dyn_compatibility.rs` if the method refers to the + // `Self` type anywhere other than the receiver. Here, we use a + // instantiation that replaces `Self` with the object type itself. Hence, + // a `&self` method will wind up with an argument type like `&dyn Trait`. + let trait_ref = principal.with_self_ty(self.interner(), self_ty); + self.assemble_candidates_for_bounds( + elaborate::supertraits(self.interner(), trait_ref), + |this, new_trait_ref, item| { + this.push_candidate(Candidate { item, kind: ObjectCandidate(new_trait_ref) }, true); + }, + ); + } + + #[instrument(level = "debug", skip(self))] + fn assemble_inherent_candidates_from_param(&mut self, param_ty: Ty<'db>) { + debug_assert!(matches!(param_ty.kind(), TyKind::Param(_))); + + let interner = self.interner(); + + // We use `DeepRejectCtxt` here which may return false positive on where clauses + // with alias self types. We need to later on reject these as inherent candidates + // in `consider_probe`. + let bounds = self.param_env().clauses.iter().filter_map(|predicate| { + let bound_predicate = predicate.kind(); + match bound_predicate.skip_binder() { + ClauseKind::Trait(trait_predicate) => DeepRejectCtxt::relate_rigid_rigid(interner) + .types_may_unify(param_ty, trait_predicate.trait_ref.self_ty()) + .then(|| bound_predicate.rebind(trait_predicate.trait_ref)), + ClauseKind::RegionOutlives(_) + | ClauseKind::TypeOutlives(_) + | ClauseKind::Projection(_) + | ClauseKind::ConstArgHasType(_, _) + | ClauseKind::WellFormed(_) + | ClauseKind::ConstEvaluatable(_) + | ClauseKind::UnstableFeature(_) + | ClauseKind::HostEffect(..) 
=> None, + } + }); + + self.assemble_candidates_for_bounds(bounds, |this, poly_trait_ref, item| { + this.push_candidate( + Candidate { item, kind: WhereClauseCandidate(poly_trait_ref) }, + true, + ); + }); + } + + // Do a search through a list of bounds, using a callback to actually + // create the candidates. + fn assemble_candidates_for_bounds<F>( + &mut self, + bounds: impl Iterator<Item = PolyTraitRef<'db>>, + mut mk_cand: F, + ) where + F: for<'b> FnMut(&mut ProbeContext<'b, 'db, Choice>, PolyTraitRef<'db>, CandidateId), + { + for bound_trait_ref in bounds { + debug!("elaborate_bounds(bound_trait_ref={:?})", bound_trait_ref); + self.with_trait_item(bound_trait_ref.def_id().0, |this, item| { + if !this.has_applicable_self(item) { + this.record_static_candidate(CandidateSource::Trait( + bound_trait_ref.def_id().0, + )); + } else { + mk_cand(this, bound_trait_ref, item); + } + }); + } + } + + #[instrument(level = "debug", skip(self))] + fn assemble_extension_candidates_for_traits_in_scope(&mut self) { + for &trait_did in self.ctx.traits_in_scope { + self.assemble_extension_candidates_for_trait(trait_did); + } + } + + #[instrument(level = "debug", skip(self))] + fn assemble_extension_candidates_for_trait(&mut self, trait_def_id: TraitId) { + let trait_args = self.infcx().fresh_args_for_item(trait_def_id.into()); + let trait_ref = TraitRef::new_from_args(self.interner(), trait_def_id.into(), trait_args); + + self.with_trait_item(trait_def_id, |this, item| { + // Check whether `trait_def_id` defines a method with suitable name. 
+ if !this.has_applicable_self(item) { + debug!("method has inapplicable self"); + this.record_static_candidate(CandidateSource::Trait(trait_def_id)); + return; + } + this.push_candidate( + Candidate { item, kind: TraitCandidate(Binder::dummy(trait_ref)) }, + false, + ); + }); + } +} + +/////////////////////////////////////////////////////////////////////////// +// THE ACTUAL SEARCH +impl<'a, 'db> ProbeContext<'a, 'db, ProbeForNameChoice<'db>> { + #[instrument(level = "debug", skip(self))] + fn pick(mut self) -> PickResult<'db> { + if let Some(r) = self.pick_core() { + return r; + } + + debug!("pick: actual search failed, assemble diagnostics"); + + if let Some(candidate) = self.choice.private_candidate { + return Err(MethodError::PrivateMatch(candidate)); + } + + Err(MethodError::NoMatch) + } + + fn pick_core(&mut self) -> Option<PickResult<'db>> { + self.pick_all_method().break_value() + } + + /// Check for cases where arbitrary self types allows shadowing + /// of methods that might be a compatibility break. Specifically, + /// we have something like: + /// ```ignore (illustrative) + /// struct A; + /// impl A { + /// fn foo(self: &NonNull<A>) {} + /// // note this is by reference + /// } + /// ``` + /// then we've come along and added this method to `NonNull`: + /// ```ignore (illustrative) + /// fn foo(self) // note this is by value + /// ``` + /// Report an error in this case. + fn check_for_shadowed_autorefd_method( + &mut self, + possible_shadower: &Pick<'db>, + step: &CandidateStep<'db>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + mutbl: Mutability, + ) -> Result<(), MethodError<'db>> { + // The errors emitted by this function are part of + // the arbitrary self types work, and should not impact + // other users. 
+ if !self.ctx.unstable_features.arbitrary_self_types + && !self.ctx.unstable_features.arbitrary_self_types_pointers + { + return Ok(()); + } + + // Set criteria for how we find methods possibly shadowed by 'possible_shadower' + let pick_constraints = PickConstraintsForShadowed { + // It's the same `self` type... + autoderefs: possible_shadower.autoderefs, + // ... but the method was found in an impl block determined + // by searching further along the Receiver chain than the other, + // showing that it's a smart pointer type causing the problem... + receiver_steps: possible_shadower.receiver_steps, + // ... and they don't end up pointing to the same item in the + // first place (could happen with things like blanket impls for T) + def_id: possible_shadower.item, + }; + // A note on the autoderefs above. Within pick_by_value_method, an extra + // autoderef may be applied in order to reborrow a reference with + // a different lifetime. That seems as though it would break the + // logic of these constraints, since the number of autoderefs could + // no longer be used to identify the fundamental type of the receiver. + // However, this extra autoderef is applied only to by-value calls + // where the receiver is already a reference. So this situation would + // only occur in cases where the shadowing looks like this: + // ``` + // struct A; + // impl A { + // fn foo(self: &&NonNull<A>) {} + // // note this is by DOUBLE reference + // } + // ``` + // then we've come along and added this method to `NonNull`: + // ``` + // fn foo(&self) // note this is by single reference + // ``` + // and the call is: + // ``` + // let bar = NonNull<Foo>; + // let bar = &foo; + // bar.foo(); + // ``` + // In these circumstances, the logic is wrong, and we wouldn't spot + // the shadowing, because the autoderef-based maths wouldn't line up. + // This is a niche case and we can live without generating an error + // in the case of such shadowing. 
+ let potentially_shadowed_pick = self.pick_autorefd_method( + step, + self_ty, + instantiate_self_ty_obligations, + mutbl, + Some(&pick_constraints), + ); + // Look for actual pairs of shadower/shadowed which are + // the sort of shadowing case we want to avoid. Specifically... + if let ControlFlow::Break(Ok(possible_shadowed)) = &potentially_shadowed_pick { + let sources = [possible_shadower, possible_shadowed] + .into_iter() + .map(|p| self.candidate_source_from_pick(p)) + .collect(); + return Err(MethodError::Ambiguity(sources)); + } + Ok(()) + } +} + +impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> { + fn pick_all_method(&mut self) -> ControlFlow<Choice::Choice> { + self.steps + .iter() + // At this point we're considering the types to which the receiver can be converted, + // so we want to follow the `Deref` chain not the `Receiver` chain. Filter out + // steps which can only be reached by following the (longer) `Receiver` chain. + .filter(|step| step.reachable_via_deref) + .filter(|step| { + debug!("pick_all_method: step={:?}", step); + // skip types that are from a type error or that would require dereferencing + // a raw pointer + !step.self_ty.value.value.references_non_lt_error() && !step.from_unsafe_deref + }) + .try_for_each(|step| { + let InferOk { value: self_ty, obligations: instantiate_self_ty_obligations } = self + .infcx() + .instantiate_query_response_and_region_obligations( + &ObligationCause::new(), + self.param_env(), + self.orig_steps_var_values, + &step.self_ty, + ) + .unwrap_or_else(|_| panic!("{:?} was applicable but now isn't?", step.self_ty)); + + let by_value_pick = + self.pick_by_value_method(step, self_ty, &instantiate_self_ty_obligations); + + // Check for shadowing of a by-reference method by a by-value method (see comments on check_for_shadowing) + if let ControlFlow::Break(by_value_pick) = by_value_pick { + Choice::check_by_value_method_shadowing( + self, + &by_value_pick, + step, + self_ty, + 
&instantiate_self_ty_obligations, + )?; + return ControlFlow::Break(by_value_pick); + } + + let autoref_pick = self.pick_autorefd_method( + step, + self_ty, + &instantiate_self_ty_obligations, + Mutability::Not, + None, + ); + // Check for shadowing of a by-mut-ref method by a by-reference method (see comments on check_for_shadowing) + if let ControlFlow::Break(autoref_pick) = autoref_pick { + Choice::check_autorefed_method_shadowing( + self, + &autoref_pick, + step, + self_ty, + &instantiate_self_ty_obligations, + )?; + return ControlFlow::Break(autoref_pick); + } + + // Note that no shadowing errors are produced from here on, + // as we consider const ptr methods. + // We allow new methods that take *mut T to shadow + // methods which took *const T, so there is no entry in + // this list for the results of `pick_const_ptr_method`. + // The reason is that the standard pointer cast method + // (on a mutable pointer) always already shadows the + // cast method (on a const pointer). So, if we added + // `pick_const_ptr_method` to this method, the anti- + // shadowing algorithm would always complain about + // the conflict between *const::cast and *mut::cast. + // In practice therefore this does constrain us: + // we cannot add new + // self: *mut Self + // methods to types such as NonNull or anything else + // which implements Receiver, because this might in future + // shadow existing methods taking + // self: *const NonNull<Self> + // in the pointee. In practice, methods taking raw pointers + // are rare, and it seems that it should be easily possible + // to avoid such compatibility breaks. + // We also don't check for reborrowed pin methods which + // may be shadowed; these also seem unlikely to occur. 
+ self.pick_autorefd_method( + step, + self_ty, + &instantiate_self_ty_obligations, + Mutability::Mut, + None, + )?; + self.pick_const_ptr_method(step, self_ty, &instantiate_self_ty_obligations) + }) + } + + /// For each type `T` in the step list, this attempts to find a method where + /// the (transformed) self type is exactly `T`. We do however do one + /// transformation on the adjustment: if we are passing a region pointer in, + /// we will potentially *reborrow* it to a shorter lifetime. This allows us + /// to transparently pass `&mut` pointers, in particular, without consuming + /// them for their entire lifetime. + fn pick_by_value_method( + &mut self, + step: &CandidateStep<'db>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) -> ControlFlow<Choice::Choice> { + if step.unsize { + return ControlFlow::Continue(()); + } + + self.pick_method(self_ty, instantiate_self_ty_obligations, None).map_break(|r| { + Choice::map_choice_pick(r, |mut pick| { + pick.autoderefs = step.autoderefs; + + match step.self_ty.value.value.kind() { + // Insert a `&*` or `&mut *` if this is a reference type: + TyKind::Ref(_, _, mutbl) => { + pick.autoderefs += 1; + pick.autoref_or_ptr_adjustment = Some(AutorefOrPtrAdjustment::Autoref { + mutbl, + unsize: pick.autoref_or_ptr_adjustment.is_some_and(|a| a.get_unsize()), + }) + } + + _ => (), + } + + pick + }) + }) + } + + fn pick_autorefd_method( + &mut self, + step: &CandidateStep<'db>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + mutbl: Mutability, + pick_constraints: Option<&PickConstraintsForShadowed>, + ) -> ControlFlow<Choice::Choice> { + let interner = self.interner(); + + if let Some(pick_constraints) = pick_constraints + && !pick_constraints.may_shadow_based_on_autoderefs(step.autoderefs) + { + return ControlFlow::Continue(()); + } + + // In general, during probing we erase regions. 
+ let region = Region::new_erased(interner); + + let autoref_ty = Ty::new_ref(interner, region, self_ty, mutbl); + self.pick_method(autoref_ty, instantiate_self_ty_obligations, pick_constraints).map_break( + |r| { + Choice::map_choice_pick(r, |mut pick| { + pick.autoderefs = step.autoderefs; + pick.autoref_or_ptr_adjustment = + Some(AutorefOrPtrAdjustment::Autoref { mutbl, unsize: step.unsize }); + pick + }) + }, + ) + } + + /// If `self_ty` is `*mut T` then this picks `*const T` methods. The reason why we have a + /// special case for this is because going from `*mut T` to `*const T` with autoderefs and + /// autorefs would require dereferencing the pointer, which is not safe. + fn pick_const_ptr_method( + &mut self, + step: &CandidateStep<'db>, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + ) -> ControlFlow<Choice::Choice> { + // Don't convert an unsized reference to ptr + if step.unsize { + return ControlFlow::Continue(()); + } + + let TyKind::RawPtr(ty, Mutability::Mut) = self_ty.kind() else { + return ControlFlow::Continue(()); + }; + + let const_ptr_ty = Ty::new_ptr(self.interner(), ty, Mutability::Not); + self.pick_method(const_ptr_ty, instantiate_self_ty_obligations, None).map_break(|r| { + Choice::map_choice_pick(r, |mut pick| { + pick.autoderefs = step.autoderefs; + pick.autoref_or_ptr_adjustment = Some(AutorefOrPtrAdjustment::ToConstPtr); + pick + }) + }) + } + + fn pick_method( + &mut self, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + pick_constraints: Option<&PickConstraintsForShadowed>, + ) -> ControlFlow<Choice::Choice> { + debug!("pick_method(self_ty={:?})", self_ty); + + for (kind, candidates) in + [("inherent", &self.inherent_candidates), ("extension", &self.extension_candidates)] + { + debug!("searching {} candidates", kind); + self.consider_candidates( + self_ty, + instantiate_self_ty_obligations, + candidates, + pick_constraints, + )?; + } + + 
Choice::consider_private_candidates(self, self_ty, instantiate_self_ty_obligations); + + ControlFlow::Continue(()) + } + + fn consider_candidates( + &self, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + candidates: &[Candidate<'db>], + pick_constraints: Option<&PickConstraintsForShadowed>, + ) -> ControlFlow<Choice::Choice> { + let applicable_candidates: Vec<_> = candidates + .iter() + .filter(|candidate| { + pick_constraints + .map(|pick_constraints| pick_constraints.candidate_may_shadow(candidate)) + .unwrap_or(true) + }) + .filter(|probe| { + self.consider_probe(self_ty, instantiate_self_ty_obligations, probe) + != ProbeResult::NoMatch + }) + .collect(); + + debug!("applicable_candidates: {:?}", applicable_candidates); + + Choice::consider_candidates(self, self_ty, applicable_candidates) + } + + fn select_trait_candidate( + &self, + trait_ref: TraitRef<'db>, + ) -> SelectionResult<'db, Selection<'db>> { + let obligation = + Obligation::new(self.interner(), ObligationCause::new(), self.param_env(), trait_ref); + self.infcx().select(&obligation) + } + + /// Used for ambiguous method call error reporting. Uses probing that throws away the result internally, + /// so do not use to make a decision that may lead to a successful compilation. + fn candidate_source(&self, candidate: &Candidate<'db>, self_ty: Ty<'db>) -> CandidateSource { + match candidate.kind { + InherentImplCandidate { impl_def_id, .. 
} => CandidateSource::Impl(impl_def_id), + ObjectCandidate(trait_ref) | WhereClauseCandidate(trait_ref) => { + CandidateSource::Trait(trait_ref.def_id().0) + } + TraitCandidate(trait_ref) => self.infcx().probe(|_| { + let trait_ref = self.infcx().instantiate_binder_with_fresh_vars( + BoundRegionConversionTime::FnCall, + trait_ref, + ); + let (xform_self_ty, _) = self.xform_self_ty( + candidate.item, + trait_ref.self_ty(), + trait_ref.args.as_slice(), + ); + // Guide the trait selection to show impls that have methods whose type matches + // up with the `self` parameter of the method. + let _ = self + .infcx() + .at(&ObligationCause::dummy(), self.param_env()) + .sup(xform_self_ty, self_ty); + match self.select_trait_candidate(trait_ref) { + Ok(Some(ImplSource::UserDefined(ref impl_data))) => { + // If only a single impl matches, make the error message point + // to that impl. + CandidateSource::Impl(impl_data.impl_def_id) + } + _ => CandidateSource::Trait(trait_ref.def_id.0), + } + }), + } + } + + fn candidate_source_from_pick(&self, pick: &Pick<'db>) -> CandidateSource { + match pick.kind { + InherentImplPick(impl_) => CandidateSource::Impl(impl_), + ObjectPick(trait_) | TraitPick(trait_) => CandidateSource::Trait(trait_), + WhereClausePick(trait_ref) => CandidateSource::Trait(trait_ref.skip_binder().def_id.0), + } + } + + #[instrument(level = "debug", skip(self), ret)] + fn consider_probe( + &self, + self_ty: Ty<'db>, + instantiate_self_ty_obligations: &[PredicateObligation<'db>], + probe: &Candidate<'db>, + ) -> ProbeResult { + self.infcx().probe(|_| { + let mut result = ProbeResult::Match; + let cause = &ObligationCause::new(); + let mut ocx = ObligationCtxt::new(self.infcx()); + + // Subtle: we're not *really* instantiating the current self type while + // probing, but instead fully recompute the autoderef steps once we've got + // a final `Pick`. We can't nicely handle these obligations outside of a probe. 
+ // + // We simply handle them for each candidate here for now. That's kinda scuffed + // and ideally we just put them into the `FnCtxt` right away. We need to consider + // them to deal with defining uses in `method_autoderef_steps`. + ocx.register_obligations(instantiate_self_ty_obligations.iter().cloned()); + let errors = ocx.try_evaluate_obligations(); + if !errors.is_empty() { + unreachable!("unexpected autoderef error {errors:?}"); + } + + let mut trait_predicate = None; + let (xform_self_ty, xform_ret_ty); + + match probe.kind { + InherentImplCandidate { impl_def_id, .. } => { + let impl_args = self.infcx().fresh_args_for_item(impl_def_id.into()); + let impl_ty = + self.db().impl_self_ty(impl_def_id).instantiate(self.interner(), impl_args); + (xform_self_ty, xform_ret_ty) = + self.xform_self_ty(probe.item, impl_ty, impl_args.as_slice()); + match ocx.relate( + cause, + self.param_env(), + self.variance(), + self_ty, + xform_self_ty, + ) { + Ok(()) => {} + Err(err) => { + debug!("--> cannot relate self-types {:?}", err); + return ProbeResult::NoMatch; + } + } + // Check whether the impl imposes obligations we have to worry about. + let impl_bounds = GenericPredicates::query_all(self.db(), impl_def_id.into()); + let impl_bounds = clauses_as_obligations( + impl_bounds.iter_instantiated_copied(self.interner(), impl_args.as_slice()), + ObligationCause::new(), + self.param_env(), + ); + // Convert the bounds into obligations. + ocx.register_obligations(impl_bounds); + } + TraitCandidate(poly_trait_ref) => { + // Some trait methods are excluded for arrays before 2021. + // (`array.into_iter()` wants a slice iterator for compatibility.) 
+ if self_ty.is_array() && !self.ctx.edition.at_least_2021() { + let trait_signature = self.db().trait_signature(poly_trait_ref.def_id().0); + if trait_signature + .flags + .contains(TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH) + { + return ProbeResult::NoMatch; + } + } + + // Some trait methods are excluded for boxed slices before 2024. + // (`boxed_slice.into_iter()` wants a slice iterator for compatibility.) + if self_ty.boxed_ty().is_some_and(Ty::is_slice) + && !self.ctx.edition.at_least_2024() + { + let trait_signature = self.db().trait_signature(poly_trait_ref.def_id().0); + if trait_signature + .flags + .contains(TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) + { + return ProbeResult::NoMatch; + } + } + + let trait_ref = self.infcx().instantiate_binder_with_fresh_vars( + BoundRegionConversionTime::FnCall, + poly_trait_ref, + ); + (xform_self_ty, xform_ret_ty) = self.xform_self_ty( + probe.item, + trait_ref.self_ty(), + trait_ref.args.as_slice(), + ); + match ocx.relate( + cause, + self.param_env(), + self.variance(), + self_ty, + xform_self_ty, + ) { + Ok(()) => {} + Err(err) => { + debug!("--> cannot relate self-types {:?}", err); + return ProbeResult::NoMatch; + } + } + let obligation = Obligation::new( + self.interner(), + cause.clone(), + self.param_env(), + Binder::dummy(trait_ref), + ); + + // We only need this hack to deal with fatal overflow in the old solver. 
+ ocx.register_obligation(obligation); + + trait_predicate = Some(trait_ref.upcast(self.interner())); + } + ObjectCandidate(poly_trait_ref) | WhereClauseCandidate(poly_trait_ref) => { + let trait_ref = self.infcx().instantiate_binder_with_fresh_vars( + BoundRegionConversionTime::FnCall, + poly_trait_ref, + ); + (xform_self_ty, xform_ret_ty) = self.xform_self_ty( + probe.item, + trait_ref.self_ty(), + trait_ref.args.as_slice(), + ); + + if matches!(probe.kind, WhereClauseCandidate(_)) { + // `WhereClauseCandidate` requires that the self type is a param, + // because it has special behavior with candidate preference as an + // inherent pick. + match ocx.structurally_normalize_ty( + cause, + self.param_env(), + trait_ref.self_ty(), + ) { + Ok(ty) => { + if !matches!(ty.kind(), TyKind::Param(_)) { + debug!("--> not a param ty: {xform_self_ty:?}"); + return ProbeResult::NoMatch; + } + } + Err(errors) => { + debug!("--> cannot relate self-types {:?}", errors); + return ProbeResult::NoMatch; + } + } + } + + match ocx.relate( + cause, + self.param_env(), + self.variance(), + self_ty, + xform_self_ty, + ) { + Ok(()) => {} + Err(err) => { + debug!("--> cannot relate self-types {:?}", err); + return ProbeResult::NoMatch; + } + } + } + } + + // See <https://github.com/rust-lang/trait-system-refactor-initiative/issues/134>. + // + // In the new solver, check the well-formedness of the return type. + // This emulates, in a way, the predicates that fall out of + // normalizing the return type in the old solver. + // + // FIXME(-Znext-solver): We alternatively could check the predicates of + // the method itself hold, but we intentionally do not do this in the old + // solver b/c of cycles, and doing it in the new solver would be stronger. + // This should be fixed in the future, since it likely leads to much better + // method winnowing. 
+ if let Some(xform_ret_ty) = xform_ret_ty { + ocx.register_obligation(Obligation::new( + self.interner(), + cause.clone(), + self.param_env(), + ClauseKind::WellFormed(xform_ret_ty.into()), + )); + } + + if !ocx.try_evaluate_obligations().is_empty() { + result = ProbeResult::NoMatch; + } + + if self.should_reject_candidate_due_to_opaque_treated_as_rigid(trait_predicate) { + result = ProbeResult::NoMatch; + } + + // FIXME: Need to leak-check here. + // if let Err(_) = self.leak_check(outer_universe, Some(snapshot)) { + // result = ProbeResult::NoMatch; + // } + + result + }) + } + + /// Trait candidates for not-yet-defined opaque types are a somewhat hacky. + /// + /// We want to only accept trait methods if they were hold even if the + /// opaque types were rigid. To handle this, we both check that for trait + /// candidates the goal were to hold even when treating opaques as rigid, + /// see [OpaqueTypesJank](rustc_trait_selection::solve::OpaqueTypesJank). + /// + /// We also check that all opaque types encountered as self types in the + /// autoderef chain don't get constrained when applying the candidate. + /// Importantly, this also handles calling methods taking `&self` on + /// `impl Trait` to reject the "by-self" candidate. + /// + /// This needs to happen at the end of `consider_probe` as we need to take + /// all the constraints from that into account. + #[instrument(level = "debug", skip(self), ret)] + fn should_reject_candidate_due_to_opaque_treated_as_rigid( + &self, + trait_predicate: Option<Predicate<'db>>, + ) -> bool { + // This function is what hacky and doesn't perfectly do what we want it to. + // It's not soundness critical and we should be able to freely improve this + // in the future. + // + // Some concrete edge cases include the fact that `goal_may_hold_opaque_types_jank` + // also fails if there are any constraints opaques which are never used as a self + // type. 
We also allow where-bounds which are currently ambiguous but end up + // constraining an opaque later on. + + // Check whether the trait candidate would not be applicable if the + // opaque type were rigid. + if let Some(predicate) = trait_predicate { + let goal = Goal { param_env: self.param_env(), predicate }; + if !self.infcx().goal_may_hold_opaque_types_jank(goal) { + return true; + } + } + + // Check whether any opaque types in the autoderef chain have been + // constrained. + for step in self.steps { + if step.self_ty_is_opaque { + debug!(?step.autoderefs, ?step.self_ty, "self_type_is_opaque"); + let constrained_opaque = self.infcx().probe(|_| { + // If we fail to instantiate the self type of this + // step, this part of the deref-chain is no longer + // reachable. In this case we don't care about opaque + // types there. + let Ok(ok) = self.infcx().instantiate_query_response_and_region_obligations( + &ObligationCause::new(), + self.param_env(), + self.orig_steps_var_values, + &step.self_ty, + ) else { + debug!("failed to instantiate self_ty"); + return false; + }; + let mut ocx = ObligationCtxt::new(self.infcx()); + let self_ty = ocx.register_infer_ok_obligations(ok); + if !ocx.try_evaluate_obligations().is_empty() { + debug!("failed to prove instantiate self_ty obligations"); + return false; + } + + !self.infcx().resolve_vars_if_possible(self_ty).is_ty_var() + }); + if constrained_opaque { + debug!("opaque type has been constrained"); + return true; + } + } + } + + false + } + + /// Sometimes we get in a situation where we have multiple probes that are all impls of the + /// same trait, but we don't know which impl to use. In this case, since in all cases the + /// external interface of the method can be determined from the trait, it's ok not to decide. + /// We can basically just collapse all of the probes for various impls into one where-clause + /// probe. 
This will result in a pending obligation so when more type-info is available we can + /// make the final decision. + /// + /// Example (`tests/ui/methods/method-two-trait-defer-resolution-1.rs`): + /// + /// ```ignore (illustrative) + /// trait Foo { ... } + /// impl Foo for Vec<i32> { ... } + /// impl Foo for Vec<usize> { ... } + /// ``` + /// + /// Now imagine the receiver is `Vec<_>`. It doesn't really matter at this time which impl we + /// use, so it's ok to just commit to "using the method from the trait Foo". + fn collapse_candidates_to_trait_pick( + &self, + self_ty: Ty<'db>, + probes: &[&Candidate<'db>], + ) -> Option<Pick<'db>> { + // Do all probes correspond to the same trait? + let ItemContainerId::TraitId(container) = probes[0].item.container(self.db()) else { + return None; + }; + for p in &probes[1..] { + let ItemContainerId::TraitId(p_container) = p.item.container(self.db()) else { + return None; + }; + if p_container != container { + return None; + } + } + + // FIXME: check the return type here somehow. + // If so, just use this trait and call it a day. + Some(Pick { + item: probes[0].item, + kind: TraitPick(container), + autoderefs: 0, + autoref_or_ptr_adjustment: None, + self_ty, + receiver_steps: None, + shadowed_candidates: vec![], + }) + } + + /// Much like `collapse_candidates_to_trait_pick`, this method allows us to collapse + /// multiple conflicting picks if there is one pick whose trait container is a subtrait + /// of the trait containers of all of the other picks. + /// + /// This implements RFC #3624. 
+ fn collapse_candidates_to_subtrait_pick( + &self, + self_ty: Ty<'db>, + probes: &[&Candidate<'db>], + ) -> Option<Pick<'db>> { + let mut child_candidate = probes[0]; + let ItemContainerId::TraitId(mut child_trait) = child_candidate.item.container(self.db()) + else { + return None; + }; + let mut supertraits: FxHashSet<_> = + supertrait_def_ids(self.interner(), child_trait.into()).collect(); + + let mut remaining_candidates: Vec<_> = probes[1..].to_vec(); + while !remaining_candidates.is_empty() { + let mut made_progress = false; + let mut next_round = vec![]; + + for remaining_candidate in remaining_candidates { + let ItemContainerId::TraitId(remaining_trait) = + remaining_candidate.item.container(self.db()) + else { + return None; + }; + if supertraits.contains(&remaining_trait.into()) { + made_progress = true; + continue; + } + + // This pick is not a supertrait of the `child_pick`. + // Check if it's a subtrait of the `child_pick`, instead. + // If it is, then it must have been a subtrait of every + // other pick we've eliminated at this point. It will + // take over at this point. + let remaining_trait_supertraits: FxHashSet<_> = + supertrait_def_ids(self.interner(), remaining_trait.into()).collect(); + if remaining_trait_supertraits.contains(&child_trait.into()) { + child_candidate = remaining_candidate; + child_trait = remaining_trait; + supertraits = remaining_trait_supertraits; + made_progress = true; + continue; + } + + // `child_pick` is not a supertrait of this pick. + // Don't bail here, since we may be comparing two supertraits + // of a common subtrait. These two supertraits won't be related + // at all, but we will pick them up next round when we find their + // child as we continue iterating in this round. + next_round.push(remaining_candidate); + } + + if made_progress { + // If we've made progress, iterate again. 
+ remaining_candidates = next_round; + } else { + // Otherwise, we must have at least two candidates which + // are not related to each other at all. + return None; + } + } + + Some(Pick { + item: child_candidate.item, + kind: TraitPick(child_trait), + autoderefs: 0, + autoref_or_ptr_adjustment: None, + self_ty, + shadowed_candidates: probes + .iter() + .map(|c| c.item) + .filter(|item| *item != child_candidate.item) + .collect(), + receiver_steps: None, + }) + } + + /////////////////////////////////////////////////////////////////////////// + // MISCELLANY + fn has_applicable_self(&self, item: CandidateId) -> bool { + // "Fast track" -- check for usage of sugar when in method call + // mode. + // + // In Path mode (i.e., resolving a value like `T::next`), consider any + // associated value (i.e., methods, constants). + match item { + CandidateId::FunctionId(id) if self.mode == Mode::MethodCall => { + self.db().function_signature(id).has_self_param() + } + _ => true, + } + // FIXME -- check for types that deref to `Self`, + // like `Rc<Self>` and so on. + // + // Note also that the current code will break if this type + // includes any of the type parameters defined on the method + // -- but this could be overcome. 
+ } + + fn record_static_candidate(&mut self, source: CandidateSource) { + self.static_candidates.push(source); + } + + #[instrument(level = "debug", skip(self))] + fn xform_self_ty( + &self, + item: CandidateId, + impl_ty: Ty<'db>, + args: &[GenericArg<'db>], + ) -> (Ty<'db>, Option<Ty<'db>>) { + if let CandidateId::FunctionId(item) = item + && self.mode == Mode::MethodCall + { + let sig = self.xform_method_sig(item, args); + (sig.inputs().as_slice()[0], Some(sig.output())) + } else { + (impl_ty, None) + } + } + + #[instrument(level = "debug", skip(self))] + fn xform_method_sig(&self, method: FunctionId, args: &[GenericArg<'db>]) -> FnSig<'db> { + let fn_sig = self.db().callable_item_signature(method.into()); + debug!(?fn_sig); + + assert!(!args.has_escaping_bound_vars()); + + // It is possible for type parameters or early-bound lifetimes + // to appear in the signature of `self`. The generic parameters + // we are given do not include type/lifetime parameters for the + // method yet. So create fresh variables here for those too, + // if there are any. + let generics = self.db().generic_params(method.into()); + + let xform_fn_sig = if generics.is_empty() { + fn_sig.instantiate(self.interner(), args) + } else { + let args = GenericArgs::for_item( + self.interner(), + method.into(), + |param_index, param_id, _| { + let i = param_index as usize; + if i < args.len() { + args[i] + } else { + match param_id { + GenericParamId::LifetimeParamId(_) => { + // In general, during probe we erase regions. 
+ Region::new_erased(self.interner()).into() + } + GenericParamId::TypeParamId(_) => self.infcx().next_ty_var().into(), + GenericParamId::ConstParamId(_) => self.infcx().next_const_var().into(), + } + } + }, + ); + fn_sig.instantiate(self.interner(), args) + }; + + self.interner().instantiate_bound_regions_with_erased(xform_fn_sig) + } + + fn with_impl_item(&mut self, def_id: ImplId, callback: impl FnMut(&mut Self, CandidateId)) { + Choice::with_impl_or_trait_item(self, &def_id.impl_items(self.db()).items, callback) + } + + fn with_trait_item(&mut self, def_id: TraitId, callback: impl FnMut(&mut Self, CandidateId)) { + Choice::with_impl_or_trait_item(self, &def_id.trait_items(self.db()).items, callback) + } +} + +/// Determine if the given associated item type is relevant in the current context. +fn is_relevant_kind_for_mode(mode: Mode, kind: AssocItemId) -> Option<CandidateId> { + Some(match (mode, kind) { + (Mode::MethodCall, AssocItemId::FunctionId(id)) => id.into(), + (Mode::Path, AssocItemId::ConstId(id)) => id.into(), + (Mode::Path, AssocItemId::FunctionId(id)) => id.into(), + _ => return None, + }) +} + +impl<'db> Candidate<'db> { + fn to_unadjusted_pick(&self, self_ty: Ty<'db>) -> Pick<'db> { + Pick { + item: self.item, + kind: match self.kind { + InherentImplCandidate { impl_def_id, .. } => InherentImplPick(impl_def_id), + ObjectCandidate(trait_ref) => ObjectPick(trait_ref.skip_binder().def_id.0), + TraitCandidate(trait_ref) => TraitPick(trait_ref.skip_binder().def_id.0), + WhereClauseCandidate(trait_ref) => { + // Only trait derived from where-clauses should + // appear here, so they should not contain any + // inference variables or other artifacts. This + // means they are safe to put into the + // `WhereClausePick`. 
+ assert!( + !trait_ref.skip_binder().args.has_infer() + && !trait_ref.skip_binder().args.has_placeholders() + ); + + WhereClausePick(trait_ref) + } + }, + autoderefs: 0, + autoref_or_ptr_adjustment: None, + self_ty, + receiver_steps: match self.kind { + InherentImplCandidate { receiver_steps, .. } => Some(receiver_steps), + _ => None, + }, + shadowed_candidates: vec![], + } + } +}
diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs index 7aebe17..b5b691d 100644 --- a/crates/hir-ty/src/mir.rs +++ b/crates/hir-ty/src/mir.rs
@@ -12,7 +12,7 @@ use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use rustc_ast_ir::Mutability; use rustc_hash::FxHashMap; -use rustc_type_ir::inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, SliceLike, Ty as _}; use smallvec::{SmallVec, smallvec}; use stdx::{impl_from, never}; @@ -22,7 +22,6 @@ db::{HirDatabase, InternedClosureId}, display::{DisplayTarget, HirDisplay}, infer::PointerCast, - lang_items::is_box, next_solver::{ Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind, infer::{InferCtxt, traits::ObligationCause}, @@ -185,7 +184,7 @@ match self { ProjectionElem::Deref => match base.kind() { TyKind::RawPtr(inner, _) | TyKind::Ref(_, inner, _) => inner, - TyKind::Adt(adt_def, subst) if is_box(db, adt_def.def_id().0) => subst.type_at(0), + TyKind::Adt(adt_def, subst) if adt_def.is_box() => subst.type_at(0), _ => { never!( "Overloaded deref on type {} is not a projection",
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs index f242115..88acd49 100644 --- a/crates/hir-ty/src/mir/eval/tests.rs +++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -544,7 +544,7 @@ fn for_loop() { check_pass( r#" -//- minicore: iterator, add +//- minicore: iterator, add, builtin_impls fn should_not_reach() { _ // FIXME: replace this function with panic when that works } @@ -706,7 +706,7 @@ fn closure_state() { check_pass( r#" -//- minicore: fn, add, copy +//- minicore: fn, add, copy, builtin_impls fn should_not_reach() { _ // FIXME: replace this function with panic when that works }
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 1439c43..6704012 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs
@@ -33,6 +33,7 @@ infer::{CaptureKind, CapturedItem, TypeMismatch, cast::CastTy}, inhabitedness::is_ty_uninhabited_from, layout::LayoutError, + method_resolution::CandidateId, mir::{ AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, BorrowKind, CastKind, Either, Expr, FieldId, GenericArgs, Idx, InferenceResult, Local, LocalId, MemoryMap, MirBody, MirSpan, @@ -388,15 +389,15 @@ ); Ok(Some(current)) } - Adjust::Borrow(AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) => { - let Some((p, current)) = - self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? - else { - return Ok(None); - }; - let bk = BorrowKind::from_rustc(*m); - self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into()); - Ok(Some(current)) + Adjust::Borrow(AutoBorrow::Ref(m)) => self.lower_expr_to_place_with_borrow_adjust( + expr_id, + place, + current, + rest, + (*m).into(), + ), + Adjust::Borrow(AutoBorrow::RawPtr(m)) => { + self.lower_expr_to_place_with_borrow_adjust(expr_id, place, current, rest, *m) } Adjust::Pointer(cast) => { let Some((p, current)) = @@ -421,6 +422,24 @@ } } + fn lower_expr_to_place_with_borrow_adjust( + &mut self, + expr_id: ExprId, + place: Place<'db>, + current: BasicBlockId<'db>, + rest: &[Adjustment<'db>], + m: Mutability, + ) -> Result<'db, Option<BasicBlockId<'db>>> { + let Some((p, current)) = + self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? 
+ else { + return Ok(None); + }; + let bk = BorrowKind::from_rustc(m); + self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into()); + Ok(Some(current)) + } + fn lower_expr_to_place( &mut self, expr_id: ExprId, @@ -460,18 +479,14 @@ let pr = if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) { match assoc { - hir_def::AssocItemId::ConstId(c) => { + CandidateId::ConstId(c) => { self.lower_const(c.into(), current, place, subst, expr_id.into())?; return Ok(Some(current)); } - hir_def::AssocItemId::FunctionId(_) => { + CandidateId::FunctionId(_) => { // FnDefs are zero sized, no action is needed. return Ok(Some(current)); } - hir_def::AssocItemId::TypeAliasId(_) => { - // FIXME: If it is unreachable, use proper error instead of `not_supported`. - not_supported!("associated functions and types") - } } } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) { match variant {
diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs index 52f1412..bceafae 100644 --- a/crates/hir-ty/src/mir/lower/as_place.rs +++ b/crates/hir-ty/src/mir/lower/as_place.rs
@@ -193,7 +193,7 @@ return self.lower_overloaded_deref( current, p, - self.expr_ty_after_adjustments(*expr), + self.expr_ty_without_adjust(*expr), self.expr_ty_without_adjust(expr_id), expr_id.into(), 'b: {
diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs index b1b86ab..c3a4814 100644 --- a/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -1,9 +1,8 @@ //! MIR lowering for patterns -use hir_def::{AssocItemId, hir::ExprId, signatures::VariantFields}; +use hir_def::{hir::ExprId, signatures::VariantFields}; use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _}; -use crate::next_solver::GenericArgs; use crate::{ BindingMode, mir::{ @@ -16,6 +15,7 @@ }, }, }; +use crate::{method_resolution::CandidateId, next_solver::GenericArgs}; macro_rules! not_supported { ($x: expr) => { @@ -207,7 +207,7 @@ mode, )? } - Pat::Range { start, end } => { + Pat::Range { start, end, range_type: _ } => { let mut add_check = |l: &ExprId, binop| -> Result<'db, ()> { let lv = self.lower_literal_or_const_to_operand(self.infer[pattern], l)?; let else_target = *current_else.get_or_insert_with(|| self.new_basic_block()); @@ -393,7 +393,7 @@ } let (c, subst) = 'b: { if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) - && let AssocItemId::ConstId(c) = x.0 + && let CandidateId::ConstId(c) = x.0 { break 'b (c, x.1); }
diff --git a/crates/hir-ty/src/next_solver/def_id.rs b/crates/hir-ty/src/next_solver/def_id.rs index 77f2106..2c30922 100644 --- a/crates/hir-ty/src/next_solver/def_id.rs +++ b/crates/hir-ty/src/next_solver/def_id.rs
@@ -1,8 +1,9 @@ //! Definition of `SolverDefId` use hir_def::{ - AdtId, CallableDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, - GeneralConstId, GenericDefId, ImplId, StaticId, StructId, TraitId, TypeAliasId, UnionId, + AdtId, AttrDefId, CallableDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, + GeneralConstId, GenericDefId, HasModule, ImplId, ModuleId, StaticId, StructId, TraitId, + TypeAliasId, UnionId, db::DefDatabase, }; use rustc_type_ir::inherent; use stdx::impl_from; @@ -154,6 +155,28 @@ } } +impl TryFrom<SolverDefId> for AttrDefId { + type Error = (); + #[inline] + fn try_from(value: SolverDefId) -> Result<Self, Self::Error> { + match value { + SolverDefId::AdtId(it) => Ok(it.into()), + SolverDefId::ConstId(it) => Ok(it.into()), + SolverDefId::FunctionId(it) => Ok(it.into()), + SolverDefId::ImplId(it) => Ok(it.into()), + SolverDefId::StaticId(it) => Ok(it.into()), + SolverDefId::TraitId(it) => Ok(it.into()), + SolverDefId::TypeAliasId(it) => Ok(it.into()), + SolverDefId::EnumVariantId(it) => Ok(it.into()), + SolverDefId::Ctor(Ctor::Struct(it)) => Ok(it.into()), + SolverDefId::Ctor(Ctor::Enum(it)) => Ok(it.into()), + SolverDefId::InternedClosureId(_) + | SolverDefId::InternedCoroutineId(_) + | SolverDefId::InternedOpaqueTyId(_) => Err(()), + } + } +} + impl TryFrom<SolverDefId> for DefWithBodyId { type Error = (); @@ -218,6 +241,28 @@ } } +impl HasModule for SolverDefId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { + match *self { + SolverDefId::AdtId(id) => id.module(db), + SolverDefId::ConstId(id) => id.module(db), + SolverDefId::FunctionId(id) => id.module(db), + SolverDefId::ImplId(id) => id.module(db), + SolverDefId::StaticId(id) => id.module(db), + SolverDefId::TraitId(id) => id.module(db), + SolverDefId::TypeAliasId(id) => id.module(db), + SolverDefId::InternedClosureId(id) => id.loc(db).0.module(db), + SolverDefId::InternedCoroutineId(id) => id.loc(db).0.module(db), + SolverDefId::InternedOpaqueTyId(id) 
=> match id.loc(db) { + crate::ImplTraitId::ReturnTypeImplTrait(owner, _) => owner.module(db), + crate::ImplTraitId::TypeAliasImplTrait(owner, _) => owner.module(db), + }, + SolverDefId::Ctor(Ctor::Enum(id)) | SolverDefId::EnumVariantId(id) => id.module(db), + SolverDefId::Ctor(Ctor::Struct(id)) => id.module(db), + } + } +} + impl<'db> inherent::DefId<DbInterner<'db>> for SolverDefId { fn as_local(self) -> Option<SolverDefId> { Some(self)
diff --git a/crates/hir-ty/src/next_solver/fold.rs b/crates/hir-ty/src/next_solver/fold.rs index f776b6e..7836419 100644 --- a/crates/hir-ty/src/next_solver/fold.rs +++ b/crates/hir-ty/src/next_solver/fold.rs
@@ -5,7 +5,7 @@ TypeVisitableExt, inherent::IntoKind, }; -use crate::next_solver::BoundConst; +use crate::next_solver::{BoundConst, FxIndexMap}; use super::{ Binder, BoundRegion, BoundTy, Const, ConstKind, DbInterner, Predicate, Region, Ty, TyKind, @@ -158,3 +158,65 @@ t.fold_with(&mut Folder { interner, callback }) } + +impl<'db> DbInterner<'db> { + /// Replaces all regions bound by the given `Binder` with the + /// results returned by the closure; the closure is expected to + /// return a free region (relative to this binder), and hence the + /// binder is removed in the return type. The closure is invoked + /// once for each unique `BoundRegionKind`; multiple references to the + /// same `BoundRegionKind` will reuse the previous result. A map is + /// returned at the end with each bound region and the free region + /// that replaced it. + /// + /// # Panics + /// + /// This method only replaces late bound regions. Any types or + /// constants bound by `value` will cause an ICE. + pub fn instantiate_bound_regions<T, F>( + self, + value: Binder<'db, T>, + mut fld_r: F, + ) -> (T, FxIndexMap<BoundRegion, Region<'db>>) + where + F: FnMut(BoundRegion) -> Region<'db>, + T: TypeFoldable<DbInterner<'db>>, + { + let mut region_map = FxIndexMap::default(); + let real_fld_r = |br: BoundRegion| *region_map.entry(br).or_insert_with(|| fld_r(br)); + let value = self.instantiate_bound_regions_uncached(value, real_fld_r); + (value, region_map) + } + + pub fn instantiate_bound_regions_uncached<T, F>( + self, + value: Binder<'db, T>, + mut replace_regions: F, + ) -> T + where + F: FnMut(BoundRegion) -> Region<'db>, + T: TypeFoldable<DbInterner<'db>>, + { + let value = value.skip_binder(); + if !value.has_escaping_bound_vars() { + value + } else { + let delegate = FnMutDelegate { + regions: &mut replace_regions, + types: &mut |b| panic!("unexpected bound ty in binder: {b:?}"), + consts: &mut |b| panic!("unexpected bound ct in binder: {b:?}"), + }; + let mut replacer = 
BoundVarReplacer::new(self, delegate); + value.fold_with(&mut replacer) + } + } + + /// Replaces any late-bound regions bound in `value` with `'erased`. Useful in codegen but also + /// method lookup and a few other places where precise region relationships are not required. + pub fn instantiate_bound_regions_with_erased<T>(self, value: Binder<'db, T>) -> T + where + T: TypeFoldable<DbInterner<'db>>, + { + self.instantiate_bound_regions(value, |_| Region::new_erased(self)).0 + } +}
diff --git a/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs b/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs index 7995545..1029a7f 100644 --- a/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs +++ b/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs
@@ -10,8 +10,8 @@ use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar}; use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _}; use rustc_type_ir::{ - BoundVar, BoundVarIndexKind, CanonicalQueryInput, DebruijnIndex, Flags, InferConst, RegionKind, - TyVid, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, + BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags, + TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, }; use smallvec::SmallVec; use tracing::debug; @@ -19,7 +19,7 @@ use crate::next_solver::infer::InferCtxt; use crate::next_solver::{ Binder, Canonical, CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, GenericArg, - ParamEnvAnd, Placeholder, Region, Ty, TyKind, + Placeholder, Region, Ty, TyKind, }; /// When we canonicalize a value to form a query, we wind up replacing @@ -66,33 +66,19 @@ /// [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html#canonicalizing-the-query pub fn canonicalize_query<V>( &self, - value: ParamEnvAnd<'db, V>, + value: V, query_state: &mut OriginalQueryValues<'db>, - ) -> CanonicalQueryInput<DbInterner<'db>, ParamEnvAnd<'db, V>> + ) -> Canonical<'db, V> where V: TypeFoldable<DbInterner<'db>>, { - let (param_env, value) = value.into_parts(); - // FIXME(#118965): We don't canonicalize the static lifetimes that appear in the - // `param_env` because they are treated differently by trait selection. 
- let canonical_param_env = Canonicalizer::canonicalize( - param_env, - self, - self.interner, - &CanonicalizeFreeRegionsOtherThanStatic, - query_state, - ); - - let canonical = Canonicalizer::canonicalize_with_base( - canonical_param_env, + Canonicalizer::canonicalize( value, self, self.interner, &CanonicalizeAllFreeRegions, query_state, ) - .unchecked_map(|(param_env, value)| ParamEnvAnd { param_env, value }); - CanonicalQueryInput { canonical, typing_mode: self.typing_mode() } } /// Canonicalizes a query *response* `V`. When we canonicalize a @@ -285,26 +271,6 @@ } } -struct CanonicalizeFreeRegionsOtherThanStatic; - -impl CanonicalizeMode for CanonicalizeFreeRegionsOtherThanStatic { - fn canonicalize_free_region<'db>( - &self, - canonicalizer: &mut Canonicalizer<'_, 'db>, - r: Region<'db>, - ) -> Region<'db> { - if r.is_static() { r } else { canonicalizer.canonical_var_for_region_in_root_universe(r) } - } - - fn any(&self) -> bool { - true - } - - fn preserve_universes(&self) -> bool { - false - } -} - struct Canonicalizer<'cx, 'db> { /// Set to `None` to disable the resolution of inference variables. infcx: &'cx InferCtxt<'db>,
diff --git a/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs b/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs index 6360291..13c620c 100644 --- a/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs +++ b/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
@@ -1,21 +1,31 @@ //! This module contains code to instantiate new values into a -//! `Canonical<'tcx, T>`. +//! `Canonical<'db, T>`. //! //! For an overview of what canonicalization is and how it fits into //! rustc, check out the [chapter in the rustc dev guide][c]. //! //! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html +use std::{fmt::Debug, iter}; + use crate::next_solver::{ - BoundConst, BoundRegion, BoundTy, Canonical, CanonicalVarValues, Clauses, Const, ConstKind, - DbInterner, GenericArg, Predicate, Region, RegionKind, Ty, TyKind, fold::FnMutDelegate, + BoundConst, BoundRegion, BoundTy, Canonical, CanonicalVarKind, CanonicalVarValues, Clauses, + Const, ConstKind, DbInterner, GenericArg, ParamEnv, Predicate, Region, RegionKind, Ty, TyKind, + fold::FnMutDelegate, + infer::{ + InferCtxt, InferOk, InferResult, + canonical::{QueryRegionConstraints, QueryResponse, canonicalizer::OriginalQueryValues}, + traits::{ObligationCause, PredicateObligations}, + }, }; use rustc_hash::FxHashMap; +use rustc_index::{Idx as _, IndexVec}; use rustc_type_ir::{ - BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, - TypeVisitableExt, + BoundVar, BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder, + TypeSuperFoldable, TypeVisitableExt, UniverseIndex, inherent::{GenericArg as _, IntoKind, SliceLike}, }; +use tracing::{debug, instrument}; pub trait CanonicalExt<'db, V> { fn instantiate(&self, tcx: DbInterner<'db>, var_values: &CanonicalVarValues<'db>) -> V @@ -169,3 +179,331 @@ c.super_fold_with(self) } } + +impl<'db> InferCtxt<'db> { + /// A version of `make_canonicalized_query_response` that does + /// not pack in obligations, for contexts that want to drop + /// pending obligations instead of treating them as an ambiguity (e.g. + /// typeck "probing" contexts). 
+ /// + /// If you DO want to keep track of pending obligations (which + /// include all region obligations, so this includes all cases + /// that care about regions) with this function, you have to + /// do it yourself, by e.g., having them be a part of the answer. + pub fn make_query_response_ignoring_pending_obligations<T>( + &self, + inference_vars: CanonicalVarValues<'db>, + answer: T, + ) -> Canonical<'db, QueryResponse<'db, T>> + where + T: TypeFoldable<DbInterner<'db>>, + { + // While we ignore region constraints and pending obligations, + // we do return constrained opaque types to avoid unconstrained + // inference variables in the response. This is important as we want + // to check that opaques in deref steps stay unconstrained. + // + // This doesn't handle the more general case for non-opaques as + // ambiguous `Projection` obligations have same the issue. + let opaque_types = self + .inner + .borrow_mut() + .opaque_type_storage + .iter_opaque_types() + .map(|(k, v)| (k, v.ty)) + .collect(); + + self.canonicalize_response(QueryResponse { + var_values: inference_vars, + region_constraints: QueryRegionConstraints::default(), + opaque_types, + value: answer, + }) + } + + /// Given the (canonicalized) result to a canonical query, + /// instantiates the result so it can be used, plugging in the + /// values from the canonical query. (Note that the result may + /// have been ambiguous; you should check the certainty level of + /// the query before applying this function.) + /// + /// To get a good understanding of what is happening here, check + /// out the [chapter in the rustc dev guide][c]. 
+ /// + /// [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html#processing-the-canonicalized-query-result + pub fn instantiate_query_response_and_region_obligations<R>( + &self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + original_values: &OriginalQueryValues<'db>, + query_response: &Canonical<'db, QueryResponse<'db, R>>, + ) -> InferResult<'db, R> + where + R: TypeFoldable<DbInterner<'db>>, + { + let InferOk { value: result_args, obligations } = + self.query_response_instantiation(cause, param_env, original_values, query_response)?; + + for predicate in &query_response.value.region_constraints.outlives { + let predicate = instantiate_value(self.interner, &result_args, *predicate); + self.register_outlives_constraint(predicate); + } + + for assumption in &query_response.value.region_constraints.assumptions { + let assumption = instantiate_value(self.interner, &result_args, *assumption); + self.register_region_assumption(assumption); + } + + let user_result: R = + query_response + .instantiate_projected(self.interner, &result_args, |q_r| q_r.value.clone()); + + Ok(InferOk { value: user_result, obligations }) + } + + /// Given the original values and the (canonicalized) result from + /// computing a query, returns an instantiation that can be applied + /// to the query result to convert the result back into the + /// original namespace. + /// + /// The instantiation also comes accompanied with subobligations + /// that arose from unification; these might occur if (for + /// example) we are doing lazy normalization and the value + /// assigned to a type variable is unified with an unnormalized + /// projection. 
+ fn query_response_instantiation<R>( + &self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + original_values: &OriginalQueryValues<'db>, + query_response: &Canonical<'db, QueryResponse<'db, R>>, + ) -> InferResult<'db, CanonicalVarValues<'db>> + where + R: Debug + TypeFoldable<DbInterner<'db>>, + { + debug!( + "query_response_instantiation(original_values={:#?}, query_response={:#?})", + original_values, query_response, + ); + + let mut value = self.query_response_instantiation_guess( + cause, + param_env, + original_values, + query_response, + )?; + + value.obligations.extend( + self.unify_query_response_instantiation_guess( + cause, + param_env, + original_values, + &value.value, + query_response, + )? + .into_obligations(), + ); + + Ok(value) + } + + /// Given the original values and the (canonicalized) result from + /// computing a query, returns a **guess** at an instantiation that + /// can be applied to the query result to convert the result back + /// into the original namespace. This is called a **guess** + /// because it uses a quick heuristic to find the values for each + /// canonical variable; if that quick heuristic fails, then we + /// will instantiate fresh inference variables for each canonical + /// variable instead. Therefore, the result of this method must be + /// properly unified + #[instrument(level = "debug", skip(self, param_env))] + fn query_response_instantiation_guess<R>( + &self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + original_values: &OriginalQueryValues<'db>, + query_response: &Canonical<'db, QueryResponse<'db, R>>, + ) -> InferResult<'db, CanonicalVarValues<'db>> + where + R: Debug + TypeFoldable<DbInterner<'db>>, + { + // For each new universe created in the query result that did + // not appear in the original query, create a local + // superuniverse. 
+ let mut universe_map = original_values.universe_map.clone(); + let num_universes_in_query = original_values.universe_map.len(); + let num_universes_in_response = query_response.max_universe.as_usize() + 1; + for _ in num_universes_in_query..num_universes_in_response { + universe_map.push(self.create_next_universe()); + } + assert!(!universe_map.is_empty()); // always have the root universe + assert_eq!(universe_map[UniverseIndex::ROOT.as_usize()], UniverseIndex::ROOT); + + // Every canonical query result includes values for each of + // the inputs to the query. Therefore, we begin by unifying + // these values with the original inputs that were + // canonicalized. + let result_values = &query_response.value.var_values; + assert_eq!(original_values.var_values.len(), result_values.len()); + + // Quickly try to find initial values for the canonical + // variables in the result in terms of the query. We do this + // by iterating down the values that the query gave to each of + // the canonical inputs. If we find that one of those values + // is directly equal to one of the canonical variables in the + // result, then we can type the corresponding value from the + // input. See the example above. + let mut opt_values: IndexVec<BoundVar, Option<GenericArg<'db>>> = + IndexVec::from_elem_n(None, query_response.variables.len()); + + for (original_value, result_value) in iter::zip(&original_values.var_values, result_values) + { + match result_value.kind() { + GenericArgKind::Type(result_value) => { + // We disable the instantiation guess for inference variables + // and only use it for placeholders. We need to handle the + // `sub_root` of type inference variables which would make this + // more involved. They are also a lot rarer than region variables. + if let TyKind::Bound(index_kind, b) = result_value.kind() + && !matches!( + query_response.variables.as_slice()[b.var.as_usize()], + CanonicalVarKind::Ty { .. 
} + ) + { + // We only allow a `Canonical` index in generic parameters. + assert!(matches!(index_kind, BoundVarIndexKind::Canonical)); + opt_values[b.var] = Some(*original_value); + } + } + GenericArgKind::Lifetime(result_value) => { + if let RegionKind::ReBound(index_kind, b) = result_value.kind() { + // We only allow a `Canonical` index in generic parameters. + assert!(matches!(index_kind, BoundVarIndexKind::Canonical)); + opt_values[b.var] = Some(*original_value); + } + } + GenericArgKind::Const(result_value) => { + if let ConstKind::Bound(index_kind, b) = result_value.kind() { + // We only allow a `Canonical` index in generic parameters. + assert!(matches!(index_kind, BoundVarIndexKind::Canonical)); + opt_values[b.var] = Some(*original_value); + } + } + } + } + + // Create result arguments: if we found a value for a + // given variable in the loop above, use that. Otherwise, use + // a fresh inference variable. + let interner = self.interner; + let variables = query_response.variables; + let var_values = + CanonicalVarValues::instantiate(interner, variables, |var_values, kind| { + if kind.universe() != UniverseIndex::ROOT { + // A variable from inside a binder of the query. While ideally these shouldn't + // exist at all, we have to deal with them for now. + self.instantiate_canonical_var(kind, var_values, |u| universe_map[u.as_usize()]) + } else if kind.is_existential() { + match opt_values[BoundVar::new(var_values.len())] { + Some(k) => k, + None => self.instantiate_canonical_var(kind, var_values, |u| { + universe_map[u.as_usize()] + }), + } + } else { + // For placeholders which were already part of the input, we simply map this + // universal bound variable back the placeholder of the input. 
+ opt_values[BoundVar::new(var_values.len())] + .expect("expected placeholder to be unified with itself during response") + } + }); + + let mut obligations = PredicateObligations::new(); + + // Carry all newly resolved opaque types to the caller's scope + for &(a, b) in &query_response.value.opaque_types { + let a = instantiate_value(self.interner, &var_values, a); + let b = instantiate_value(self.interner, &var_values, b); + debug!(?a, ?b, "constrain opaque type"); + // We use equate here instead of, for example, just registering the + // opaque type's hidden value directly, because the hidden type may have been an inference + // variable that got constrained to the opaque type itself. In that case we want to equate + // the generic args of the opaque with the generic params of its hidden type version. + obligations.extend( + self.at(cause, param_env) + .eq(Ty::new_opaque(self.interner, a.def_id, a.args), b)? + .obligations, + ); + } + + Ok(InferOk { value: var_values, obligations }) + } + + /// Given a "guess" at the values for the canonical variables in + /// the input, try to unify with the *actual* values found in the + /// query result. Often, but not always, this is a no-op, because + /// we already found the mapping in the "guessing" step. + /// + /// See also: [`Self::query_response_instantiation_guess`] + fn unify_query_response_instantiation_guess<R>( + &self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + original_values: &OriginalQueryValues<'db>, + result_args: &CanonicalVarValues<'db>, + query_response: &Canonical<'db, QueryResponse<'db, R>>, + ) -> InferResult<'db, ()> + where + R: Debug + TypeFoldable<DbInterner<'db>>, + { + // A closure that yields the result value for the given + // canonical variable; this is taken from + // `query_response.var_values` after applying the instantiation + // by `result_args`. 
+ let instantiated_query_response = |index: BoundVar| -> GenericArg<'db> { + query_response + .instantiate_projected(self.interner, result_args, |v| v.var_values[index]) + }; + + // Unify the original value for each variable with the value + // taken from `query_response` (after applying `result_args`). + self.unify_canonical_vars(cause, param_env, original_values, instantiated_query_response) + } + + /// Given two sets of values for the same set of canonical variables, unify them. + /// The second set is produced lazily by supplying indices from the first set. + fn unify_canonical_vars( + &self, + cause: &ObligationCause, + param_env: ParamEnv<'db>, + variables1: &OriginalQueryValues<'db>, + variables2: impl Fn(BoundVar) -> GenericArg<'db>, + ) -> InferResult<'db, ()> { + let mut obligations = PredicateObligations::new(); + for (index, value1) in variables1.var_values.iter().enumerate() { + let value2 = variables2(BoundVar::new(index)); + + match (value1.kind(), value2.kind()) { + (GenericArgKind::Type(v1), GenericArgKind::Type(v2)) => { + obligations.extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations()); + } + (GenericArgKind::Lifetime(re1), GenericArgKind::Lifetime(re2)) + if re1.is_erased() && re2.is_erased() => + { + // no action needed + } + (GenericArgKind::Lifetime(v1), GenericArgKind::Lifetime(v2)) => { + self.inner.borrow_mut().unwrap_region_constraints().make_eqregion(v1, v2); + } + (GenericArgKind::Const(v1), GenericArgKind::Const(v2)) => { + let ok = self.at(cause, param_env).eq(v1, v2)?; + obligations.extend(ok.into_obligations()); + } + _ => { + panic!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,); + } + } + } + Ok(InferOk { value: (), obligations }) + } +}
diff --git a/crates/hir-ty/src/next_solver/infer/canonical/mod.rs b/crates/hir-ty/src/next_solver/infer/canonical/mod.rs index b3bd0a4..a0420a5 100644 --- a/crates/hir-ty/src/next_solver/infer/canonical/mod.rs +++ b/crates/hir-ty/src/next_solver/infer/canonical/mod.rs
@@ -22,10 +22,12 @@ //! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html use crate::next_solver::{ - Canonical, CanonicalVarValues, Const, DbInterner, GenericArg, PlaceholderConst, - PlaceholderRegion, PlaceholderTy, Region, Ty, TyKind, infer::InferCtxt, + ArgOutlivesPredicate, Canonical, CanonicalVarValues, Const, DbInterner, GenericArg, + OpaqueTypeKey, PlaceholderConst, PlaceholderRegion, PlaceholderTy, Region, Ty, TyKind, + infer::InferCtxt, }; use instantiate::CanonicalExt; +use macros::{TypeFoldable, TypeVisitable}; use rustc_index::IndexVec; use rustc_type_ir::inherent::IntoKind; use rustc_type_ir::{CanonicalVarKind, InferTy, TypeFoldable, UniverseIndex, inherent::Ty as _}; @@ -135,3 +137,22 @@ } } } + +/// After we execute a query with a canonicalized key, we get back a +/// `Canonical<QueryResponse<..>>`. You can use +/// `instantiate_query_result` to access the data in this result. +#[derive(Clone, Debug, TypeVisitable, TypeFoldable)] +pub struct QueryResponse<'db, R> { + pub var_values: CanonicalVarValues<'db>, + pub region_constraints: QueryRegionConstraints<'db>, + pub opaque_types: Vec<(OpaqueTypeKey<'db>, Ty<'db>)>, + pub value: R, +} + +#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)] +pub struct QueryRegionConstraints<'db> { + pub outlives: Vec<QueryOutlivesConstraint<'db>>, + pub assumptions: Vec<ArgOutlivesPredicate<'db>>, +} + +pub type QueryOutlivesConstraint<'tcx> = ArgOutlivesPredicate<'tcx>;
diff --git a/crates/hir-ty/src/next_solver/infer/mod.rs b/crates/hir-ty/src/next_solver/infer/mod.rs index 7b8f52b..fcce04f 100644 --- a/crates/hir-ty/src/next_solver/infer/mod.rs +++ b/crates/hir-ty/src/next_solver/infer/mod.rs
@@ -29,9 +29,10 @@ use unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey}; use crate::next_solver::{ - BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, SolverContext, + ArgOutlivesPredicate, BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, Predicate, + SolverContext, fold::BoundVarReplacerDelegate, - infer::{select::EvaluationResult, traits::PredicateObligation}, + infer::{at::ToTrace, select::EvaluationResult, traits::PredicateObligation}, obligation_ctxt::ObligationCtxt, }; @@ -47,6 +48,7 @@ pub mod canonical; mod context; pub mod opaque_types; +mod outlives; pub mod region_constraints; pub mod relate; pub mod resolve; @@ -141,7 +143,14 @@ /// for each body-id in this map, which will process the /// obligations within. This is expected to be done 'late enough' /// that all type inference variables have been bound and so forth. - pub(crate) region_obligations: Vec<RegionObligation<'db>>, + pub(crate) region_obligations: Vec<TypeOutlivesConstraint<'db>>, + + /// The outlives bounds that we assume must hold about placeholders that + /// come from instantiating the binder of coroutine-witnesses. These bounds + /// are deduced from the well-formedness of the witness's types, and are + /// necessary because of the way we anonymize the regions in a coroutine, + /// which may cause types to no longer be considered well-formed. + region_assumptions: Vec<ArgOutlivesPredicate<'db>>, /// Caches for opaque type inference. 
pub(crate) opaque_type_storage: OpaqueTypeStorage<'db>, @@ -158,12 +167,13 @@ float_unification_storage: Default::default(), region_constraint_storage: Some(Default::default()), region_obligations: vec![], + region_assumptions: Default::default(), opaque_type_storage: Default::default(), } } #[inline] - pub fn region_obligations(&self) -> &[RegionObligation<'db>] { + pub fn region_obligations(&self) -> &[TypeOutlivesConstraint<'db>] { &self.region_obligations } @@ -318,7 +328,7 @@ /// See the `region_obligations` field for more information. #[derive(Clone, Debug)] -pub struct RegionObligation<'db> { +pub struct TypeOutlivesConstraint<'db> { pub sub_region: Region<'db>, pub sup_type: Ty<'db>, } @@ -387,6 +397,12 @@ self.typing_mode } + /// Evaluates whether the predicate can be satisfied (by any means) + /// in the given `ParamEnv`. + pub fn predicate_may_hold(&self, obligation: &PredicateObligation<'db>) -> bool { + self.evaluate_obligation(obligation).may_apply() + } + /// See the comment on [OpaqueTypesJank](crate::solve::OpaqueTypesJank) /// for more details. pub fn predicate_may_hold_opaque_types_jank( @@ -507,6 +523,22 @@ }) } + pub fn can_eq<T: ToTrace<'db>>(&self, param_env: ParamEnv<'db>, a: T, b: T) -> bool { + self.probe(|_| { + let mut ocx = ObligationCtxt::new(self); + let Ok(()) = ocx.eq(&ObligationCause::dummy(), param_env, a, b) else { + return false; + }; + ocx.try_evaluate_obligations().is_empty() + }) + } + + /// See the comment on [OpaqueTypesJank](crate::solve::OpaqueTypesJank) + /// for more details. 
+ pub fn goal_may_hold_opaque_types_jank(&self, goal: Goal<'db, Predicate<'db>>) -> bool { + <&SolverContext<'db>>::from(self).root_goal_may_hold_opaque_types_jank(goal) + } + pub fn type_is_copy_modulo_regions(&self, param_env: ParamEnv<'db>, ty: Ty<'db>) -> bool { let ty = self.resolve_vars_if_possible(ty); @@ -632,6 +664,14 @@ self.inner.borrow_mut().type_variables().num_vars() } + pub fn next_var_for_param(&self, id: GenericParamId) -> GenericArg<'db> { + match id { + GenericParamId::TypeParamId(_) => self.next_ty_var().into(), + GenericParamId::ConstParamId(_) => self.next_const_var().into(), + GenericParamId::LifetimeParamId(_) => self.next_region_var().into(), + } + } + pub fn next_ty_var(&self) -> Ty<'db> { self.next_ty_var_with_origin(TypeVariableOrigin { param_def_id: None }) } @@ -846,6 +886,22 @@ self.inner.borrow_mut().opaque_type_storage.iter_opaque_types().collect() } + pub fn has_opaques_with_sub_unified_hidden_type(&self, ty_vid: TyVid) -> bool { + let ty_sub_vid = self.sub_unification_table_root_var(ty_vid); + let inner = &mut *self.inner.borrow_mut(); + let mut type_variables = inner.type_variable_storage.with_log(&mut inner.undo_log); + inner.opaque_type_storage.iter_opaque_types().any(|(_, hidden_ty)| { + if let TyKind::Infer(InferTy::TyVar(hidden_vid)) = hidden_ty.ty.kind() { + let opaque_sub_vid = type_variables.sub_unification_table_root_var(hidden_vid); + if opaque_sub_vid == ty_sub_vid { + return true; + } + } + + false + }) + } + #[inline(always)] pub fn can_define_opaque_ty(&self, id: impl Into<SolverDefId>) -> bool { match self.typing_mode_unchecked() {
diff --git a/crates/hir-ty/src/next_solver/infer/outlives/mod.rs b/crates/hir-ty/src/next_solver/infer/outlives/mod.rs new file mode 100644 index 0000000..321c4b8 --- /dev/null +++ b/crates/hir-ty/src/next_solver/infer/outlives/mod.rs
@@ -0,0 +1 @@ +mod obligations;
diff --git a/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs b/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs new file mode 100644 index 0000000..befb200 --- /dev/null +++ b/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs
@@ -0,0 +1,68 @@ +use ena::undo_log::UndoLogs; +use rustc_type_ir::{OutlivesPredicate, TypeVisitableExt}; +use tracing::{debug, instrument}; + +use crate::next_solver::{ + ArgOutlivesPredicate, GenericArg, Region, RegionOutlivesPredicate, Ty, + infer::{InferCtxt, TypeOutlivesConstraint, snapshot::undo_log::UndoLog}, +}; + +impl<'db> InferCtxt<'db> { + pub fn register_outlives_constraint( + &self, + OutlivesPredicate(arg, r2): ArgOutlivesPredicate<'db>, + ) { + match arg { + GenericArg::Lifetime(r1) => { + self.register_region_outlives_constraint(OutlivesPredicate(r1, r2)); + } + GenericArg::Ty(ty1) => { + self.register_type_outlives_constraint(ty1, r2); + } + GenericArg::Const(_) => unreachable!(), + } + } + + pub fn register_region_outlives_constraint( + &self, + OutlivesPredicate(r_a, r_b): RegionOutlivesPredicate<'db>, + ) { + // `'a: 'b` ==> `'b <= 'a` + self.sub_regions(r_b, r_a); + } + + /// Registers that the given region obligation must be resolved + /// from within the scope of `body_id`. These regions are enqueued + /// and later processed by regionck, when full type information is + /// available (see `region_obligations` field for more + /// information). + #[instrument(level = "debug", skip(self))] + pub fn register_type_outlives_constraint_inner(&self, obligation: TypeOutlivesConstraint<'db>) { + let mut inner = self.inner.borrow_mut(); + inner.undo_log.push(UndoLog::PushTypeOutlivesConstraint); + inner.region_obligations.push(obligation); + } + + pub fn register_type_outlives_constraint(&self, sup_type: Ty<'db>, sub_region: Region<'db>) { + // `is_global` means the type has no params, infer, placeholder, or non-`'static` + // free regions. If the type has none of these things, then we can skip registering + // this outlives obligation since it has no components which affect lifetime + // checking in an interesting way. 
+ if sup_type.is_global() { + return; + } + + debug!(?sup_type, ?sub_region); + + self.register_type_outlives_constraint_inner(TypeOutlivesConstraint { + sup_type, + sub_region, + }); + } + + pub fn register_region_assumption(&self, assumption: ArgOutlivesPredicate<'db>) { + let mut inner = self.inner.borrow_mut(); + inner.undo_log.push(UndoLog::PushRegionAssumption); + inner.region_assumptions.push(assumption); + } +}
diff --git a/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs b/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs index c8ec8da..f246af1 100644 --- a/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs +++ b/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs
@@ -28,8 +28,8 @@ FloatUnificationTable(sv::UndoLog<ut::Delegate<FloatVid>>), RegionConstraintCollector(region_constraints::UndoLog<'db>), RegionUnificationTable(sv::UndoLog<ut::Delegate<RegionVidKey<'db>>>), - #[expect(dead_code, reason = "this is used in rustc")] - PushRegionObligation, + PushTypeOutlivesConstraint, + PushRegionAssumption, } macro_rules! impl_from { @@ -75,8 +75,13 @@ UndoLog::RegionUnificationTable(undo) => { self.region_constraint_storage.as_mut().unwrap().unification_table.reverse(undo) } - UndoLog::PushRegionObligation => { - self.region_obligations.pop(); + UndoLog::PushTypeOutlivesConstraint => { + let popped = self.region_obligations.pop(); + assert!(popped.is_some(), "pushed region constraint but could not pop it"); + } + UndoLog::PushRegionAssumption => { + let popped = self.region_assumptions.pop(); + assert!(popped.is_some(), "pushed region assumption but could not pop it"); } } }
diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs index b18e08b..39de17e 100644 --- a/crates/hir-ty/src/next_solver/interner.rs +++ b/crates/hir-ty/src/next_solver/interner.rs
@@ -1,14 +1,15 @@ //! Things related to the Interner in the next-trait-solver. -use std::{fmt, ops::ControlFlow}; +use std::fmt; +use rustc_ast_ir::{FloatTy, IntTy, UintTy}; pub use tls_cache::clear_tls_solver_cache; pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db}; use base_db::Crate; use hir_def::{ - AdtId, AttrDefId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, ItemContainerId, - StructId, UnionId, VariantId, + AdtId, AttrDefId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule, + ItemContainerId, StructId, UnionId, VariantId, lang_item::LangItem, signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags}, }; @@ -22,6 +23,7 @@ TypeVisitableExt, UniverseIndex, Upcast, Variance, elaborate::elaborate, error::TypeError, + fast_reject, inherent::{self, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _}, lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem}, solve::SizedTraitKind, @@ -30,12 +32,13 @@ use crate::{ FnAbi, db::{HirDatabase, InternedCoroutine, InternedCoroutineId}, - method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint}, + lower::GenericPredicates, + method_resolution::TraitImpls, next_solver::{ AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper, CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, OpaqueTypeKey, - RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper, - util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls}, + RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds, TraitIdWrapper, + TypeAliasIdWrapper, util::explicit_item_bounds, }, }; @@ -583,6 +586,10 @@ self.inner().flags.is_enum } + pub fn is_box(&self) -> bool { + self.inner().flags.is_box + } + #[inline] pub fn repr(self) -> ReprOptions { self.inner().repr @@ -1264,27 +1271,21 @@ }) } - #[tracing::instrument(skip(self), ret)] + #[tracing::instrument(skip(self))] fn item_bounds( self, def_id: Self::DefId, ) -> 
EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> { - explicit_item_bounds(self, def_id).map_bound(|bounds| { - Clauses::new_from_iter(self, elaborate(self, bounds).collect::<Vec<_>>()) - }) + explicit_item_bounds(self, def_id).map_bound(|bounds| elaborate(self, bounds)) } - #[tracing::instrument(skip(self), ret)] + #[tracing::instrument(skip(self))] fn item_self_bounds( self, def_id: Self::DefId, ) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> { - explicit_item_bounds(self, def_id).map_bound(|bounds| { - Clauses::new_from_iter( - self, - elaborate(self, bounds).filter_only_self().collect::<Vec<_>>(), - ) - }) + explicit_item_bounds(self, def_id) + .map_bound(|bounds| elaborate(self, bounds).filter_only_self()) } fn item_non_self_bounds( @@ -1309,9 +1310,8 @@ self, def_id: Self::DefId, ) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> { - let predicates = self.db().generic_predicates(def_id.try_into().unwrap()); - let predicates: Vec<_> = predicates.iter().cloned().collect(); - EarlyBinder::bind(predicates.into_iter()) + GenericPredicates::query_all(self.db, def_id.try_into().unwrap()) + .map_bound(|it| it.iter().copied()) } #[tracing::instrument(level = "debug", skip(self), ret)] @@ -1319,9 +1319,8 @@ self, def_id: Self::DefId, ) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> { - let predicates = self.db().generic_predicates_without_parent(def_id.try_into().unwrap()); - let predicates: Vec<_> = predicates.iter().cloned().collect(); - EarlyBinder::bind(predicates.into_iter()) + GenericPredicates::query_own(self.db, def_id.try_into().unwrap()) + .map_bound(|it| it.iter().copied()) } #[tracing::instrument(skip(self), ret)] @@ -1334,23 +1333,21 @@ _ => false, }; - let predicates: Vec<(Clause<'db>, Span)> = self - .db() - .generic_predicates(def_id.0.into()) - .iter() - .filter(|p| match p.kind().skip_binder() { - // rustc has the following assertion: - // 
https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608 - rustc_type_ir::ClauseKind::Trait(it) => is_self(it.self_ty()), - rustc_type_ir::ClauseKind::TypeOutlives(it) => is_self(it.0), - rustc_type_ir::ClauseKind::Projection(it) => is_self(it.self_ty()), - rustc_type_ir::ClauseKind::HostEffect(it) => is_self(it.self_ty()), - _ => false, - }) - .cloned() - .map(|p| (p, Span::dummy())) - .collect(); - EarlyBinder::bind(predicates) + GenericPredicates::query_explicit(self.db, def_id.0.into()).map_bound(move |predicates| { + predicates + .iter() + .copied() + .filter(move |p| match p.kind().skip_binder() { + // rustc has the following assertion: + // https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608 + ClauseKind::Trait(it) => is_self(it.self_ty()), + ClauseKind::TypeOutlives(it) => is_self(it.0), + ClauseKind::Projection(it) => is_self(it.self_ty()), + ClauseKind::HostEffect(it) => is_self(it.self_ty()), + _ => false, + }) + .map(|p| (p, Span::dummy())) + }) } #[tracing::instrument(skip(self), ret)] @@ -1368,25 +1365,25 @@ } } - let predicates: Vec<(Clause<'db>, Span)> = self - .db() - .generic_predicates(def_id.try_into().unwrap()) - .iter() - .filter(|p| match p.kind().skip_binder() { - rustc_type_ir::ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()), - rustc_type_ir::ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0), - rustc_type_ir::ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()), - rustc_type_ir::ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()), - // FIXME: Not sure is this correct to allow other clauses but we might replace - // `generic_predicates_ns` query here with something closer to rustc's - // `implied_bounds_with_filter`, which is more granular lowering than this - // "lower at once and then filter" implementation. 
- _ => true, - }) - .cloned() - .map(|p| (p, Span::dummy())) - .collect(); - EarlyBinder::bind(predicates) + GenericPredicates::query_explicit(self.db, def_id.try_into().unwrap()).map_bound( + |predicates| { + predicates + .iter() + .copied() + .filter(|p| match p.kind().skip_binder() { + ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()), + ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0), + ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()), + ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()), + // FIXME: Not sure is this correct to allow other clauses but we might replace + // `generic_predicates_ns` query here with something closer to rustc's + // `implied_bounds_with_filter`, which is more granular lowering than this + // "lower at once and then filter" implementation. + _ => true, + }) + .map(|p| (p, Span::dummy())) + }, + ) } fn impl_super_outlives( @@ -1396,15 +1393,12 @@ let trait_ref = self.db().impl_trait(impl_id.0).expect("expected an impl of trait"); trait_ref.map_bound(|trait_ref| { let clause: Clause<'_> = trait_ref.upcast(self); - Clauses::new_from_iter( - self, - rustc_type_ir::elaborate::elaborate(self, [clause]).filter(|clause| { - matches!( - clause.kind().skip_binder(), - ClauseKind::TypeOutlives(_) | ClauseKind::RegionOutlives(_) - ) - }), - ) + elaborate(self, [clause]).filter(|clause| { + matches!( + clause.kind().skip_binder(), + ClauseKind::TypeOutlives(_) | ClauseKind::RegionOutlives(_) + ) + }) }) } @@ -1609,79 +1603,152 @@ fn for_each_relevant_impl( self, - trait_: Self::TraitId, + trait_def_id: Self::TraitId, self_ty: Self::Ty, mut f: impl FnMut(Self::ImplId), ) { - let trait_ = trait_.0; - let self_ty_fp = TyFingerprint::for_trait_impl(self_ty); - let fps: &[TyFingerprint] = match self_ty.kind() { - TyKind::Infer(InferTy::IntVar(..)) => &ALL_INT_FPS, - TyKind::Infer(InferTy::FloatVar(..)) => &ALL_FLOAT_FPS, - _ => self_ty_fp.as_slice(), + let krate = self.krate.expect("trait solving requires setting 
`DbInterner::krate`"); + let trait_block = trait_def_id.0.loc(self.db).container.containing_block(); + let mut consider_impls_for_simplified_type = |simp: SimplifiedType| { + let type_block = simp.def().and_then(|def_id| { + let module = match def_id { + SolverDefId::AdtId(AdtId::StructId(id)) => id.module(self.db), + SolverDefId::AdtId(AdtId::EnumId(id)) => id.module(self.db), + SolverDefId::AdtId(AdtId::UnionId(id)) => id.module(self.db), + SolverDefId::TraitId(id) => id.module(self.db), + SolverDefId::TypeAliasId(id) => id.module(self.db), + SolverDefId::ConstId(_) + | SolverDefId::FunctionId(_) + | SolverDefId::ImplId(_) + | SolverDefId::StaticId(_) + | SolverDefId::InternedClosureId(_) + | SolverDefId::InternedCoroutineId(_) + | SolverDefId::InternedOpaqueTyId(_) + | SolverDefId::EnumVariantId(_) + | SolverDefId::Ctor(_) => return None, + }; + module.containing_block() + }); + TraitImpls::for_each_crate_and_block_trait_and_type( + self.db, + krate, + type_block, + trait_block, + &mut |impls| { + for &impl_ in impls.for_trait_and_self_ty(trait_def_id.0, &simp) { + f(impl_.into()); + } + }, + ); }; - if fps.is_empty() { - _ = for_trait_impls( - self.db(), - self.krate.expect("Must have self.krate"), - self.block, - trait_, - self_ty_fp, - |impls| { - for i in impls.for_trait(trait_) { - use rustc_type_ir::TypeVisitable; - let contains_errors = self.db().impl_trait(i).map_or(false, |b| { - b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break() - }); - if contains_errors { - continue; - } + match self_ty.kind() { + TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Adt(_, _) + | TyKind::Foreign(_) + | TyKind::Str + | TyKind::Array(_, _) + | TyKind::Pat(_, _) + | TyKind::Slice(_) + | TyKind::RawPtr(_, _) + | TyKind::Ref(_, _, _) + | TyKind::FnDef(_, _) + | TyKind::FnPtr(..) + | TyKind::Dynamic(_, _) + | TyKind::Closure(..) + | TyKind::CoroutineClosure(..) 
+ | TyKind::Coroutine(_, _) + | TyKind::Never + | TyKind::Tuple(_) + | TyKind::UnsafeBinder(_) => { + let simp = + fast_reject::simplify_type(self, self_ty, fast_reject::TreatParams::AsRigid) + .unwrap(); + consider_impls_for_simplified_type(simp); + } - f(i.into()); - } - ControlFlow::Continue(()) - }, - ); - } else { - _ = for_trait_impls( - self.db(), - self.krate.expect("Must have self.krate"), - self.block, - trait_, - self_ty_fp, - |impls| { - for fp in fps { - for i in impls.for_trait_and_self_ty(trait_, *fp) { - use rustc_type_ir::TypeVisitable; - let contains_errors = self.db().impl_trait(i).map_or(false, |b| { - b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break() - }); - if contains_errors { - continue; - } + // HACK: For integer and float variables we have to manually look at all impls + // which have some integer or float as a self type. + TyKind::Infer(InferTy::IntVar(_)) => { + use IntTy::*; + use UintTy::*; + // This causes a compiler error if any new integer kinds are added. + let (I8 | I16 | I32 | I64 | I128 | Isize): IntTy; + let (U8 | U16 | U32 | U64 | U128 | Usize): UintTy; + let possible_integers = [ + // signed integers + SimplifiedType::Int(I8), + SimplifiedType::Int(I16), + SimplifiedType::Int(I32), + SimplifiedType::Int(I64), + SimplifiedType::Int(I128), + SimplifiedType::Int(Isize), + // unsigned integers + SimplifiedType::Uint(U8), + SimplifiedType::Uint(U16), + SimplifiedType::Uint(U32), + SimplifiedType::Uint(U64), + SimplifiedType::Uint(U128), + SimplifiedType::Uint(Usize), + ]; + for simp in possible_integers { + consider_impls_for_simplified_type(simp); + } + } - f(i.into()); - } - } - ControlFlow::Continue(()) - }, - ); + TyKind::Infer(InferTy::FloatVar(_)) => { + // This causes a compiler error if any new float kinds are added. 
+ let (FloatTy::F16 | FloatTy::F32 | FloatTy::F64 | FloatTy::F128); + let possible_floats = [ + SimplifiedType::Float(FloatTy::F16), + SimplifiedType::Float(FloatTy::F32), + SimplifiedType::Float(FloatTy::F64), + SimplifiedType::Float(FloatTy::F128), + ]; + + for simp in possible_floats { + consider_impls_for_simplified_type(simp); + } + } + + // The only traits applying to aliases and placeholders are blanket impls. + // + // Impls which apply to an alias after normalization are handled by + // `assemble_candidates_after_normalizing_self_ty`. + TyKind::Alias(_, _) | TyKind::Placeholder(..) | TyKind::Error(_) => (), + + // FIXME: These should ideally not exist as a self type. It would be nice for + // the builtin auto trait impls of coroutines to instead directly recurse + // into the witness. + TyKind::CoroutineWitness(..) => (), + + // These variants should not exist as a self type. + TyKind::Infer( + InferTy::TyVar(_) + | InferTy::FreshTy(_) + | InferTy::FreshIntTy(_) + | InferTy::FreshFloatTy(_), + ) + | TyKind::Param(_) + | TyKind::Bound(_, _) => panic!("unexpected self type: {self_ty:?}"), } + + self.for_each_blanket_impl(trait_def_id, f) } fn for_each_blanket_impl(self, trait_def_id: Self::TraitId, mut f: impl FnMut(Self::ImplId)) { let Some(krate) = self.krate else { return }; + let block = trait_def_id.0.loc(self.db).container.containing_block(); - for impls in self.db.trait_impls_in_deps(krate).iter() { - for impl_id in impls.for_trait(trait_def_id.0) { - let impl_data = self.db.impl_signature(impl_id); - let self_ty_ref = &impl_data.store[impl_data.self_ty]; - if matches!(self_ty_ref, hir_def::type_ref::TypeRef::TypeParam(_)) { - f(impl_id.into()); - } + TraitImpls::for_each_crate_and_block(self.db, krate, block, &mut |impls| { + for &impl_ in impls.blanket_impls(trait_def_id.0) { + f(impl_.into()); } - } + }); } fn has_item_definition(self, _def_id: Self::DefId) -> bool {
diff --git a/crates/hir-ty/src/next_solver/predicate.rs b/crates/hir-ty/src/next_solver/predicate.rs index 3438b75..7cc3af7 100644 --- a/crates/hir-ty/src/next_solver/predicate.rs +++ b/crates/hir-ty/src/next_solver/predicate.rs
@@ -13,7 +13,7 @@ }; use smallvec::SmallVec; -use crate::next_solver::{InternedWrapperNoDebug, TraitIdWrapper}; +use crate::next_solver::{GenericArg, InternedWrapperNoDebug, TraitIdWrapper}; use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db}; @@ -43,6 +43,7 @@ pub type PolyTraitRef<'db> = Binder<'db, TraitRef<'db>>; pub type PolyExistentialTraitRef<'db> = Binder<'db, ExistentialTraitRef<'db>>; pub type PolyExistentialProjection<'db> = Binder<'db, ExistentialProjection<'db>>; +pub type ArgOutlivesPredicate<'db> = OutlivesPredicate<'db, GenericArg<'db>>; /// Compares via an ordering that will not change if modules are reordered or other changes are /// made to the tree. In particular, this ordering is preserved across incremental compilations.
diff --git a/crates/hir-ty/src/next_solver/region.rs b/crates/hir-ty/src/next_solver/region.rs index b5f0e6d..19f3c38 100644 --- a/crates/hir-ty/src/next_solver/region.rs +++ b/crates/hir-ty/src/next_solver/region.rs
@@ -79,6 +79,10 @@ matches!(self.inner(), RegionKind::ReStatic) } + pub fn is_erased(&self) -> bool { + matches!(self.inner(), RegionKind::ReErased) + } + pub fn is_var(&self) -> bool { matches!(self.inner(), RegionKind::ReVar(_)) }
diff --git a/crates/hir-ty/src/next_solver/solver.rs b/crates/hir-ty/src/next_solver/solver.rs index 7b96b40..8fae340 100644 --- a/crates/hir-ty/src/next_solver/solver.rs +++ b/crates/hir-ty/src/next_solver/solver.rs
@@ -11,13 +11,10 @@ }; use tracing::debug; -use crate::{ - ImplTraitId, - next_solver::{ - AliasTy, CanonicalVarKind, Clause, ClauseKind, CoercePredicate, GenericArgs, ImplIdWrapper, - ParamEnv, Predicate, PredicateKind, SubtypePredicate, Ty, TyKind, fold::fold_tys, - util::sizedness_fast_path, - }, +use crate::next_solver::{ + AliasTy, CanonicalVarKind, Clause, ClauseKind, CoercePredicate, GenericArgs, ImplIdWrapper, + ParamEnv, Predicate, PredicateKind, SubtypePredicate, Ty, TyKind, fold::fold_tys, + util::sizedness_fast_path, }; use super::{ @@ -163,20 +160,7 @@ }) }; - let db = interner.db; - let (opaques_table, opaque_idx) = match opaque_id.loc(db) { - ImplTraitId::ReturnTypeImplTrait(func, opaque_idx) => { - (db.return_type_impl_traits(func), opaque_idx) - } - ImplTraitId::TypeAliasImplTrait(type_alias, opaque_idx) => { - (db.type_alias_impl_traits(type_alias), opaque_idx) - } - }; - let item_bounds = opaques_table - .as_deref() - .unwrap() - .as_ref() - .map_bound(|table| &table.impl_traits[opaque_idx].predicates); + let item_bounds = opaque_id.predicates(interner.db); for predicate in item_bounds.iter_instantiated_copied(interner, args.as_slice()) { let predicate = replace_opaques_in(predicate);
diff --git a/crates/hir-ty/src/next_solver/ty.rs b/crates/hir-ty/src/next_solver/ty.rs index b8406fe..58849ce 100644 --- a/crates/hir-ty/src/next_solver/ty.rs +++ b/crates/hir-ty/src/next_solver/ty.rs
@@ -25,8 +25,8 @@ }; use crate::{ - ImplTraitId, db::{HirDatabase, InternedCoroutine}, + lower::GenericPredicates, next_solver::{ AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const, CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper, @@ -41,6 +41,7 @@ util::{FloatExt, IntegerExt}, }; +pub type SimplifiedType = rustc_type_ir::fast_reject::SimplifiedType<SolverDefId>; pub type TyKind<'db> = rustc_type_ir::TyKind<DbInterner<'db>>; pub type FnHeader<'db> = rustc_type_ir::FnHeader<DbInterner<'db>>; @@ -127,6 +128,22 @@ Ty::new_tup(interner, &[]) } + pub fn new_imm_ptr(interner: DbInterner<'db>, ty: Ty<'db>) -> Self { + Ty::new_ptr(interner, ty, Mutability::Not) + } + + pub fn new_imm_ref(interner: DbInterner<'db>, region: Region<'db>, ty: Ty<'db>) -> Self { + Ty::new_ref(interner, region, ty, Mutability::Not) + } + + pub fn new_opaque( + interner: DbInterner<'db>, + def_id: SolverDefId, + args: GenericArgs<'db>, + ) -> Self { + Ty::new_alias(interner, AliasTyKind::Opaque, AliasTy::new_from_args(interner, def_id, args)) + } + /// Returns the `Size` for primitive types (bool, uint, int, char, float). pub fn primitive_size(self, interner: DbInterner<'db>) -> Size { match self.kind() { @@ -327,11 +344,40 @@ } #[inline] + pub fn is_bool(self) -> bool { + matches!(self.kind(), TyKind::Bool) + } + + /// A scalar type is one that denotes an atomic datum, with no sub-components. + /// (A RawPtr is scalar because it represents a non-managed pointer, so its + /// contents are abstract to rustc.) + #[inline] + pub fn is_scalar(self) -> bool { + matches!( + self.kind(), + TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Float(_) + | TyKind::Uint(_) + | TyKind::FnDef(..) + | TyKind::FnPtr(..) 
+ | TyKind::RawPtr(_, _) + | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) + ) + } + + #[inline] pub fn is_infer(self) -> bool { matches!(self.kind(), TyKind::Infer(..)) } #[inline] + pub fn is_numeric(self) -> bool { + self.is_integral() || self.is_floating_point() + } + + #[inline] pub fn is_str(self) -> bool { matches!(self.kind(), TyKind::Str) } @@ -346,10 +392,27 @@ matches!(self.kind(), TyKind::RawPtr(..)) } + #[inline] + pub fn is_array(self) -> bool { + matches!(self.kind(), TyKind::Array(..)) + } + + #[inline] + pub fn is_slice(self) -> bool { + matches!(self.kind(), TyKind::Slice(..)) + } + pub fn is_union(self) -> bool { self.as_adt().is_some_and(|(adt, _)| matches!(adt, AdtId::UnionId(_))) } + pub fn boxed_ty(self) -> Option<Ty<'db>> { + match self.kind() { + TyKind::Adt(adt_def, args) if adt_def.is_box() => Some(args.type_at(0)), + _ => None, + } + } + #[inline] pub fn as_adt(self) -> Option<(AdtId, GenericArgs<'db>)> { match self.kind() { @@ -378,11 +441,9 @@ /// /// The parameter `explicit` indicates if this is an *explicit* dereference. /// Some types -- notably raw ptrs -- can only be dereferenced explicitly. 
- pub fn builtin_deref(self, db: &dyn HirDatabase, explicit: bool) -> Option<Ty<'db>> { + pub fn builtin_deref(self, explicit: bool) -> Option<Ty<'db>> { match self.kind() { - TyKind::Adt(adt, substs) if crate::lang_items::is_box(db, adt.def_id().0) => { - Some(substs.as_slice()[0].expect_ty()) - } + TyKind::Adt(adt, substs) if adt.is_box() => Some(substs.as_slice()[0].expect_ty()), TyKind::Ref(_, ty, _) => Some(ty), TyKind::RawPtr(ty, _) if explicit => Some(ty), _ => None, @@ -562,26 +623,14 @@ let interner = DbInterner::new_with(db, None, None); match self.kind() { - TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => { - match db.lookup_intern_impl_trait_id(opaque_ty.def_id.expect_opaque_ty()) { - ImplTraitId::ReturnTypeImplTrait(func, idx) => { - db.return_type_impl_traits(func).map(|it| { - let data = - (*it).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates); - data.iter_instantiated_copied(interner, opaque_ty.args.as_slice()) - .collect() - }) - } - ImplTraitId::TypeAliasImplTrait(alias, idx) => { - db.type_alias_impl_traits(alias).map(|it| { - let data = - (*it).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates); - data.iter_instantiated_copied(interner, opaque_ty.args.as_slice()) - .collect() - }) - } - } - } + TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => Some( + opaque_ty + .def_id + .expect_opaque_ty() + .predicates(db) + .iter_instantiated_copied(interner, opaque_ty.args.as_slice()) + .collect(), + ), TyKind::Param(param) => { // FIXME: We shouldn't use `param.id` here. 
let generic_params = db.generic_params(param.id.parent()); @@ -589,11 +638,8 @@ match param_data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { TypeParamProvenance::ArgumentImplTrait => { - let predicates = db - .generic_predicates(param.id.parent()) - .instantiate_identity() - .into_iter() - .flatten() + let predicates = GenericPredicates::query_all(db, param.id.parent()) + .iter_identity_copied() .filter(|wc| match wc.kind().skip_binder() { ClauseKind::Trait(tr) => tr.self_ty() == self, ClauseKind::Projection(pred) => pred.self_ty() == self,
diff --git a/crates/hir-ty/src/next_solver/util.rs b/crates/hir-ty/src/next_solver/util.rs index d113f76..972c8e2 100644 --- a/crates/hir-ty/src/next_solver/util.rs +++ b/crates/hir-ty/src/next_solver/util.rs
@@ -1,39 +1,30 @@ //! Various utilities for the next-trait-solver. -use std::{ - iter, - ops::{self, ControlFlow}, -}; +use std::ops::ControlFlow; -use base_db::Crate; -use hir_def::{BlockId, HasModule, lang_item::LangItem}; -use la_arena::Idx; +use hir_def::TraitId; use rustc_abi::{Float, HasDataLayout, Integer, IntegerType, Primitive, ReprOptions}; use rustc_type_ir::{ ConstKind, CoroutineArgs, DebruijnIndex, FloatTy, INNERMOST, IntTy, Interner, PredicatePolarity, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable, - TypeVisitableExt, TypeVisitor, UintTy, UniverseIndex, - inherent::{ - AdtDef, GenericArg as _, GenericArgs as _, IntoKind, ParamEnv as _, SliceLike, Ty as _, - }, + TypeVisitableExt, TypeVisitor, UintTy, UniverseIndex, elaborate, + inherent::{AdtDef, GenericArg as _, IntoKind, ParamEnv as _, SliceLike, Ty as _}, lang_items::SolverTraitLangItem, solve::SizedTraitKind, }; -use crate::{ - db::HirDatabase, - lower::{LifetimeElisionKind, TyLoweringContext}, - method_resolution::{TraitImpls, TyFingerprint}, - next_solver::{ - BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion, - infer::InferCtxt, +use crate::next_solver::{ + BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion, + PolyTraitRef, + infer::{ + InferCtxt, + traits::{Obligation, ObligationCause, PredicateObligation}, }, }; use super::{ - Binder, BoundRegion, BoundTy, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder, - GenericArgs, Predicate, PredicateKind, Region, SolverDefId, TraitPredicate, TraitRef, Ty, - TyKind, + Binder, BoundRegion, BoundTy, Clause, ClauseKind, Const, DbInterner, EarlyBinder, GenericArgs, + Predicate, PredicateKind, Region, SolverDefId, Ty, TyKind, fold::{BoundVarReplacer, FnMutDelegate}, }; @@ -388,54 +379,6 @@ } } -pub(crate) fn for_trait_impls( - db: &dyn HirDatabase, - krate: Crate, - block: Option<BlockId>, - trait_id: hir_def::TraitId, - self_ty_fp: 
Option<TyFingerprint>, - mut f: impl FnMut(&TraitImpls) -> ControlFlow<()>, -) -> ControlFlow<()> { - // Note: Since we're using `impls_for_trait` and `impl_provided_for`, - // only impls where the trait can be resolved should ever reach Chalk. - // `impl_datum` relies on that and will panic if the trait can't be resolved. - let in_self_and_deps = db.trait_impls_in_deps(krate); - let trait_module = trait_id.module(db); - let type_module = match self_ty_fp { - Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(db)), - Some(TyFingerprint::ForeignType(type_id)) => Some(type_id.module(db)), - Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(db)), - _ => None, - }; - - let mut def_blocks = - [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())]; - - let block_impls = iter::successors(block, |&block_id| { - cov_mark::hit!(block_local_impls); - block_id.loc(db).module.containing_block() - }) - .inspect(|&block_id| { - // make sure we don't search the same block twice - def_blocks.iter_mut().for_each(|block| { - if *block == Some(block_id) { - *block = None; - } - }); - }) - .filter_map(|block_id| db.trait_impls_in_block(block_id)); - for it in in_self_and_deps.iter().map(ops::Deref::deref) { - f(it)?; - } - for it in block_impls { - f(&it)?; - } - for it in def_blocks.into_iter().flatten().filter_map(|it| db.trait_impls_in_block(it)) { - f(&it)?; - } - ControlFlow::Continue(()) -} - // FIXME(next-trait-solver): uplift pub fn sizedness_constraint_for_ty<'db>( interner: DbInterner<'db>, @@ -507,79 +450,14 @@ pub fn explicit_item_bounds<'db>( interner: DbInterner<'db>, def_id: SolverDefId, -) -> EarlyBinder<'db, Clauses<'db>> { +) -> EarlyBinder<'db, impl DoubleEndedIterator<Item = Clause<'db>> + ExactSizeIterator> { let db = interner.db(); - match def_id { - SolverDefId::TypeAliasId(type_alias) => { - // Lower bounds -- we could/should maybe move this to a separate query in `lower` - let type_alias_data = 
db.type_alias_signature(type_alias); - let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &type_alias_data.store, - type_alias.into(), - LifetimeElisionKind::AnonymousReportError, - ); - - let item_args = GenericArgs::identity_for_item(interner, def_id); - let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args); - - let mut bounds = Vec::new(); - for bound in &type_alias_data.bounds { - ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| { - bounds.push(pred); - }); - } - - if !ctx.unsized_types.contains(&interner_ty) { - let sized_trait = LangItem::Sized - .resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate")); - let sized_bound = sized_trait.map(|trait_id| { - let trait_ref = TraitRef::new_from_args( - interner, - trait_id.into(), - GenericArgs::new_from_iter(interner, [interner_ty.into()]), - ); - Clause(Predicate::new( - interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - )) - }); - bounds.extend(sized_bound); - bounds.shrink_to_fit(); - } - - rustc_type_ir::EarlyBinder::bind(Clauses::new_from_iter(interner, bounds)) - } - SolverDefId::InternedOpaqueTyId(id) => { - let full_id = db.lookup_intern_impl_trait_id(id); - match full_id { - crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { - let datas = db - .return_type_impl_traits(func) - .expect("impl trait id without impl traits"); - let datas = (*datas).as_ref().skip_binder(); - let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())]; - EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone())) - } - crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => { - let datas = db - .type_alias_impl_traits(alias) - .expect("impl trait id without impl traits"); - let datas = (*datas).as_ref().skip_binder(); - let data 
= &datas.impl_traits[Idx::from_raw(idx.into_raw())]; - EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone())) - } - } - } + let clauses = match def_id { + SolverDefId::TypeAliasId(type_alias) => crate::lower::type_alias_bounds(db, type_alias), + SolverDefId::InternedOpaqueTyId(id) => id.predicates(db), _ => panic!("Unexpected GenericDefId"), - } + }; + clauses.map_bound(|clauses| clauses.iter().copied()) } pub struct ContainsTypeErrors; @@ -792,3 +670,34 @@ false } + +/// Casts a trait reference into a reference to one of its super +/// traits; returns `None` if `target_trait_def_id` is not a +/// supertrait. +pub(crate) fn upcast_choices<'db>( + interner: DbInterner<'db>, + source_trait_ref: PolyTraitRef<'db>, + target_trait_def_id: TraitId, +) -> Vec<PolyTraitRef<'db>> { + if source_trait_ref.def_id().0 == target_trait_def_id { + return vec![source_trait_ref]; // Shortcut the most common case. + } + + elaborate::supertraits(interner, source_trait_ref) + .filter(|r| r.def_id().0 == target_trait_def_id) + .collect() +} + +#[inline] +pub(crate) fn clauses_as_obligations<'db>( + clauses: impl IntoIterator<Item = Clause<'db>>, + cause: ObligationCause, + param_env: ParamEnv<'db>, +) -> impl Iterator<Item = PredicateObligation<'db>> { + clauses.into_iter().map(move |clause| Obligation { + cause: cause.clone(), + param_env, + predicate: clause.as_predicate(), + recursion_depth: 0, + }) +}
diff --git a/crates/hir-ty/src/opaques.rs b/crates/hir-ty/src/opaques.rs index 8531f24..acf532c 100644 --- a/crates/hir-ty/src/opaques.rs +++ b/crates/hir-ty/src/opaques.rs
@@ -7,7 +7,6 @@ use la_arena::ArenaMap; use rustc_type_ir::inherent::Ty as _; use syntax::ast; -use triomphe::Arc; use crate::{ ImplTraitId, @@ -29,7 +28,7 @@ // A function may define its own RPITs. extend_with_opaques( db, - db.return_type_impl_traits(func), + ImplTraits::return_type_impl_traits(db, func), |opaque_idx| ImplTraitId::ReturnTypeImplTrait(func, opaque_idx), result, ); @@ -38,7 +37,7 @@ let extend_with_taits = |type_alias| { extend_with_opaques( db, - db.type_alias_impl_traits(type_alias), + ImplTraits::type_alias_impl_traits(db, type_alias), |opaque_idx| ImplTraitId::TypeAliasImplTrait(type_alias, opaque_idx), result, ); @@ -75,12 +74,12 @@ fn extend_with_opaques<'db>( db: &'db dyn HirDatabase, - opaques: Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>, + opaques: &Option<Box<EarlyBinder<'db, ImplTraits<'db>>>>, mut make_impl_trait: impl FnMut(ImplTraitIdx<'db>) -> ImplTraitId<'db>, result: &mut Vec<SolverDefId>, ) { if let Some(opaques) = opaques { - for (opaque_idx, _) in (*opaques).as_ref().skip_binder().impl_traits.iter() { + for (opaque_idx, _) in (**opaques).as_ref().skip_binder().impl_traits.iter() { let opaque_id = InternedOpaqueTyId::new(db, make_impl_trait(opaque_idx)); result.push(opaque_id.into()); } @@ -109,6 +108,14 @@ db: &'db dyn HirDatabase, type_alias: TypeAliasId, ) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> { + // Call this first, to not perform redundant work if there are no TAITs. 
+ let Some(taits_count) = ImplTraits::type_alias_impl_traits(db, type_alias) + .as_deref() + .map(|taits| taits.as_ref().skip_binder().impl_traits.len()) + else { + return ArenaMap::new(); + }; + let loc = type_alias.loc(db); let module = loc.module(db); let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block()); @@ -119,10 +126,6 @@ let defining_bodies = tait_defining_bodies(db, &loc); - let taits_count = db - .type_alias_impl_traits(type_alias) - .map_or(0, |taits| (*taits).as_ref().skip_binder().impl_traits.len()); - let mut result = ArenaMap::with_capacity(taits_count); for defining_body in defining_bodies { let infer = db.infer(defining_body);
diff --git a/crates/hir-ty/src/specialization.rs b/crates/hir-ty/src/specialization.rs index f4ee4de..304679d 100644 --- a/crates/hir-ty/src/specialization.rs +++ b/crates/hir-ty/src/specialization.rs
@@ -2,17 +2,17 @@ use hir_def::{ImplId, nameres::crate_def_map}; use intern::sym; +use rustc_type_ir::inherent::SliceLike; use tracing::debug; use crate::{ db::HirDatabase, + lower::GenericPredicates, next_solver::{ DbInterner, TypingMode, - infer::{ - DbInternerInferExt, - traits::{Obligation, ObligationCause}, - }, + infer::{DbInternerInferExt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, + util::clauses_as_obligations, }, }; @@ -102,14 +102,12 @@ // Now check that the source trait ref satisfies all the where clauses of the target impl. // This is not just for correctness; we also need this to constrain any params that may // only be referenced via projection predicates. - if let Some(predicates) = - db.generic_predicates(parent_impl_def_id.into()).instantiate(interner, parent_args) - { - ocx.register_obligations( - predicates - .map(|predicate| Obligation::new(interner, cause.clone(), param_env, predicate)), - ); - } + ocx.register_obligations(clauses_as_obligations( + GenericPredicates::query_all(db, parent_impl_def_id.into()) + .iter_instantiated_copied(interner, parent_args.as_slice()), + cause.clone(), + param_env, + )); let errors = ocx.evaluate_obligations_error_on_ambiguity(); if !errors.is_empty() {
diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index 5a53db4..36630ab 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs
@@ -49,7 +49,7 @@ //- minicore: coerce_unsized fn test() { let x: &[isize] = &[1]; - // ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize) let x: *const [isize] = &[1]; // ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize) } @@ -88,6 +88,47 @@ } #[test] +fn unsized_from_keeps_type_info() { + check_types( + r#" +//- minicore: coerce_unsized, from +use core::{marker::Unsize, ops::CoerceUnsized}; + +struct MyBox<T: ?Sized> { + ptr: *const T, +} + +impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<MyBox<U>> for MyBox<T> {} + +struct MyRc<T: ?Sized> { + ptr: *const T, +} + +impl<T: ?Sized> core::convert::From<MyBox<T>> for MyRc<T> { + fn from(_: MyBox<T>) -> MyRc<T> { + loop {} + } +} + +fn make_box() -> MyBox<[i32; 2]> { + loop {} +} + +fn take<T: ?Sized>(value: MyRc<T>) -> MyRc<T> { + value +} + +fn test() { + let boxed: MyBox<[i32]> = make_box(); + let rc = MyRc::from(boxed); + //^^ MyRc<[i32]> + let _: MyRc<[i32]> = take(rc); +} +"#, + ); +} + +#[test] fn if_coerce() { check_no_mismatches( r#" @@ -96,7 +137,7 @@ fn test() { let x = if true { foo(&[1]) - // ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize) } else { &[1] }; @@ -148,7 +189,7 @@ fn test(i: i32) { let x = match i { 2 => foo(&[2]), - // ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize) 1 => &[1], _ => &[3], }; @@ -268,7 +309,7 @@ fn returns_string() -> String { loop {} } fn test() { takes_ref_str(&{ returns_string() }); - // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('{region error}, Not)) + // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not)) } "#, ); @@ -833,11 +874,11 @@ fn main() { let a: V<&dyn Tr>; (a,) = 
V { t: &S }; - //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + 'static)>,) + //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,) let mut a: V<&dyn Tr> = V { t: &S }; (a,) = V { t: &S }; - //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + 'static)>,) + //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,) } "#, ); @@ -854,8 +895,8 @@ } fn test() { Struct == Struct; - // ^^^^^^ adjustments: Borrow(Ref('{region error}, Not)) - // ^^^^^^ adjustments: Borrow(Ref('{region error}, Not)) + // ^^^^^^ adjustments: Borrow(Ref(Not)) + // ^^^^^^ adjustments: Borrow(Ref(Not)) }", ); } @@ -871,7 +912,7 @@ } fn test() { Struct += Struct; - // ^^^^^^ adjustments: Borrow(Ref('{region error}, Mut)) + // ^^^^^^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: Yes })) // ^^^^^^ adjustments: }", ); @@ -885,7 +926,7 @@ fn test() { let x = [1, 2, 3]; x[2] = 6; - // ^ adjustments: Borrow(Ref('?0, Mut)) + // ^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: No })) } ", ); @@ -910,11 +951,11 @@ } fn test() { Struct[0]; - // ^^^^^^ adjustments: Borrow(Ref('?0, Not)) + // ^^^^^^ adjustments: Borrow(Ref(Not)) StructMut[0]; - // ^^^^^^^^^ adjustments: Borrow(Ref('?1, Not)) + // ^^^^^^^^^ adjustments: Borrow(Ref(Not)) &mut StructMut[0]; - // ^^^^^^^^^ adjustments: Borrow(Ref('?2, Mut)) + // ^^^^^^^^^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: No })) }", ); }
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs index 14ec161..e98e5e4 100644 --- a/crates/hir-ty/src/tests/incremental.rs +++ b/crates/hir-ty/src/tests/incremental.rs
@@ -4,7 +4,7 @@ use salsa::EventKind; use test_fixture::WithFixture; -use crate::{db::HirDatabase, test_db::TestDB}; +use crate::{db::HirDatabase, method_resolution::TraitImpls, test_db::TestDB}; use super::visit_module; @@ -44,10 +44,12 @@ "body_shim", "body_with_source_map_shim", "trait_environment_shim", - "return_type_impl_traits_shim", - "expr_scopes_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", "lang_item", "crate_lang_items", + "ImplTraits < 'db >::return_type_impl_traits_", + "expr_scopes_shim", + "lang_item", ] "#]], ); @@ -131,19 +133,22 @@ "body_shim", "body_with_source_map_shim", "trait_environment_shim", - "return_type_impl_traits_shim", - "expr_scopes_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", "lang_item", "crate_lang_items", "attrs_shim", "attrs_shim", + "ImplTraits < 'db >::return_type_impl_traits_", + "expr_scopes_shim", + "lang_item", "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", "body_shim", "body_with_source_map_shim", "trait_environment_shim", - "return_type_impl_traits_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", + "ImplTraits < 'db >::return_type_impl_traits_", "expr_scopes_shim", "infer_shim", "function_signature_shim", @@ -151,7 +156,8 @@ "body_shim", "body_with_source_map_shim", "trait_environment_shim", - "return_type_impl_traits_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", + "ImplTraits < 'db >::return_type_impl_traits_", "expr_scopes_shim", ] "#]], @@ -230,9 +236,9 @@ || { let module = db.module_for_file(pos.file_id.file_id(&db)); let _crate_def_map = module.def_map(&db); - db.trait_impls_in_crate(module.krate()); + TraitImpls::for_crate(&db, module.krate()); }, - &[("trait_impls_in_crate_shim", 1)], + &[("TraitImpls::for_crate_", 1)], expect_test::expect![[r#" [ "source_root_crates_shim", @@ -241,7 +247,7 @@ "ast_id_map_shim", "parse_shim", "real_span_map_shim", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", ] 
"#]], ); @@ -267,9 +273,9 @@ || { let module = db.module_for_file(pos.file_id.file_id(&db)); let _crate_def_map = module.def_map(&db); - db.trait_impls_in_crate(module.krate()); + TraitImpls::for_crate(&db, module.krate()); }, - &[("trait_impls_in_crate_shim", 1)], + &[("TraitImpls::for_crate_", 1)], expect_test::expect![[r#" [ "parse_shim", @@ -277,7 +283,7 @@ "file_item_tree_query", "real_span_map_shim", "crate_local_def_map", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", ] "#]], ); @@ -302,9 +308,9 @@ || { let module = db.module_for_file(pos.file_id.file_id(&db)); let _crate_def_map = module.def_map(&db); - db.trait_impls_in_crate(module.krate()); + TraitImpls::for_crate(&db, module.krate()); }, - &[("trait_impls_in_crate_shim", 1)], + &[("TraitImpls::for_crate_", 1)], expect_test::expect![[r#" [ "source_root_crates_shim", @@ -313,7 +319,7 @@ "ast_id_map_shim", "parse_shim", "real_span_map_shim", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", ] "#]], ); @@ -340,9 +346,9 @@ || { let module = db.module_for_file(pos.file_id.file_id(&db)); let _crate_def_map = module.def_map(&db); - db.trait_impls_in_crate(module.krate()); + TraitImpls::for_crate(&db, module.krate()); }, - &[("trait_impls_in_crate_shim", 1)], + &[("TraitImpls::for_crate_", 1)], expect_test::expect![[r#" [ "parse_shim", @@ -350,7 +356,7 @@ "file_item_tree_query", "real_span_map_shim", "crate_local_def_map", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", ] "#]], ); @@ -375,9 +381,9 @@ || { let module = db.module_for_file(pos.file_id.file_id(&db)); let _crate_def_map = module.def_map(&db); - db.trait_impls_in_crate(module.krate()); + TraitImpls::for_crate(&db, module.krate()); }, - &[("trait_impls_in_crate_shim", 1)], + &[("TraitImpls::for_crate_", 1)], expect_test::expect![[r#" [ "source_root_crates_shim", @@ -386,7 +392,7 @@ "ast_id_map_shim", "parse_shim", "real_span_map_shim", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", ] "#]], ); @@ -410,9 +416,9 
@@ || { let module = db.module_for_file(pos.file_id.file_id(&db)); let _crate_def_map = module.def_map(&db); - db.trait_impls_in_crate(module.krate()); + TraitImpls::for_crate(&db, module.krate()); }, - &[("trait_impls_in_crate_shim", 1)], + &[("TraitImpls::for_crate_", 1)], expect_test::expect![[r#" [ "parse_shim", @@ -420,7 +426,7 @@ "file_item_tree_query", "real_span_map_shim", "crate_local_def_map", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", ] "#]], ); @@ -449,9 +455,9 @@ || { let module = db.module_for_file(pos.file_id.file_id(&db)); let _crate_def_map = module.def_map(&db); - db.trait_impls_in_crate(module.krate()); + TraitImpls::for_crate(&db, module.krate()); }, - &[("trait_impls_in_crate_shim", 1)], + &[("TraitImpls::for_crate_", 1)], expect_test::expect![[r#" [ "source_root_crates_shim", @@ -460,7 +466,7 @@ "ast_id_map_shim", "parse_shim", "real_span_map_shim", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", ] "#]], ); @@ -492,9 +498,9 @@ || { let module = db.module_for_file(pos.file_id.file_id(&db)); let _crate_def_map = module.def_map(&db); - db.trait_impls_in_crate(module.krate()); + TraitImpls::for_crate(&db, module.krate()); }, - &[("trait_impls_in_crate_shim", 1)], + &[("TraitImpls::for_crate_", 1)], expect_test::expect![[r#" [ "parse_shim", @@ -502,7 +508,7 @@ "file_item_tree_query", "real_span_map_shim", "crate_local_def_map", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", "attrs_shim", "impl_trait_with_diagnostics_shim", "impl_signature_shim", @@ -581,37 +587,37 @@ "body_shim", "body_with_source_map_shim", "trait_environment_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", "lang_item", "crate_lang_items", "attrs_shim", "attrs_shim", - "generic_predicates_shim", - "return_type_impl_traits_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", + "ImplTraits < 'db >::return_type_impl_traits_", "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", 
"trait_environment_shim", - "return_type_impl_traits_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", + "ImplTraits < 'db >::return_type_impl_traits_", "expr_scopes_shim", "struct_signature_shim", "struct_signature_with_source_map_shim", - "generic_predicates_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", "value_ty_shim", "VariantFields::firewall_", "VariantFields::query_", "lang_item", - "lang_item", - "inherent_impls_in_crate_shim", + "InherentImpls::for_crate_", "impl_signature_shim", "impl_signature_with_source_map_shim", "callable_item_signature_shim", - "trait_impls_in_deps_shim", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_and_deps_", + "TraitImpls::for_crate_", "impl_trait_with_diagnostics_shim", "impl_self_ty_with_diagnostics_shim", - "generic_predicates_shim", - "value_ty_shim", - "generic_predicates_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", + "lang_item", ] "#]], ); @@ -678,29 +684,29 @@ "function_signature_shim", "body_with_source_map_shim", "body_shim", - "trait_environment_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", "crate_lang_items", "attrs_shim", "attrs_shim", "attrs_shim", - "generic_predicates_shim", - "return_type_impl_traits_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", + "ImplTraits < 'db >::return_type_impl_traits_", "infer_shim", "function_signature_with_source_map_shim", - "return_type_impl_traits_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", + "ImplTraits < 'db >::return_type_impl_traits_", "expr_scopes_shim", "struct_signature_with_source_map_shim", - "generic_predicates_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", "VariantFields::query_", - "inherent_impls_in_crate_shim", + "InherentImpls::for_crate_", "impl_signature_with_source_map_shim", "impl_signature_shim", "callable_item_signature_shim", - "trait_impls_in_crate_shim", + "TraitImpls::for_crate_", "impl_trait_with_diagnostics_shim", 
"impl_self_ty_with_diagnostics_shim", - "generic_predicates_shim", - "generic_predicates_shim", + "GenericPredicates < 'db >::query_with_diagnostics_", ] "#]], );
diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index b0afd60..274d33a 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs
@@ -8,6 +8,7 @@ fn infer_slice_method() { check_types( r#" +//- /core.rs crate:core impl<T> [T] { #[rustc_allow_incoherent_impl] fn foo(&self) -> T { @@ -27,13 +28,13 @@ fn cross_crate_primitive_method() { check_types( r#" -//- /main.rs crate:main deps:other_crate +//- /main.rs crate:main deps:core fn test() { let x = 1f32; x.foo(); } //^^^^^^^ f32 -//- /lib.rs crate:other_crate +//- /lib.rs crate:core mod foo { impl f32 { #[rustc_allow_incoherent_impl] @@ -48,6 +49,7 @@ fn infer_array_inherent_impl() { check_types( r#" +//- /core.rs crate:core impl<T, const N: usize> [T; N] { #[rustc_allow_incoherent_impl] fn foo(&self) -> T { @@ -981,7 +983,6 @@ #[test] fn method_resolution_overloaded_const() { - cov_mark::check!(const_candidate_self_type_mismatch); check_types( r#" struct Wrapper<T>(T); @@ -1376,7 +1377,6 @@ #[test] fn autoderef_visibility_method() { - cov_mark::check!(autoderef_candidate_not_visible); check( r#" //- minicore: receiver @@ -1415,7 +1415,6 @@ #[test] fn trait_vs_private_inherent_const() { - cov_mark::check!(const_candidate_not_visible); check( r#" mod a { @@ -1505,6 +1504,7 @@ fn resolve_const_generic_array_methods() { check_types( r#" +//- /core.rs crate:core #[lang = "array"] impl<T, const N: usize> [T; N] { #[rustc_allow_incoherent_impl] @@ -1536,6 +1536,7 @@ fn resolve_const_generic_method() { check_types( r#" +//- /core.rs crate:core struct Const<const N: usize>; #[lang = "array"] @@ -1714,8 +1715,8 @@ 95..103 'u32::foo': fn foo<u32>() -> u8 109..115 'S::foo': fn foo<S>() -> u8 121..127 'T::foo': fn foo<T>() -> u8 - 133..139 'U::foo': {unknown} - 145..157 '<[u32]>::foo': {unknown} + 133..139 'U::foo': fn foo<U>() -> u8 + 145..157 '<[u32]>::foo': fn foo<[u32]>() -> u8 "#]], ); } @@ -1869,6 +1870,7 @@ "#, ); } + #[test] fn receiver_adjustment_autoref() { check( @@ -1879,9 +1881,9 @@ } fn test() { Foo.foo(); - //^^^ adjustments: Borrow(Ref('?0, Not)) + //^^^ adjustments: Borrow(Ref(Not)) (&Foo).foo(); - // ^^^^ adjustments: Deref(None), 
Borrow(Ref('?2, Not)) + // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)) } "#, ); @@ -1895,7 +1897,7 @@ fn test() { let a = [1, 2, 3]; a.len(); -} //^ adjustments: Borrow(Ref('?0, Not)), Pointer(Unsize) +} //^ adjustments: Borrow(Ref(Not)), Pointer(Unsize) "#, ); } @@ -2036,6 +2038,7 @@ check( r#" //- minicore: error, send +//- /std.rs crate:std pub struct Box<T>(T); use core::error::Error; @@ -2108,7 +2111,7 @@ } fn test() { Box::new(Foo).foo(); - //^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref('?0, Not)) + //^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref(Not)) } "#, ); @@ -2126,7 +2129,7 @@ use core::mem::ManuallyDrop; fn test() { ManuallyDrop::new(Foo).foo(); - //^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('?0, Not)) + //^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not)) } "#, ); @@ -2176,6 +2179,8 @@ check( r#" //- minicore: receiver +#![feature(arbitrary_self_types)] + use core::ops::Receiver; struct Foo;
diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 607daad..5e150e2 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs
@@ -6,7 +6,7 @@ fn infer_pattern() { check_infer( r#" - //- minicore: iterator + //- minicore: iterator, add, builtin_impls fn test(x: &i32) { let y = x; let &z = x; @@ -189,24 +189,33 @@ fn infer_range_pattern() { check_infer_with_mismatches( r#" - fn test(x: &i32) { - if let 1..76 = 2u32 {} - if let 1..=76 = 2u32 {} - } +//- minicore: range +fn test(x..y: &core::ops::Range<u32>) { + if let 1..76 = 2u32 {} + if let 1..=76 = 2u32 {} +} "#, expect![[r#" - 8..9 'x': &'? i32 - 17..75 '{ ...2 {} }': () - 23..45 'if let...u32 {}': () - 26..42 'let 1....= 2u32': bool - 30..35 '1..76': u32 - 38..42 '2u32': u32 - 43..45 '{}': () - 50..73 'if let...u32 {}': () - 53..70 'let 1....= 2u32': bool - 57..63 '1..=76': u32 - 66..70 '2u32': u32 - 71..73 '{}': () + 8..9 'x': u32 + 8..12 'x..y': Range<u32> + 11..12 'y': u32 + 38..96 '{ ...2 {} }': () + 44..66 'if let...u32 {}': () + 47..63 'let 1....= 2u32': bool + 51..52 '1': i32 + 51..56 '1..76': Range<i32> + 54..56 '76': i32 + 59..63 '2u32': u32 + 64..66 '{}': () + 71..94 'if let...u32 {}': () + 74..91 'let 1....= 2u32': bool + 78..79 '1': i32 + 78..84 '1..=76': RangeInclusive<i32> + 82..84 '76': i32 + 87..91 '2u32': u32 + 92..94 '{}': () + 51..56: expected u32, got Range<i32> + 78..84: expected u32, got RangeInclusive<i32> "#]], ); } @@ -1259,3 +1268,22 @@ "#, ); } + +#[test] +fn destructuring_assign_ref() { + check_no_mismatches( + r#" +struct Foo; + +fn foo() -> (&'static Foo, u32) { + (&Foo, 0) +} + +fn bar() { + let ext: &Foo; + let v; + (ext, v) = foo(); +} + "#, + ); +}
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 75d3203..f03f8d7 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs
@@ -292,7 +292,7 @@ 149..156 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} 181..188 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} 191..313 'if ICE... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} - 194..231 'ICE_RE..._VALUE': bool + 194..231 'ICE_RE..._VALUE': {unknown} 194..247 'ICE_RE...&name)': bool 241..246 '&name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} 242..246 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown} @@ -629,7 +629,7 @@ 65..69 'self': Self 267..271 'self': Self 466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> - 488..522 '{ ... }': <SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> as BoxedDsl<DB>>::Output + 488..522 '{ ... }': {unknown} 498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> 498..508 'self.order': O 498..515 'self.o...into()': dyn QueryFragment<DB> + 'static @@ -725,7 +725,7 @@ 138..146 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar> 142..145 'key': &'? K 162..165 'key': &'? 
K - 224..227 '{ }': impl Future<Output = <K as Foo<R>>::Bar> + 224..227 '{ }': () "#]], ); } @@ -2522,3 +2522,43 @@ "#, ); } + +#[test] +fn issue_9881_super_trait_blanket_impl() { + check_types( + r#" +pub trait TryStream: Stream { + fn try_poll_next(&self) {} +} + +pub trait Stream { + type Item; + fn poll_next(&self) {} +} + +trait StreamAlias: Stream<Item = ()> {} + +impl<S: Stream<Item = ()>> TryStream for S {} + +impl<S: Stream<Item = ()>> StreamAlias for S {} + +struct StreamImpl; + +impl Stream for StreamImpl { + type Item = (); +} + +fn foo() -> impl StreamAlias { + StreamImpl +} + +fn main() { + let alias = foo(); + let _: () = alias.try_poll_next(); + // ^ () + let _: () = alias.poll_next(); + // ^ () +} + "#, + ); +}
diff --git a/crates/hir-ty/src/tests/regression/new_solver.rs b/crates/hir-ty/src/tests/regression/new_solver.rs index 90c81d1..933a9a5 100644 --- a/crates/hir-ty/src/tests/regression/new_solver.rs +++ b/crates/hir-ty/src/tests/regression/new_solver.rs
@@ -180,7 +180,7 @@ "#, expect![[r#" 150..154 'self': &'a Grid - 174..181 '{ }': <&'a Grid as IntoIterator>::IntoIter + 174..181 '{ }': () "#]], ); } @@ -414,7 +414,7 @@ 244..246 '_x': {unknown} 249..257 'to_bytes': fn to_bytes() -> [u8; _] 249..259 'to_bytes()': [u8; _] - 249..268 'to_byt..._vec()': Vec<<[u8; _] as Foo>::Item> + 249..268 'to_byt..._vec()': {unknown} "#]], ); }
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index c2392b3..2e107b2 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs
@@ -645,10 +645,13 @@ fn infer_binary_op() { check_infer( r#" +//- minicore: add, builtin_impls fn f(x: bool) -> i32 { 0i32 } +const CONST_2: isize = 0; + fn test() -> bool { let x = a && b; let y = true || false; @@ -658,8 +661,9 @@ let h = minus_forty <= CONST_2; let c = f(z || y) + 5; let d = b; - let g = minus_forty ^= i; + let g = minus_forty += i; let ten: usize = 10; + let some_num = 0usize; let ten_is_eleven = ten == some_num; ten < 3 @@ -669,53 +673,56 @@ 5..6 'x': bool 21..33 '{ 0i32 }': i32 27..31 '0i32': i32 - 53..369 '{ ... < 3 }': bool - 63..64 'x': bool - 67..68 'a': bool - 67..73 'a && b': bool - 72..73 'b': bool - 83..84 'y': bool - 87..91 'true': bool - 87..100 'true || false': bool - 95..100 'false': bool - 110..111 'z': bool - 114..115 'x': bool - 114..120 'x == y': bool - 119..120 'y': bool - 130..131 't': bool - 134..135 'x': bool - 134..140 'x != y': bool - 139..140 'y': bool - 150..161 'minus_forty': isize - 171..179 '-40isize': isize - 172..179 '40isize': isize - 189..190 'h': bool - 193..204 'minus_forty': isize - 193..215 'minus_...ONST_2': bool - 208..215 'CONST_2': isize - 225..226 'c': i32 - 229..230 'f': fn f(bool) -> i32 - 229..238 'f(z || y)': i32 - 229..242 'f(z || y) + 5': i32 - 231..232 'z': bool - 231..237 'z || y': bool - 236..237 'y': bool - 241..242 '5': i32 - 252..253 'd': {unknown} - 256..257 'b': {unknown} - 267..268 'g': () - 271..282 'minus_forty': isize - 271..287 'minus_...y ^= i': () - 286..287 'i': isize - 297..300 'ten': usize - 310..312 '10': usize - 322..335 'ten_is_eleven': bool - 338..341 'ten': usize - 338..353 'ten == some_num': bool - 345..353 'some_num': usize - 360..363 'ten': usize - 360..367 'ten < 3': bool - 366..367 '3': usize + 58..59 '0': isize + 80..423 '{ ... 
< 3 }': bool + 90..91 'x': bool + 94..95 'a': bool + 94..100 'a && b': bool + 99..100 'b': bool + 110..111 'y': bool + 114..118 'true': bool + 114..127 'true || false': bool + 122..127 'false': bool + 137..138 'z': bool + 141..142 'x': bool + 141..147 'x == y': bool + 146..147 'y': bool + 157..158 't': bool + 161..162 'x': bool + 161..167 'x != y': bool + 166..167 'y': bool + 177..188 'minus_forty': isize + 198..206 '-40isize': isize + 199..206 '40isize': isize + 216..217 'h': bool + 220..231 'minus_forty': isize + 220..242 'minus_...ONST_2': bool + 235..242 'CONST_2': isize + 252..253 'c': i32 + 256..257 'f': fn f(bool) -> i32 + 256..265 'f(z || y)': i32 + 256..269 'f(z || y) + 5': i32 + 258..259 'z': bool + 258..264 'z || y': bool + 263..264 'y': bool + 268..269 '5': i32 + 279..280 'd': {unknown} + 283..284 'b': {unknown} + 294..295 'g': () + 298..309 'minus_forty': isize + 298..314 'minus_...y += i': () + 313..314 'i': isize + 324..327 'ten': usize + 337..339 '10': usize + 349..357 'some_num': usize + 360..366 '0usize': usize + 376..389 'ten_is_eleven': bool + 392..395 'ten': usize + 392..407 'ten == some_num': bool + 399..407 'some_num': usize + 414..417 'ten': usize + 414..421 'ten < 3': bool + 420..421 '3': usize "#]], ); } @@ -1071,6 +1078,7 @@ fn infer_inherent_method_str() { check_infer( r#" +//- /core.rs crate:core #![rustc_coherence_is_core] #[lang = "str"] impl str { @@ -2691,6 +2699,7 @@ fn box_into_vec() { check_infer( r#" +//- /core.rs crate:core #[lang = "sized"] pub trait Sized {} @@ -3934,3 +3943,16 @@ "#]], ); } + +#[test] +fn infer_array_size() { + check_no_mismatches( + r#" +fn foo(a: [u8; 3]) {} + +fn bar() { + foo([0; _]); +} + "#, + ); +}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index c0e4393..eb4ae5e 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs
@@ -1,4 +1,3 @@ -use cov_mark::check; use expect_test::expect; use crate::tests::infer_with_mismatches; @@ -278,11 +277,11 @@ fn infer_ops_neg() { check_types( r#" -//- /main.rs crate:main deps:std +//- minicore:unary_ops struct Bar; struct Foo; -impl std::ops::Neg for Bar { +impl core::ops::Neg for Bar { type Output = Foo; } @@ -291,15 +290,6 @@ let b = -a; b; } //^ Foo - -//- /std.rs crate:std -#[prelude_import] use ops::*; -mod ops { - #[lang = "neg"] - pub trait Neg { - type Output; - } -} "#, ); } @@ -308,11 +298,11 @@ fn infer_ops_not() { check_types( r#" -//- /main.rs crate:main deps:std +//- minicore:unary_ops struct Bar; struct Foo; -impl std::ops::Not for Bar { +impl core::ops::Not for Bar { type Output = Foo; } @@ -321,15 +311,6 @@ let b = !a; b; } //^ Foo - -//- /std.rs crate:std -#[prelude_import] use ops::*; -mod ops { - #[lang = "not"] - pub trait Not { - type Output; - } -} "#, ); } @@ -368,7 +349,6 @@ #[test] fn trait_default_method_self_bound_implements_trait() { - cov_mark::check!(trait_self_implements_self); check( r#" trait Trait { @@ -1211,7 +1191,7 @@ expect![[r#" 29..33 'self': &'? Self 54..58 'self': &'? Self - 98..100 '{}': impl Trait<u64> + 98..100 '{}': () 110..111 'x': impl Trait<u64> 130..131 'y': &'? 
impl Trait<u64> 151..268 '{ ...2(); }': () @@ -2982,13 +2962,13 @@ 140..146 'IsCopy': IsCopy 140..153 'IsCopy.test()': bool 159..166 'NotCopy': NotCopy - 159..173 'NotCopy.test()': {unknown} + 159..173 'NotCopy.test()': bool 179..195 '(IsCop...sCopy)': (IsCopy, IsCopy) 179..202 '(IsCop...test()': bool 180..186 'IsCopy': IsCopy 188..194 'IsCopy': IsCopy 208..225 '(IsCop...tCopy)': (IsCopy, NotCopy) - 208..232 '(IsCop...test()': {unknown} + 208..232 '(IsCop...test()': bool 209..215 'IsCopy': IsCopy 217..224 'NotCopy': NotCopy "#]], @@ -3081,7 +3061,7 @@ 79..194 '{ ...ized }': () 85..88 '1u8': u8 85..95 '1u8.test()': bool - 101..116 '(*"foo").test()': {unknown} + 101..116 '(*"foo").test()': bool 102..108 '*"foo"': str 103..108 '"foo"': &'static str 135..145 '(1u8, 1u8)': (u8, u8) @@ -3089,7 +3069,7 @@ 136..139 '1u8': u8 141..144 '1u8': u8 158..171 '(1u8, *"foo")': (u8, str) - 158..178 '(1u8, ...test()': {unknown} + 158..178 '(1u8, ...test()': bool 159..162 '1u8': u8 164..170 '*"foo"': str 165..170 '"foo"': &'static str @@ -3944,7 +3924,6 @@ #[test] fn foreign_trait_with_local_trait_impl() { - check!(block_local_impls); check( r#" mod module { @@ -3955,15 +3934,16 @@ } fn f() { + struct Foo; use module::T; - impl T for usize { + impl T for Foo { const C: usize = 0; fn f(&self) {} } - 0usize.f(); - //^^^^^^^^^^ type: () - usize::C; - //^^^^^^^^type: usize + Foo.f(); + //^^^^^^^ type: () + Foo::C; + //^^^^^^ type: usize } "#, ); @@ -4023,7 +4003,7 @@ 212..295 '{ ...ZED; }': () 218..239 'F::Exp..._SIZED': Yes 245..266 'F::Imp..._SIZED': Yes - 272..292 'F::Rel..._SIZED': {unknown} + 272..292 'F::Rel..._SIZED': Yes "#]], ); } @@ -4274,7 +4254,7 @@ 127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static) 164..195 '{ ...f(); }': () 170..171 'v': &'? 
(dyn Trait<Assoc<i32> = &'a i32> + 'static) - 170..184 'v.get::<i32>()': <dyn Trait<Assoc<i32> = &'a i32> + 'static as Trait>::Assoc<i32> + 170..184 'v.get::<i32>()': <{unknown} as Trait>::Assoc<i32> 170..192 'v.get:...eref()': {unknown} "#]], ); @@ -5051,3 +5031,28 @@ "#]], ); } + +#[test] +fn implicit_sized_bound_on_param() { + check( + r#" +//- minicore: sized +struct PBox<T, A>(T, A); + +impl<T, A> PBox<T, A> { + fn token_with(self) {} +} + +trait MoveMessage { + fn token<A>(self, alloc: A) + where + Self: Sized, + { + let b = PBox::<Self, A>(self, alloc); + b.token_with(); + // ^^^^^^^^^^^^^^ type: () + } +} + "#, + ); +}
diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index 00c8eb7..2055c31 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs
@@ -4,13 +4,18 @@ use std::hash::Hash; use base_db::Crate; -use hir_def::{BlockId, TraitId, lang_item::LangItem}; +use hir_def::{ + AdtId, AssocItemId, BlockId, HasModule, ImplId, Lookup, TraitId, + lang_item::LangItem, + nameres::DefMap, + signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags}, +}; use hir_expand::name::Name; use intern::sym; use rustc_next_trait_solver::solve::{HasChanged, SolverDelegateEvalExt}; use rustc_type_ir::{ TypingMode, - inherent::{IntoKind, Span as _}, + inherent::{AdtDef, BoundExistentialPredicates, IntoKind, Span as _}, solve::Certainty, }; use triomphe::Arc; @@ -263,3 +268,147 @@ let result = crate::traits::next_trait_solve_in_ctxt(&infcx, goal); matches!(result, Ok((_, Certainty::Yes))) } + +pub fn is_inherent_impl_coherent(db: &dyn HirDatabase, def_map: &DefMap, impl_id: ImplId) -> bool { + let self_ty = db.impl_self_ty(impl_id).instantiate_identity(); + let self_ty = self_ty.kind(); + let impl_allowed = match self_ty { + TyKind::Tuple(_) + | TyKind::FnDef(_, _) + | TyKind::Array(_, _) + | TyKind::Never + | TyKind::RawPtr(_, _) + | TyKind::Ref(_, _, _) + | TyKind::Slice(_) + | TyKind::Str + | TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) => def_map.is_rustc_coherence_is_core(), + + TyKind::Adt(adt_def, _) => adt_def.def_id().0.module(db).krate() == def_map.krate(), + TyKind::Dynamic(it, _) => it + .principal_def_id() + .is_some_and(|trait_id| trait_id.0.module(db).krate() == def_map.krate()), + + _ => true, + }; + impl_allowed || { + let rustc_has_incoherent_inherent_impls = match self_ty { + TyKind::Tuple(_) + | TyKind::FnDef(_, _) + | TyKind::Array(_, _) + | TyKind::Never + | TyKind::RawPtr(_, _) + | TyKind::Ref(_, _, _) + | TyKind::Slice(_) + | TyKind::Str + | TyKind::Bool + | TyKind::Char + | TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) => true, + + TyKind::Adt(adt_def, _) => match adt_def.def_id().0 { + hir_def::AdtId::StructId(id) => db + 
.struct_signature(id) + .flags + .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), + hir_def::AdtId::UnionId(id) => db + .union_signature(id) + .flags + .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), + hir_def::AdtId::EnumId(it) => db + .enum_signature(it) + .flags + .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS), + }, + TyKind::Dynamic(it, _) => it.principal_def_id().is_some_and(|trait_id| { + db.trait_signature(trait_id.0) + .flags + .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) + }), + + _ => false, + }; + let items = impl_id.impl_items(db); + rustc_has_incoherent_inherent_impls + && !items.items.is_empty() + && items.items.iter().all(|&(_, assoc)| match assoc { + AssocItemId::FunctionId(it) => { + db.function_signature(it).flags.contains(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL) + } + AssocItemId::ConstId(it) => { + db.const_signature(it).flags.contains(ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL) + } + AssocItemId::TypeAliasId(it) => db + .type_alias_signature(it) + .flags + .contains(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL), + }) + } +} + +/// Checks whether the impl satisfies the orphan rules. +/// +/// Given `impl<P1..=Pn> Trait<T1..=Tn> for T0`, an `impl`` is valid only if at least one of the following is true: +/// - Trait is a local trait +/// - All of +/// - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type. 
+/// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`) +pub fn check_orphan_rules<'db>(db: &'db dyn HirDatabase, impl_: ImplId) -> bool { + let Some(impl_trait) = db.impl_trait(impl_) else { + // not a trait impl + return true; + }; + + let local_crate = impl_.lookup(db).container.krate(); + let is_local = |tgt_crate| tgt_crate == local_crate; + + let trait_ref = impl_trait.instantiate_identity(); + let trait_id = trait_ref.def_id.0; + if is_local(trait_id.module(db).krate()) { + // trait to be implemented is local + return true; + } + + let unwrap_fundamental = |mut ty: Ty<'db>| { + // Unwrap all layers of fundamental types with a loop. + loop { + match ty.kind() { + TyKind::Ref(_, referenced, _) => ty = referenced, + TyKind::Adt(adt_def, subs) => { + let AdtId::StructId(s) = adt_def.def_id().0 else { + break ty; + }; + let struct_signature = db.struct_signature(s); + if struct_signature.flags.contains(StructFlags::FUNDAMENTAL) { + let next = subs.types().next(); + match next { + Some(it) => ty = it, + None => break ty, + } + } else { + break ty; + } + } + _ => break ty, + } + } + }; + // - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type. + + // FIXME: param coverage + // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`) + let is_not_orphan = trait_ref.args.types().any(|ty| match unwrap_fundamental(ty).kind() { + TyKind::Adt(adt_def, _) => is_local(adt_def.def_id().0.module(db).krate()), + TyKind::Error(_) => true, + TyKind::Dynamic(it, _) => { + it.principal_def_id().is_some_and(|trait_id| is_local(trait_id.0.module(db).krate())) + } + _ => false, + }); + #[allow(clippy::let_and_return)] + is_not_orphan +}
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 147f1b8..cfc4080 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs
@@ -1,7 +1,5 @@ //! Attributes & documentation for hir types. -use std::ops::ControlFlow; - use hir_def::{ AssocItemId, AttrDefId, ModuleDefId, attr::AttrsWithOwner, @@ -14,7 +12,13 @@ mod_path::{ModPath, PathKind}, name::Name, }; -use hir_ty::{db::HirDatabase, method_resolution}; +use hir_ty::{ + db::HirDatabase, + method_resolution::{ + self, CandidateId, MethodError, MethodResolutionContext, MethodResolutionUnstableFeatures, + }, + next_solver::{DbInterner, TypingMode, infer::DbInternerInferExt}, +}; use crate::{ Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl, @@ -242,7 +246,7 @@ name: &Name, ns: Option<Namespace>, ) -> Option<DocLinkDef> { - ty.iterate_assoc_items(db, ty.krate(db), move |assoc_item| { + ty.iterate_assoc_items(db, move |assoc_item| { if assoc_item.name(db)? != *name { return None; } @@ -257,37 +261,39 @@ name: &Name, ns: Option<Namespace>, ) -> Option<DocLinkDef> { - let canonical = ty.canonical(db); let krate = ty.krate(db); let environment = resolver .generic_def() .map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); let traits_in_scope = resolver.traits_in_scope(db); - let mut result = None; - // `ty.iterate_path_candidates()` require a scope, which is not available when resolving // attributes here. Use path resolution directly instead. // // FIXME: resolve type aliases (which are not yielded by iterate_path_candidates) - _ = method_resolution::iterate_path_candidates( - &canonical, - db, - environment, - &traits_in_scope, - method_resolution::VisibleFromModule::None, - Some(name), - &mut |_, assoc_item_id: AssocItemId, _| { - // If two traits in scope define the same item, Rustdoc links to no specific trait (for - // instance, given two methods `a`, Rustdoc simply links to `method.a` with no - // disambiguation) so we just pick the first one we find as well. 
- result = as_module_def_if_namespace_matches(assoc_item_id.into(), ns); - - if result.is_some() { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } - }, - ); - - result + let interner = DbInterner::new_with(db, Some(environment.krate), environment.block); + let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis); + let unstable_features = + MethodResolutionUnstableFeatures::from_def_map(resolver.top_level_def_map()); + let ctx = MethodResolutionContext { + infcx: &infcx, + resolver: &resolver, + env: &environment, + traits_in_scope: &traits_in_scope, + edition: krate.edition(db), + unstable_features: &unstable_features, + }; + let resolution = ctx.probe_for_name(method_resolution::Mode::Path, name.clone(), ty.ty); + let resolution = match resolution { + Ok(resolution) => resolution.item, + Err(MethodError::PrivateMatch(resolution)) => resolution.item, + _ => return None, + }; + let resolution = match resolution { + CandidateId::FunctionId(id) => AssocItem::Function(id.into()), + CandidateId::ConstId(id) => AssocItem::Const(id.into()), + }; + as_module_def_if_namespace_matches(resolution, ns) } fn resolve_field(
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index d61c2ec..c215438 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs
@@ -11,6 +11,7 @@ type_ref::{TypeBound, TypeRef, TypeRefId}, }; use hir_ty::{ + GenericPredicates, db::HirDatabase, display::{ HirDisplay, HirDisplayError, HirDisplayWithExpressionStore, HirFormatter, SizedByDefault, @@ -484,11 +485,9 @@ let param_data = ¶ms[self.id.local_id()]; let krate = self.id.parent().krate(f.db).id; let ty = self.ty(f.db).ty; - let predicates = f.db.generic_predicates(self.id.parent()); + let predicates = GenericPredicates::query_all(f.db, self.id.parent()); let predicates = predicates - .instantiate_identity() - .into_iter() - .flatten() + .iter_identity_copied() .filter(|wc| match wc.kind().skip_binder() { ClauseKind::Trait(tr) => tr.self_ty() == ty, ClauseKind::Projection(proj) => proj.self_ty() == ty,
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index f2faf99..82c6cf7 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs
@@ -35,6 +35,9 @@ mod display; +#[doc(hidden)] +pub use hir_def::ModuleId; + use std::{ fmt, mem::discriminant, @@ -48,8 +51,8 @@ AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, - LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, SyntheticSyntax, - TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, + LocalFieldId, Lookup, MacroExpander, MacroId, StaticId, StructId, SyntheticSyntax, TupleId, + TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap}, hir::{ BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat, @@ -72,26 +75,28 @@ proc_macro::ProcMacroKind, }; use hir_ty::{ - TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId, all_super_traits, autoderef, - check_orphan_rules, + GenericPredicates, TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId, + all_super_traits, autoderef, check_orphan_rules, consteval::try_const_usize, db::{InternedClosureId, InternedCoroutineId}, diagnostics::BodyValidationDiagnostic, direct_super_traits, known_const_to_ast, layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, - method_resolution, + method_resolution::{ + self, InherentImpls, MethodResolutionContext, MethodResolutionUnstableFeatures, + }, mir::{MutBorrowKind, interpret_mir}, next_solver::{ - AliasTy, Canonical, ClauseKind, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, - GenericArgs, PolyFnSig, Region, SolverDefId, Ty, TyKind, TypingMode, + AliasTy, ClauseKind, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, + PolyFnSig, Region, SolverDefId, Ty, TyKind, TypingMode, infer::{DbInternerInferExt, InferCtxt}, }, - traits::{self, FnTrait, structurally_normalize_ty}, + 
traits::{self, FnTrait, is_inherent_impl_coherent, structurally_normalize_ty}, }; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_type_ir::{ - AliasTyKind, TypeSuperVisitable, TypeVisitable, TypeVisitor, + AliasTyKind, TypeSuperVisitable, TypeVisitable, TypeVisitor, fast_reject, inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _}, }; use smallvec::SmallVec; @@ -133,7 +138,7 @@ attr::{AttrSourceMap, Attrs, AttrsWithOwner}, find_path::PrefixKind, import_map, - lang_item::LangItem, + lang_item::{LangItem, crate_lang_items}, nameres::{DefMap, ModuleSource, crate_def_map}, per_ns::Namespace, type_ref::{Mutability, TypeRef}, @@ -168,11 +173,12 @@ drop::DropGlue, dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode}, layout::LayoutError, - method_resolution::TyFingerprint, mir::{MirEvalError, MirLowerError}, next_solver::abi::Safety, next_solver::clear_tls_solver_cache, }, + // FIXME: These are needed for import assets, properly encapsulate them. 
+ hir_ty::{method_resolution::TraitImpls, next_solver::SimplifiedType}, intern::{Symbol, sym}, }; @@ -751,8 +757,6 @@ } self.legacy_macros(db).into_iter().for_each(|m| emit_macro_def_diagnostics(db, acc, m)); - let inherent_impls = db.inherent_impls_in_crate(self.id.krate()); - let interner = DbInterner::new_with(db, Some(self.id.krate()), self.id.containing_block()); let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis()); @@ -781,7 +785,9 @@ emit_def_diagnostic(db, acc, diag, edition); } - if inherent_impls.invalid_impls().contains(&impl_def.id) { + if impl_signature.target_trait.is_none() + && !is_inherent_impl_coherent(db, def_map, impl_def.id) + { acc.push(IncoherentImpl { impl_: ast_id_map.get(loc.id.value), file_id }.into()) } @@ -3347,6 +3353,15 @@ TypeAlias(TypeAlias), } +impl From<method_resolution::CandidateId> for AssocItem { + fn from(value: method_resolution::CandidateId) -> Self { + match value { + method_resolution::CandidateId::FunctionId(id) => AssocItem::Function(Function { id }), + method_resolution::CandidateId::ConstId(id) => AssocItem::Const(Const { id }), + } + } +} + #[derive(Debug, Clone)] pub enum AssocItemContainer { Trait(Trait), @@ -3698,7 +3713,7 @@ push_ty_diagnostics( db, acc, - db.generic_predicates_without_parent_with_diagnostics(def).1, + GenericPredicates::query_with_diagnostics(db, def).1.clone(), &source_map, ); for (param_id, param) in generics.iter_type_or_consts() { @@ -4193,10 +4208,13 @@ /// parameter, not additional bounds that might be added e.g. by a method if /// the parameter comes from an impl! 
pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> { - db.generic_predicates_for_param(self.id.parent(), self.id.into(), None) - .iter() + let self_ty = self.ty(db).ty; + GenericPredicates::query_explicit(db, self.id.parent()) + .iter_identity_copied() .filter_map(|pred| match &pred.kind().skip_binder() { - ClauseKind::Trait(trait_ref) => Some(Trait::from(trait_ref.def_id().0)), + ClauseKind::Trait(trait_ref) if trait_ref.self_ty() == self_ty => { + Some(Trait::from(trait_ref.def_id().0)) + } _ => None, }) .collect() @@ -4358,90 +4376,81 @@ impl Impl { pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<Impl> { - let inherent = db.inherent_impls_in_crate(krate.id); - let trait_ = db.trait_impls_in_crate(krate.id); + let mut result = Vec::new(); + extend_with_def_map(db, crate_def_map(db, krate.id), &mut result); + return result; - inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect() + fn extend_with_def_map(db: &dyn HirDatabase, def_map: &DefMap, result: &mut Vec<Impl>) { + for (_, module) in def_map.modules() { + result.extend(module.scope.impls().map(Impl::from)); + + for unnamed_const in module.scope.unnamed_consts() { + for (_, block_def_map) in db.body(unnamed_const.into()).blocks(db) { + extend_with_def_map(db, block_def_map, result); + } + } + } + } } pub fn all_in_module(db: &dyn HirDatabase, module: Module) -> Vec<Impl> { module.id.def_map(db)[module.id.local_id].scope.impls().map(Into::into).collect() } + /// **Note:** This is an **approximation** that strives to give the *human-perceived notion* of an "impl for type", + /// **not** answer the technical question "what are all impls applying to this type". In particular, it excludes + /// blanket impls, and only does a shallow type constructor check. In fact, this should've probably been on `Adt` + /// etc., and not on `Type`. 
If you would want to create a precise list of all impls applying to a type, + /// you would need to include blanket impls, and try to prove to predicates for each candidate. pub fn all_for_type<'db>(db: &'db dyn HirDatabase, Type { ty, env }: Type<'db>) -> Vec<Impl> { - let def_crates = match method_resolution::def_crates(db, ty, env.krate) { - Some(def_crates) => def_crates, - None => return Vec::new(), + let mut result = Vec::new(); + let interner = DbInterner::new_with(db, Some(env.krate), env.block); + let Some(simplified_ty) = + fast_reject::simplify_type(interner, ty, fast_reject::TreatParams::AsRigid) + else { + return Vec::new(); }; - - let filter = |impl_def: &Impl| { - let self_ty = impl_def.self_ty(db); - let rref = self_ty.remove_ref(); - ty.equals_ctor(rref.as_ref().map_or(self_ty.ty, |it| it.ty)) - }; - - let fp = TyFingerprint::for_inherent_impl(ty); - let fp = match fp { - Some(fp) => fp, - None => return Vec::new(), - }; - - let mut all = Vec::new(); - def_crates.iter().for_each(|&id| { - all.extend( - db.inherent_impls_in_crate(id) - .for_self_ty(ty) - .iter() - .cloned() - .map(Self::from) - .filter(filter), - ) - }); - - for id in def_crates - .iter() - .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db)) - .map(|Crate { id }| id) - { - all.extend( - db.trait_impls_in_crate(id) - .for_self_ty_without_blanket_impls(fp) - .map(Self::from) - .filter(filter), + let mut extend_with_impls = + |impls: &[ImplId]| result.extend(impls.iter().copied().map(Impl::from)); + extend_with_impls(method_resolution::incoherent_inherent_impls(db, simplified_ty)); + if let Some(module) = method_resolution::simplified_type_module(db, &simplified_ty) { + InherentImpls::for_each_crate_and_block( + db, + module.krate(), + module.containing_block(), + &mut |impls| extend_with_impls(impls.for_self_ty(&simplified_ty)), ); - } - - if let Some(block) = ty.as_adt().and_then(|(def, _)| def.module(db).containing_block()) { - if let Some(inherent_impls) = 
db.inherent_impls_in_block(block) { - all.extend( - inherent_impls.for_self_ty(ty).iter().cloned().map(Self::from).filter(filter), - ); + std::iter::successors(module.containing_block(), |block| { + block.loc(db).module.containing_block() + }) + .filter_map(|block| TraitImpls::for_block(db, block).as_deref()) + .for_each(|impls| impls.for_self_ty(&simplified_ty, &mut extend_with_impls)); + for &krate in &**db.all_crates() { + TraitImpls::for_crate(db, krate) + .for_self_ty(&simplified_ty, &mut extend_with_impls); } - if let Some(trait_impls) = db.trait_impls_in_block(block) { - all.extend( - trait_impls - .for_self_ty_without_blanket_impls(fp) - .map(Self::from) - .filter(filter), - ); + } else { + for &krate in &**db.all_crates() { + TraitImpls::for_crate(db, krate) + .for_self_ty(&simplified_ty, &mut extend_with_impls); } } - - all + result } pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> { - let module = trait_.module(db); - let krate = module.krate(); + let module = trait_.module(db).id; let mut all = Vec::new(); - for Crate { id } in krate.transitive_reverse_dependencies(db) { - let impls = db.trait_impls_in_crate(id); - all.extend(impls.for_trait(trait_.id).map(Self::from)) + let mut handle_impls = |impls: &TraitImpls| { + impls.for_trait(trait_.id, |impls| all.extend(impls.iter().copied().map(Impl::from))); + }; + for krate in db.transitive_rev_deps(module.krate()) { + handle_impls(TraitImpls::for_crate(db, krate)); } - if let Some(block) = module.id.containing_block() - && let Some(trait_impls) = db.trait_impls_in_block(block) + if let Some(block) = module.containing_block() + && let Some(impls) = TraitImpls::for_block(db, block) { - all.extend(trait_impls.for_trait(trait_.id).map(Self::from)); + handle_impls(impls); } all } @@ -5262,13 +5271,12 @@ } } - pub fn fingerprint_for_trait_impl(&self) -> Option<TyFingerprint> { - TyFingerprint::for_trait_impl(self.ty) - } - - pub(crate) fn canonical(&self, db: &'db dyn HirDatabase) -> 
Canonical<'db, Ty<'db>> { - let interner = DbInterner::new_with(db, None, None); - hir_ty::replace_errors_with_variables(interner, &self.ty) + pub fn fingerprint_for_trait_impl(&self) -> Option<SimplifiedType> { + fast_reject::simplify_type( + DbInterner::conjure(), + self.ty, + fast_reject::TreatParams::AsRigid, + ) } /// Returns types that this type dereferences to (including this type itself). The returned @@ -5292,11 +5300,10 @@ pub fn iterate_assoc_items<T>( &self, db: &'db dyn HirDatabase, - krate: Crate, mut callback: impl FnMut(AssocItem) -> Option<T>, ) -> Option<T> { let mut slot = None; - self.iterate_assoc_items_dyn(db, krate, &mut |assoc_item_id| { + self.iterate_assoc_items_dyn(db, &mut |assoc_item_id| { slot = callback(assoc_item_id.into()); slot.is_some() }); @@ -5306,24 +5313,36 @@ fn iterate_assoc_items_dyn( &self, db: &'db dyn HirDatabase, - krate: Crate, callback: &mut dyn FnMut(AssocItemId) -> bool, ) { - let ty_ns = self.ty; - let def_crates = match method_resolution::def_crates(db, ty_ns, krate.id) { - Some(it) => it, - None => return, - }; - for krate in def_crates { - let impls = db.inherent_impls_in_crate(krate); - - for impl_def in impls.for_self_ty(ty_ns) { + let mut handle_impls = |impls: &[ImplId]| { + for &impl_def in impls { for &(_, item) in impl_def.impl_items(db).items.iter() { if callback(item) { return; } } } + }; + + let interner = DbInterner::new_with(db, None, None); + let Some(simplified_type) = + fast_reject::simplify_type(interner, self.ty, fast_reject::TreatParams::AsRigid) + else { + return; + }; + + handle_impls(method_resolution::incoherent_inherent_impls(db, simplified_type)); + + if let Some(module) = method_resolution::simplified_type_module(db, &simplified_type) { + InherentImpls::for_each_crate_and_block( + db, + module.krate(), + module.containing_block(), + &mut |impls| { + handle_impls(impls.for_self_ty(&simplified_type)); + }, + ); } } @@ -5414,26 +5433,20 @@ db: &'db dyn HirDatabase, scope: 
&SemanticsScope<'_>, traits_in_scope: &FxHashSet<TraitId>, - with_local_impls: Option<Module>, name: Option<&Name>, mut callback: impl FnMut(Function) -> Option<T>, ) -> Option<T> { let _p = tracing::info_span!("iterate_method_candidates_with_traits").entered(); let mut slot = None; - self.iterate_method_candidates_split_inherent( - db, - scope, - traits_in_scope, - with_local_impls, - name, - |f| match callback(f) { + self.iterate_method_candidates_split_inherent(db, scope, traits_in_scope, name, |f| { + match callback(f) { it @ Some(_) => { slot = it; ControlFlow::Break(()) } None => ControlFlow::Continue(()), - }, - ); + } + }); slot } @@ -5441,7 +5454,6 @@ &self, db: &'db dyn HirDatabase, scope: &SemanticsScope<'_>, - with_local_impls: Option<Module>, name: Option<&Name>, callback: impl FnMut(Function) -> Option<T>, ) -> Option<T> { @@ -5449,12 +5461,37 @@ db, scope, &scope.visible_traits().0, - with_local_impls, name, callback, ) } + fn with_method_resolution<R>( + &self, + db: &'db dyn HirDatabase, + resolver: &Resolver<'db>, + traits_in_scope: &FxHashSet<TraitId>, + f: impl FnOnce(&MethodResolutionContext<'_, 'db>) -> R, + ) -> R { + let module = resolver.module(); + let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block()); + let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis); + let unstable_features = + MethodResolutionUnstableFeatures::from_def_map(resolver.top_level_def_map()); + let environment = resolver + .generic_def() + .map_or_else(|| TraitEnvironment::empty(module.krate()), |d| db.trait_environment(d)); + let ctx = MethodResolutionContext { + infcx: &infcx, + resolver, + env: &environment, + traits_in_scope, + edition: resolver.krate().data(db).edition, + unstable_features: &unstable_features, + }; + f(&ctx) + } + /// Allows you to treat inherent and non-inherent methods differently. /// /// Note that inherent methods may actually be trait methods! 
For example, in `dyn Trait`, the trait's methods @@ -5464,67 +5501,77 @@ db: &'db dyn HirDatabase, scope: &SemanticsScope<'_>, traits_in_scope: &FxHashSet<TraitId>, - with_local_impls: Option<Module>, name: Option<&Name>, - callback: impl MethodCandidateCallback, + mut callback: impl MethodCandidateCallback, ) { - struct Callback<T>(T); - - impl<T: MethodCandidateCallback> method_resolution::MethodCandidateCallback for Callback<T> { - fn on_inherent_method( - &mut self, - _adjustments: method_resolution::ReceiverAdjustments, - item: AssocItemId, - _is_visible: bool, - ) -> ControlFlow<()> { - if let AssocItemId::FunctionId(func) = item { - self.0.on_inherent_method(func.into()) - } else { - ControlFlow::Continue(()) - } - } - - fn on_trait_method( - &mut self, - _adjustments: method_resolution::ReceiverAdjustments, - item: AssocItemId, - _is_visible: bool, - ) -> ControlFlow<()> { - if let AssocItemId::FunctionId(func) = item { - self.0.on_trait_method(func.into()) - } else { - ControlFlow::Continue(()) - } - } - } - let _p = tracing::info_span!( - "iterate_method_candidates_dyn", - with_local_impls = traits_in_scope.len(), + "iterate_method_candidates_split_inherent", traits_in_scope = traits_in_scope.len(), ?name, ) .entered(); - let interner = DbInterner::new_with(db, None, None); - // There should be no inference vars in types passed here - let canonical = hir_ty::replace_errors_with_variables(interner, &self.ty); - let krate = scope.krate(); - let environment = scope - .resolver() - .generic_def() - .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); + self.with_method_resolution(db, scope.resolver(), traits_in_scope, |ctx| { + // There should be no inference vars in types passed here + let canonical = hir_ty::replace_errors_with_variables(ctx.infcx.interner, &self.ty); + let (self_ty, _) = ctx.infcx.instantiate_canonical(&canonical); - _ = method_resolution::iterate_method_candidates_dyn( - &canonical, - db, - environment, - 
traits_in_scope, - with_local_impls.and_then(|b| b.id.containing_block()).into(), - name, - method_resolution::LookupMode::MethodCall, - &mut Callback(callback), - ); + match name { + Some(name) => { + match ctx.probe_for_name( + method_resolution::Mode::MethodCall, + name.clone(), + self_ty, + ) { + Ok(candidate) + | Err(method_resolution::MethodError::PrivateMatch(candidate)) => { + let method_resolution::CandidateId::FunctionId(id) = candidate.item + else { + unreachable!("`Mode::MethodCall` can only return functions"); + }; + let id = Function { id }; + match candidate.kind { + method_resolution::PickKind::InherentImplPick(_) + | method_resolution::PickKind::ObjectPick(..) + | method_resolution::PickKind::WhereClausePick(..) => { + // Candidates from where clauses and trait objects are considered inherent. + _ = callback.on_inherent_method(id); + } + method_resolution::PickKind::TraitPick(..) => { + _ = callback.on_trait_method(id); + } + } + } + Err(_) => {} + }; + } + None => { + _ = ctx.probe_all(method_resolution::Mode::MethodCall, self_ty).try_for_each( + |candidate| { + let method_resolution::CandidateId::FunctionId(id) = + candidate.candidate.item + else { + unreachable!("`Mode::MethodCall` can only return functions"); + }; + let id = Function { id }; + match candidate.candidate.kind { + method_resolution::CandidateKind::InherentImplCandidate { + .. + } + | method_resolution::CandidateKind::ObjectCandidate(..) + | method_resolution::CandidateKind::WhereClauseCandidate(..) => { + // Candidates from where clauses and trait objects are considered inherent. + callback.on_inherent_method(id) + } + method_resolution::CandidateKind::TraitCandidate(..) 
=> { + callback.on_trait_method(id) + } + } + }, + ); + } + } + }) } #[tracing::instrument(skip_all, fields(name = ?name))] @@ -5533,27 +5580,21 @@ db: &'db dyn HirDatabase, scope: &SemanticsScope<'_>, traits_in_scope: &FxHashSet<TraitId>, - with_local_impls: Option<Module>, name: Option<&Name>, mut callback: impl FnMut(AssocItem) -> Option<T>, ) -> Option<T> { let _p = tracing::info_span!("iterate_path_candidates").entered(); let mut slot = None; - self.iterate_path_candidates_split_inherent( - db, - scope, - traits_in_scope, - with_local_impls, - name, - |item| match callback(item) { + self.iterate_path_candidates_split_inherent(db, scope, traits_in_scope, name, |item| { + match callback(item) { it @ Some(_) => { slot = it; ControlFlow::Break(()) } None => ControlFlow::Continue(()), - }, - ); + } + }); slot } @@ -5568,50 +5609,68 @@ db: &'db dyn HirDatabase, scope: &SemanticsScope<'_>, traits_in_scope: &FxHashSet<TraitId>, - with_local_impls: Option<Module>, name: Option<&Name>, - callback: impl PathCandidateCallback, + mut callback: impl PathCandidateCallback, ) { - struct Callback<T>(T); + let _p = tracing::info_span!( + "iterate_path_candidates_split_inherent", + traits_in_scope = traits_in_scope.len(), + ?name, + ) + .entered(); - impl<T: PathCandidateCallback> method_resolution::MethodCandidateCallback for Callback<T> { - fn on_inherent_method( - &mut self, - _adjustments: method_resolution::ReceiverAdjustments, - item: AssocItemId, - _is_visible: bool, - ) -> ControlFlow<()> { - self.0.on_inherent_item(item.into()) + self.with_method_resolution(db, scope.resolver(), traits_in_scope, |ctx| { + // There should be no inference vars in types passed here + let canonical = hir_ty::replace_errors_with_variables(ctx.infcx.interner, &self.ty); + let (self_ty, _) = ctx.infcx.instantiate_canonical(&canonical); + + match name { + Some(name) => { + match ctx.probe_for_name( + method_resolution::Mode::MethodCall, + name.clone(), + self_ty, + ) { + Ok(candidate) + | 
Err(method_resolution::MethodError::PrivateMatch(candidate)) => { + let id = candidate.item.into(); + match candidate.kind { + method_resolution::PickKind::InherentImplPick(_) + | method_resolution::PickKind::ObjectPick(..) + | method_resolution::PickKind::WhereClausePick(..) => { + // Candidates from where clauses and trait objects are considered inherent. + _ = callback.on_inherent_item(id); + } + method_resolution::PickKind::TraitPick(..) => { + _ = callback.on_trait_item(id); + } + } + } + Err(_) => {} + }; + } + None => { + _ = ctx.probe_all(method_resolution::Mode::Path, self_ty).try_for_each( + |candidate| { + let id = candidate.candidate.item.into(); + match candidate.candidate.kind { + method_resolution::CandidateKind::InherentImplCandidate { + .. + } + | method_resolution::CandidateKind::ObjectCandidate(..) + | method_resolution::CandidateKind::WhereClauseCandidate(..) => { + // Candidates from where clauses and trait objects are considered inherent. + callback.on_inherent_item(id) + } + method_resolution::CandidateKind::TraitCandidate(..) => { + callback.on_trait_item(id) + } + } + }, + ); + } } - - fn on_trait_method( - &mut self, - _adjustments: method_resolution::ReceiverAdjustments, - item: AssocItemId, - _is_visible: bool, - ) -> ControlFlow<()> { - self.0.on_trait_item(item.into()) - } - } - - let interner = DbInterner::new_with(db, None, None); - let canonical = hir_ty::replace_errors_with_variables(interner, &self.ty); - - let krate = scope.krate(); - let environment = scope - .resolver() - .generic_def() - .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d)); - - _ = method_resolution::iterate_path_candidates( - &canonical, - db, - environment, - traits_in_scope, - with_local_impls.and_then(|b| b.id.containing_block()).into(), - name, - &mut Callback(callback), - ); + }) } pub fn as_adt(&self) -> Option<Adt> {
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index ec43442..769cfd9 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs
@@ -1581,9 +1581,9 @@ hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => { Adjust::Borrow(AutoBorrow::RawPtr(mutability(m))) } - hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(_, m)) => { + hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => { // FIXME: Handle lifetimes here - Adjust::Borrow(AutoBorrow::Ref(mutability(m))) + Adjust::Borrow(AutoBorrow::Ref(mutability(m.into()))) } hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc), };
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index f994ed2..ae328a9 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs
@@ -35,7 +35,7 @@ unsafe_operations, }, lang_items::lang_items_for_bin_op, - method_resolution, + method_resolution::{self, CandidateId}, next_solver::{ DbInterner, ErrorGuaranteed, GenericArgs, Ty, TyKind, TypingMode, infer::DbInternerInferExt, }, @@ -651,8 +651,9 @@ let lhs = self.ty_of_expr(binop_expr.lhs()?)?; let rhs = self.ty_of_expr(binop_expr.rhs()?)?; - let (_op_trait, op_fn) = lang_items_for_bin_op(op) - .and_then(|(name, lang_item)| self.lang_trait_fn(db, lang_item, &name))?; + let (_op_trait, op_fn) = lang_items_for_bin_op(op).and_then(|(name, lang_item)| { + self.lang_trait_fn(db, lang_item, &Name::new_symbol_root(name)) + })?; // HACK: subst for `index()` coincides with that for `Index` because `index()` itself // doesn't have any generic parameters, so we skip building another subst for `index()`. let interner = DbInterner::new_with(db, None, None); @@ -861,7 +862,7 @@ let expr_id = self.expr_id(path_expr.into())?; if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_id) { let (assoc, subst) = match assoc { - AssocItemId::FunctionId(f_in_trait) => { + CandidateId::FunctionId(f_in_trait) => { match infer.type_of_expr_or_pat(expr_id) { None => { let subst = GenericSubstitution::new( @@ -869,7 +870,7 @@ subs, self.trait_environment(db), ); - (assoc, subst) + (AssocItemId::from(f_in_trait), subst) } Some(func_ty) => { if let TyKind::FnDef(_fn_def, subs) = func_ty.kind() { @@ -889,12 +890,12 @@ subs, self.trait_environment(db), ); - (assoc, subst) + (f_in_trait.into(), subst) } } } } - AssocItemId::ConstId(const_id) => { + CandidateId::ConstId(const_id) => { let (konst, subst) = self.resolve_impl_const_or_trait_def_with_subst(db, const_id, subs); let subst = GenericSubstitution::new( @@ -904,14 +905,6 @@ ); (konst.into(), subst) } - AssocItemId::TypeAliasId(type_alias) => ( - assoc, - GenericSubstitution::new( - type_alias.into(), - subs, - self.trait_environment(db), - ), - ), }; return 
Some((PathResolution::Def(AssocItem::from(assoc).into()), Some(subst))); @@ -927,7 +920,7 @@ if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_or_pat_id) { let (assoc, subst) = match assoc { - AssocItemId::ConstId(const_id) => { + CandidateId::ConstId(const_id) => { let (konst, subst) = self.resolve_impl_const_or_trait_def_with_subst(db, const_id, subs); let subst = GenericSubstitution::new( @@ -935,12 +928,12 @@ subst, self.trait_environment(db), ); - (konst.into(), subst) + (AssocItemId::from(konst), subst) } - assoc => ( - assoc, + CandidateId::FunctionId(function_id) => ( + function_id.into(), GenericSubstitution::new( - assoc.into(), + function_id.into(), subs, self.trait_environment(db), ),
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index d8c624e..bd4cff5 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs
@@ -18,7 +18,7 @@ }; use intern::Symbol; use rustc_hash::FxHashMap; -use syntax::{AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName}; +use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName}; use crate::{HasCrate, Module, ModuleDef, Semantics}; @@ -29,7 +29,7 @@ pub name: Symbol, pub def: ModuleDef, pub loc: DeclarationLocation, - pub container_name: Option<SmolStr>, + pub container_name: Option<Symbol>, /// Whether this symbol is a doc alias for the original symbol. pub is_alias: bool, pub is_assoc: bool, @@ -65,23 +65,29 @@ db: &'a dyn HirDatabase, symbols: FxIndexSet<FileSymbol>, work: Vec<SymbolCollectorWork>, - current_container_name: Option<SmolStr>, + current_container_name: Option<Symbol>, + collect_pub_only: bool, } /// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect /// all symbols that should be indexed for the given module. impl<'a> SymbolCollector<'a> { - pub fn new(db: &'a dyn HirDatabase) -> Self { + pub fn new(db: &'a dyn HirDatabase, collect_pub_only: bool) -> Self { SymbolCollector { db, symbols: Default::default(), work: Default::default(), current_container_name: None, + collect_pub_only, } } - pub fn new_module(db: &dyn HirDatabase, module: Module) -> Box<[FileSymbol]> { - let mut symbol_collector = SymbolCollector::new(db); + pub fn new_module( + db: &dyn HirDatabase, + module: Module, + collect_pub_only: bool, + ) -> Box<[FileSymbol]> { + let mut symbol_collector = SymbolCollector::new(db, collect_pub_only); symbol_collector.collect(module); symbol_collector.finish() } @@ -108,12 +114,16 @@ tracing::info!(?work, "SymbolCollector::do_work"); self.db.unwind_if_revision_cancelled(); - let parent_name = work.parent.map(|name| name.as_str().to_smolstr()); + let parent_name = work.parent.map(|name| Symbol::intern(name.as_str())); self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id)); } fn collect_from_module(&mut self, 
module_id: ModuleId) { - let push_decl = |this: &mut Self, def, name| { + let collect_pub_only = self.collect_pub_only; + let push_decl = |this: &mut Self, def: ModuleDefId, name, vis| { + if collect_pub_only && vis != Visibility::Public { + return; + } match def { ModuleDefId::ModuleId(id) => this.push_module(id, name), ModuleDefId::FunctionId(id) => { @@ -125,7 +135,7 @@ } ModuleDefId::AdtId(AdtId::EnumId(id)) => { this.push_decl(id, name, false, None); - let enum_name = this.db.enum_signature(id).name.as_str().to_smolstr(); + let enum_name = Symbol::intern(this.db.enum_signature(id).name.as_str()); this.with_container_name(Some(enum_name), |this| { let variants = id.enum_variants(this.db); for (variant_id, variant_name, _) in &variants.variants { @@ -175,6 +185,9 @@ }; let mut push_import = |this: &mut Self, i: ImportId, name: &Name, def: ModuleDefId, vis| { + if collect_pub_only && vis != Visibility::Public { + return; + } let source = import_child_source_cache .entry(i.use_) .or_insert_with(|| i.use_.child_source(this.db)); @@ -209,6 +222,9 @@ let push_extern_crate = |this: &mut Self, i: ExternCrateId, name: &Name, def: ModuleDefId, vis| { + if collect_pub_only && vis != Visibility::Public { + return; + } let loc = i.lookup(this.db); let source = loc.source(this.db); let rename = source.value.rename().and_then(|rename| rename.name()); @@ -258,7 +274,7 @@ continue; } // self is a declaration - push_decl(self, def, name) + push_decl(self, def, name, vis) } for (name, Item { def, vis, import }) in scope.macros() { @@ -271,7 +287,7 @@ continue; } // self is a declaration - push_decl(self, def.into(), name) + push_decl(self, ModuleDefId::MacroId(def), name, vis) } for (name, Item { def, vis, import }) in scope.values() { @@ -283,7 +299,7 @@ continue; } // self is a declaration - push_decl(self, def, name) + push_decl(self, def, name, vis) } for const_id in scope.unnamed_consts() { @@ -304,6 +320,9 @@ } fn collect_from_body(&mut self, body_id: impl 
Into<DefWithBodyId>, name: Option<Name>) { + if self.collect_pub_only { + return; + } let body_id = body_id.into(); let body = self.db.body(body_id); @@ -328,8 +347,13 @@ ) .to_smolstr(), ); - self.with_container_name(impl_name, |s| { + self.with_container_name(impl_name.as_deref().map(Symbol::intern), |s| { for &(ref name, assoc_item_id) in &impl_id.impl_items(self.db).items { + if s.collect_pub_only && s.db.assoc_visibility(assoc_item_id) != Visibility::Public + { + continue; + } + s.push_assoc_item(assoc_item_id, name, None) } }) @@ -337,14 +361,14 @@ fn collect_from_trait(&mut self, trait_id: TraitId, trait_do_not_complete: Complete) { let trait_data = self.db.trait_signature(trait_id); - self.with_container_name(Some(trait_data.name.as_str().into()), |s| { + self.with_container_name(Some(Symbol::intern(trait_data.name.as_str())), |s| { for &(ref name, assoc_item_id) in &trait_id.trait_items(self.db).items { s.push_assoc_item(assoc_item_id, name, Some(trait_do_not_complete)); } }); } - fn with_container_name(&mut self, container_name: Option<SmolStr>, f: impl FnOnce(&mut Self)) { + fn with_container_name(&mut self, container_name: Option<Symbol>, f: impl FnOnce(&mut Self)) { if let Some(container_name) = container_name { let prev = self.current_container_name.replace(container_name); f(self);
diff --git a/crates/ide-assists/src/handlers/add_label_to_loop.rs b/crates/ide-assists/src/handlers/add_label_to_loop.rs index d2b9034..b84ad24 100644 --- a/crates/ide-assists/src/handlers/add_label_to_loop.rs +++ b/crates/ide-assists/src/handlers/add_label_to_loop.rs
@@ -1,7 +1,14 @@ -use ide_db::syntax_helpers::node_ext::for_each_break_and_continue_expr; +use ide_db::{ + source_change::SourceChangeBuilder, syntax_helpers::node_ext::for_each_break_and_continue_expr, +}; use syntax::{ - T, - ast::{self, AstNode, HasLoopBody}, + SyntaxToken, T, + ast::{ + self, AstNode, HasLoopBody, + make::{self, tokens}, + syntax_factory::SyntaxFactory, + }, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{AssistContext, AssistId, Assists}; @@ -21,9 +28,9 @@ // -> // ``` // fn main() { -// 'l: loop { -// break 'l; -// continue 'l; +// ${1:'l}: loop { +// break ${2:'l}; +// continue ${0:'l}; // } // } // ``` @@ -39,30 +46,56 @@ "Add Label", loop_expr.syntax().text_range(), |builder| { - builder.insert(loop_kw.text_range().start(), "'l: "); + let make = SyntaxFactory::with_mappings(); + let mut editor = builder.make_editor(loop_expr.syntax()); + + let label = make.lifetime("'l"); + let elements = vec![ + label.syntax().clone().into(), + make::token(T![:]).into(), + tokens::single_space().into(), + ]; + editor.insert_all(Position::before(&loop_kw), elements); + + if let Some(cap) = ctx.config.snippet_cap { + editor.add_annotation(label.syntax(), builder.make_placeholder_snippet(cap)); + } let loop_body = loop_expr.loop_body().and_then(|it| it.stmt_list()); - for_each_break_and_continue_expr( - loop_expr.label(), - loop_body, - &mut |expr| match expr { - ast::Expr::BreakExpr(break_expr) => { - if let Some(break_token) = break_expr.break_token() { - builder.insert(break_token.text_range().end(), " 'l") - } - } - ast::Expr::ContinueExpr(continue_expr) => { - if let Some(continue_token) = continue_expr.continue_token() { - builder.insert(continue_token.text_range().end(), " 'l") - } - } - _ => {} - }, - ); + for_each_break_and_continue_expr(loop_expr.label(), loop_body, &mut |expr| { + let token = match expr { + ast::Expr::BreakExpr(break_expr) => break_expr.break_token(), + ast::Expr::ContinueExpr(continue_expr) => 
continue_expr.continue_token(), + _ => return, + }; + if let Some(token) = token { + insert_label_after_token(&mut editor, &make, &token, ctx, builder); + } + }); + + editor.add_mappings(make.finish_with_mappings()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + builder.rename(); }, ) } +fn insert_label_after_token( + editor: &mut SyntaxEditor, + make: &SyntaxFactory, + token: &SyntaxToken, + ctx: &AssistContext<'_>, + builder: &mut SourceChangeBuilder, +) { + let label = make.lifetime("'l"); + let elements = vec![tokens::single_space().into(), label.syntax().clone().into()]; + editor.insert_all(Position::after(token), elements); + + if let Some(cap) = ctx.config.snippet_cap { + editor.add_annotation(label.syntax(), builder.make_placeholder_snippet(cap)); + } +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; @@ -82,9 +115,9 @@ }"#, r#" fn main() { - 'l: loop { - break 'l; - continue 'l; + ${1:'l}: loop { + break ${2:'l}; + continue ${0:'l}; } }"#, ); @@ -107,9 +140,9 @@ }"#, r#" fn main() { - 'l: loop { - break 'l; - continue 'l; + ${1:'l}: loop { + break ${2:'l}; + continue ${0:'l}; loop { break; continue; @@ -139,9 +172,9 @@ loop { break; continue; - 'l: loop { - break 'l; - continue 'l; + ${1:'l}: loop { + break ${2:'l}; + continue ${0:'l}; } } }"#,
diff --git a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs index 2d6a59a..156286d 100644 --- a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs +++ b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
@@ -129,7 +129,7 @@ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?; let has_wanted_method = ty - .iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| { + .iterate_method_candidates(sema.db, &scope, Some(&wanted_method), |func| { if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) { return Some(()); }
diff --git a/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs index 3917ca1..c8a244b 100644 --- a/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs +++ b/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
@@ -165,7 +165,7 @@ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?; let has_wanted_method = ty - .iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| { + .iterate_method_candidates(sema.db, &scope, Some(&wanted_method), |func| { if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) { return Some(()); }
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 9a9adf2..44d020a 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -2060,7 +2060,7 @@ .filter_map(|reference| path_element_of_reference(syntax, reference)) .map(|expr| tm.make_mut(&expr)); - usages_for_param.push((param, usages.collect())); + usages_for_param.push((param, usages.unique().collect())); } let res = tm.make_syntax_mut(syntax); @@ -4250,7 +4250,7 @@ check_assist( extract_function, r#" -//- minicore: option +//- minicore: option, add, builtin_impls fn bar() -> Option<i32> { None } fn foo() -> Option<()> { let n = bar()?; @@ -4314,7 +4314,7 @@ check_assist( extract_function, r#" -//- minicore: result +//- minicore: result, add, builtin_impls fn foo() -> Result<(), i64> { let n = 1; $0let k = foo()?; @@ -4345,7 +4345,7 @@ check_assist( extract_function, r#" -//- minicore: option +//- minicore: option, add, builtin_impls fn foo() -> Option<()> { let n = 1; $0let k = foo()?; @@ -4382,7 +4382,7 @@ check_assist( extract_function, r#" -//- minicore: result +//- minicore: result, add, builtin_impls fn foo() -> Result<(), i64> { let n = 1; $0let k = foo()?; @@ -4441,7 +4441,7 @@ check_assist( extract_function, r#" -//- minicore: result +//- minicore: result, add, builtin_impls fn foo() -> Result<(), i64> { let n = 1; $0let k = foo()?; @@ -6233,4 +6233,64 @@ cov_mark::check!(extract_function_in_braces_is_not_applicable); check_assist_not_applicable(extract_function, r"fn foo(arr: &mut $0[$0i32]) {}"); } + + #[test] + fn issue_20965_panic() { + check_assist( + extract_function, + r#" +//- minicore: fmt +#[derive(Debug)] +struct Foo(&'static str); + +impl Foo { + fn text(&self) -> &str { self.0 } +} + +fn main() { + let s = Foo(""); + $0print!("{}{}", s, s);$0 + let _ = s.text() == ""; +}"#, + r#" +#[derive(Debug)] +struct Foo(&'static str); + +impl Foo { + fn text(&self) -> &str { self.0 } +} + +fn main() { + let s = Foo(""); + fun_name(&s); + let _ = s.text() == ""; +} + +fn $0fun_name(s: &Foo) { + *print!("{}{}", s, s); +}"#, + ); + } + + #[test] + fn parameter_is_added_used_in_eq_expression_in_macro() { + check_assist( + 
extract_function, + r#" +//- minicore: fmt +fn foo() { + let v = 123; + $0print!("{v:?}{}", v == 123);$0 +}"#, + r#" +fn foo() { + let v = 123; + fun_name(v); +} + +fn $0fun_name(v: i32) { + print!("{v:?}{}", v == 123); +}"#, + ); + } }
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs index da59626..7c60184 100644 --- a/crates/ide-assists/src/handlers/extract_variable.rs +++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -2189,7 +2189,7 @@ //- minicore: index struct X; -impl std::ops::Index<usize> for X { +impl core::ops::Index<usize> for X { type Output = i32; fn index(&self) -> &Self::Output { 0 } } @@ -2204,7 +2204,7 @@ r#" struct X; -impl std::ops::Index<usize> for X { +impl core::ops::Index<usize> for X { type Output = i32; fn index(&self) -> &Self::Output { 0 } } @@ -2214,8 +2214,8 @@ } fn foo(s: &S) { - let $0sub = &s.sub; - sub[0]; + let $0x = &s.sub; + x[0]; }"#, "Extract into variable", );
diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index d8a2e03..b2e791a 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,4 +1,4 @@ -use hir::{HasCrate, HasVisibility}; +use hir::HasVisibility; use ide_db::{FxHashSet, path_transform::PathTransform}; use syntax::{ ast::{ @@ -79,8 +79,7 @@ let mut seen_names = FxHashSet::default(); for ty in sema_field_ty.autoderef(ctx.db()) { - let krate = ty.krate(ctx.db()); - ty.iterate_assoc_items(ctx.db(), krate, |item| { + ty.iterate_assoc_items(ctx.db(), |item| { if let hir::AssocItem::Function(f) = item { let name = f.name(ctx.db()); if f.self_param(ctx.db()).is_some()
diff --git a/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs index d88b0f3..6a86823 100644 --- a/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs +++ b/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
@@ -2,7 +2,10 @@ use ide_db::{RootDatabase, famous_defs::FamousDefs}; use syntax::ast::{self, AstNode, HasName}; -use crate::{AssistContext, AssistId, Assists, utils::generate_trait_impl_text_intransitive}; +use crate::{ + AssistContext, AssistId, Assists, + utils::{generate_trait_impl_text_intransitive, is_selected}, +}; // Assist: generate_from_impl_for_enum // @@ -26,8 +29,68 @@ ctx: &AssistContext<'_>, ) -> Option<()> { let variant = ctx.find_node_at_offset::<ast::Variant>()?; - let variant_name = variant.name()?; - let enum_ = ast::Adt::Enum(variant.parent_enum()); + let adt = ast::Adt::Enum(variant.parent_enum()); + let variants = selected_variants(ctx, &variant)?; + + let target = variant.syntax().text_range(); + acc.add( + AssistId::generate("generate_from_impl_for_enum"), + "Generate `From` impl for this enum variant(s)", + target, + |edit| { + let start_offset = variant.parent_enum().syntax().text_range().end(); + let from_impl = variants + .into_iter() + .map(|variant_info| { + let from_trait = format!("From<{}>", variant_info.ty); + let impl_code = generate_impl_code(variant_info); + generate_trait_impl_text_intransitive(&adt, &from_trait, &impl_code) + }) + .collect::<String>(); + edit.insert(start_offset, from_impl); + }, + ) +} + +fn generate_impl_code(VariantInfo { name, field_name, ty }: VariantInfo) -> String { + if let Some(field) = field_name { + format!( + r#" fn from({field}: {ty}) -> Self {{ + Self::{name} {{ {field} }} + }}"# + ) + } else { + format!( + r#" fn from(v: {ty}) -> Self {{ + Self::{name}(v) + }}"# + ) + } +} + +struct VariantInfo { + name: ast::Name, + field_name: Option<ast::Name>, + ty: ast::Type, +} + +fn selected_variants(ctx: &AssistContext<'_>, variant: &ast::Variant) -> Option<Vec<VariantInfo>> { + variant + .parent_enum() + .variant_list()? 
+ .variants() + .filter(|it| is_selected(it, ctx.selection_trimmed(), true)) + .map(|variant| { + let (name, ty) = extract_variant_info(&ctx.sema, &variant)?; + Some(VariantInfo { name: variant.name()?, field_name: name, ty }) + }) + .collect() +} + +fn extract_variant_info( + sema: &'_ hir::Semantics<'_, RootDatabase>, + variant: &ast::Variant, +) -> Option<(Option<ast::Name>, ast::Type)> { let (field_name, field_type) = match variant.kind() { ast::StructKind::Tuple(field_list) => { if field_list.fields().count() != 1 { @@ -45,36 +108,11 @@ ast::StructKind::Unit => return None, }; - if existing_from_impl(&ctx.sema, &variant).is_some() { + if existing_from_impl(sema, variant).is_some() { cov_mark::hit!(test_add_from_impl_already_exists); return None; } - - let target = variant.syntax().text_range(); - acc.add( - AssistId::generate("generate_from_impl_for_enum"), - "Generate `From` impl for this enum variant", - target, - |edit| { - let start_offset = variant.parent_enum().syntax().text_range().end(); - let from_trait = format!("From<{field_type}>"); - let impl_code = if let Some(name) = field_name { - format!( - r#" fn from({name}: {field_type}) -> Self {{ - Self::{variant_name} {{ {name} }} - }}"# - ) - } else { - format!( - r#" fn from(v: {field_type}) -> Self {{ - Self::{variant_name}(v) - }}"# - ) - }; - let from_impl = generate_trait_impl_text_intransitive(&enum_, &from_trait, &impl_code); - edit.insert(start_offset, from_impl); - }, - ) + Some((field_name, field_type)) } fn existing_from_impl( @@ -123,6 +161,32 @@ ); } + #[test] + fn test_generate_from_impl_for_multiple_enum_variants() { + check_assist( + generate_from_impl_for_enum, + r#" +//- minicore: from +enum A { $0Foo(u32), Bar$0(i32) } +"#, + r#" +enum A { Foo(u32), Bar(i32) } + +impl From<u32> for A { + fn from(v: u32) -> Self { + Self::Foo(v) + } +} + +impl From<i32> for A { + fn from(v: i32) -> Self { + Self::Bar(v) + } +} +"#, + ); + } + // FIXME(next-solver): it would be nice to not be *required* 
to resolve the // path in order to properly generate assists #[test]
diff --git a/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs index af9c493..f10b21b 100644 --- a/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs +++ b/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
@@ -95,7 +95,7 @@ let scope = ctx.sema.scope(impl_.syntax())?; let ty = impl_def.self_ty(db); - ty.iterate_method_candidates(db, &scope, None, Some(fn_name), Some) + ty.iterate_method_candidates(db, &scope, Some(fn_name), Some) } #[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/move_const_to_impl.rs b/crates/ide-assists/src/handlers/move_const_to_impl.rs index a645c8b..102d7e6 100644 --- a/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/crates/ide-assists/src/handlers/move_const_to_impl.rs
@@ -1,4 +1,4 @@ -use hir::{AsAssocItem, AssocItemContainer, FileRange, HasCrate, HasSource}; +use hir::{AsAssocItem, AssocItemContainer, FileRange, HasSource}; use ide_db::{assists::AssistId, defs::Definition, search::SearchScope}; use syntax::{ SyntaxKind, @@ -70,7 +70,7 @@ let ty = impl_.self_ty(db); // If there exists another associated item with the same name, skip the assist. if ty - .iterate_assoc_items(db, ty.krate(db), |assoc| { + .iterate_assoc_items(db, |assoc| { // Type aliases wouldn't conflict due to different namespaces, but we're only checking // the items in inherent impls, so we assume `assoc` is never type alias for the sake // of brevity (inherent associated types exist in nightly Rust, but it's *very*
diff --git a/crates/ide-assists/src/handlers/replace_arith_op.rs b/crates/ide-assists/src/handlers/replace_arith_op.rs index a3fb851..b686dc0 100644 --- a/crates/ide-assists/src/handlers/replace_arith_op.rs +++ b/crates/ide-assists/src/handlers/replace_arith_op.rs
@@ -240,12 +240,12 @@ replace_arith_with_wrapping, r#" fn main() { - let x = 1*x $0+ 2; + let x = 1*3 $0+ 2; } "#, r#" fn main() { - let x = (1*x).wrapping_add(2); + let x = (1*3).wrapping_add(2); } "#, )
diff --git a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs index 3b815a4..b7e5344 100644 --- a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs +++ b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -1,15 +1,10 @@ use std::iter::successors; -use either::Either; -use ide_db::{ - RootDatabase, - defs::NameClass, - syntax_helpers::node_ext::{is_pattern_cond, single_let}, - ty_filter::TryEnum, -}; +use ide_db::{RootDatabase, defs::NameClass, ty_filter::TryEnum}; use syntax::{ - AstNode, Edition, T, TextRange, + AstNode, Edition, SyntaxKind, T, TextRange, ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory}, + syntax_editor::SyntaxEditor, }; use crate::{ @@ -54,42 +49,46 @@ return None; } let mut else_block = None; + let indent = if_expr.indent_level(); let if_exprs = successors(Some(if_expr.clone()), |expr| match expr.else_branch()? { ast::ElseBranch::IfExpr(expr) => Some(expr), ast::ElseBranch::Block(block) => { + let block = unwrap_trivial_block(block).clone_for_update(); + block.reindent_to(IndentLevel(1)); else_block = Some(block); None } }); let scrutinee_to_be_expr = if_expr.condition()?; - let scrutinee_to_be_expr = match single_let(scrutinee_to_be_expr.clone()) { - Some(cond) => cond.expr()?, - None => scrutinee_to_be_expr, + let scrutinee_to_be_expr = match let_and_guard(&scrutinee_to_be_expr) { + (Some(let_expr), _) => let_expr.expr()?, + (None, cond) => cond?, }; let mut pat_seen = false; let mut cond_bodies = Vec::new(); for if_expr in if_exprs { let cond = if_expr.condition()?; - let cond = match single_let(cond.clone()) { - Some(let_) => { + let (cond, guard) = match let_and_guard(&cond) { + (None, guard) => (None, Some(guard?)), + (Some(let_), guard) => { let pat = let_.pat()?; let expr = let_.expr()?; - // FIXME: If one `let` is wrapped in parentheses and the second is not, - // we'll exit here. if scrutinee_to_be_expr.syntax().text() != expr.syntax().text() { // Only if all condition expressions are equal we can merge them into a match return None; } pat_seen = true; - Either::Left(pat) + (Some(pat), guard) } - // Multiple `let`, unsupported. 
- None if is_pattern_cond(cond.clone()) => return None, - None => Either::Right(cond), }; - let body = if_expr.then_branch()?; - cond_bodies.push((cond, body)); + if let Some(guard) = &guard { + guard.dedent(indent); + guard.indent(IndentLevel(1)); + } + let body = if_expr.then_branch()?.clone_for_update(); + body.indent(IndentLevel(1)); + cond_bodies.push((cond, guard, body)); } if !pat_seen && cond_bodies.len() != 1 { @@ -106,27 +105,25 @@ available_range, move |builder| { let make = SyntaxFactory::with_mappings(); - let match_expr = { + let match_expr: ast::Expr = { let else_arm = make_else_arm(ctx, &make, else_block, &cond_bodies); - let make_match_arm = |(pat, body): (_, ast::BlockExpr)| { - let body = make.block_expr(body.statements(), body.tail_expr()); - body.indent(IndentLevel::from(1)); - let body = unwrap_trivial_block(body); - match pat { - Either::Left(pat) => make.match_arm(pat, None, body), - Either::Right(_) if !pat_seen => { - make.match_arm(make.literal_pat("true").into(), None, body) + let make_match_arm = + |(pat, guard, body): (_, Option<ast::Expr>, ast::BlockExpr)| { + body.reindent_to(IndentLevel::single()); + let body = unwrap_trivial_block(body); + match (pat, guard.map(|it| make.match_guard(it))) { + (Some(pat), guard) => make.match_arm(pat, guard, body), + (None, _) if !pat_seen => { + make.match_arm(make.literal_pat("true").into(), None, body) + } + (None, guard) => { + make.match_arm(make.wildcard_pat().into(), guard, body) + } } - Either::Right(expr) => make.match_arm( - make.wildcard_pat().into(), - Some(make.match_guard(expr)), - body, - ), - } - }; + }; let arms = cond_bodies.into_iter().map(make_match_arm).chain([else_arm]); let match_expr = make.expr_match(scrutinee_to_be_expr, make.match_arm_list(arms)); - match_expr.indent(IndentLevel::from_node(if_expr.syntax())); + match_expr.indent(indent); match_expr.into() }; @@ -134,7 +131,11 @@ if_expr.syntax().parent().is_some_and(|it| ast::IfExpr::can_cast(it.kind())); let expr = if 
has_preceding_if_expr { // make sure we replace the `else if let ...` with a block so we don't end up with `else expr` - make.block_expr([], Some(match_expr)).into() + match_expr.dedent(indent); + match_expr.indent(IndentLevel(1)); + let block_expr = make.block_expr([], Some(match_expr)); + block_expr.indent(indent); + block_expr.into() } else { match_expr }; @@ -150,13 +151,13 @@ fn make_else_arm( ctx: &AssistContext<'_>, make: &SyntaxFactory, - else_block: Option<ast::BlockExpr>, - conditionals: &[(Either<ast::Pat, ast::Expr>, ast::BlockExpr)], + else_expr: Option<ast::Expr>, + conditionals: &[(Option<ast::Pat>, Option<ast::Expr>, ast::BlockExpr)], ) -> ast::MatchArm { - let (pattern, expr) = if let Some(else_block) = else_block { + let (pattern, expr) = if let Some(else_expr) = else_expr { let pattern = match conditionals { - [(Either::Right(_), _)] => make.literal_pat("false").into(), - [(Either::Left(pat), _)] => match ctx + [(None, Some(_), _)] => make.literal_pat("false").into(), + [(Some(pat), _, _)] => match ctx .sema .type_of_pat(pat) .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted())) @@ -174,10 +175,10 @@ }, _ => make.wildcard_pat().into(), }; - (pattern, unwrap_trivial_block(else_block)) + (pattern, else_expr) } else { let pattern = match conditionals { - [(Either::Right(_), _)] => make.literal_pat("false").into(), + [(None, Some(_), _)] => make.literal_pat("false").into(), _ => make.wildcard_pat().into(), }; (pattern, make.expr_unit()) @@ -266,7 +267,10 @@ // wrap them in another BlockExpr. 
match expr { ast::Expr::BlockExpr(block) if block.modifier().is_none() => block, - expr => make.block_expr([], Some(expr)), + expr => { + expr.indent(IndentLevel(1)); + make.block_expr([], Some(expr)) + } } }; @@ -289,7 +293,9 @@ condition }; let then_expr = then_expr.clone_for_update(); + let else_expr = else_expr.clone_for_update(); then_expr.reindent_to(IndentLevel::single()); + else_expr.reindent_to(IndentLevel::single()); let then_block = make_block_expr(then_expr); let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) }; let if_let_expr = make.expr_if( @@ -382,6 +388,48 @@ .is_some_and(|it| does_pat_match_variant(pat, &it.sad_pattern())) } +fn let_and_guard(cond: &ast::Expr) -> (Option<ast::LetExpr>, Option<ast::Expr>) { + if let ast::Expr::ParenExpr(expr) = cond + && let Some(sub_expr) = expr.expr() + { + let_and_guard(&sub_expr) + } else if let ast::Expr::LetExpr(let_expr) = cond { + (Some(let_expr.clone()), None) + } else if let ast::Expr::BinExpr(bin_expr) = cond + && let Some(ast::Expr::LetExpr(let_expr)) = and_bin_expr_left(bin_expr).lhs() + { + let new_expr = bin_expr.clone_subtree(); + let mut edit = SyntaxEditor::new(new_expr.syntax().clone()); + + let left_bin = and_bin_expr_left(&new_expr); + if let Some(rhs) = left_bin.rhs() { + edit.replace(left_bin.syntax(), rhs.syntax()); + } else { + if let Some(next) = left_bin.syntax().next_sibling_or_token() + && next.kind() == SyntaxKind::WHITESPACE + { + edit.delete(next); + } + edit.delete(left_bin.syntax()); + } + + let new_expr = edit.finish().new_root().clone(); + (Some(let_expr), ast::Expr::cast(new_expr)) + } else { + (None, Some(cond.clone())) + } +} + +fn and_bin_expr_left(expr: &ast::BinExpr) -> ast::BinExpr { + if expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) + && let Some(ast::Expr::BinExpr(left)) = expr.lhs() + { + and_bin_expr_left(&left) + } else { + expr.clone() + } +} + #[cfg(test)] mod tests { use super::*; @@ -453,6 +501,45 @@ } #[test] + fn 
test_if_with_match_comments() { + check_assist( + replace_if_let_with_match, + r#" +pub fn foo(foo: i32) { + $0if let 1 = foo { + // some comment + self.foo(); + } else if let 2 = foo { + // some comment 2 + self.bar() + } else { + // some comment 3 + self.baz(); + } +} +"#, + r#" +pub fn foo(foo: i32) { + match foo { + 1 => { + // some comment + self.foo(); + } + 2 => { + // some comment 2 + self.bar() + } + _ => { + // some comment 3 + self.baz(); + } + } +} +"#, + ) + } + + #[test] fn test_if_let_with_match_no_else() { check_assist( replace_if_let_with_match, @@ -514,14 +601,151 @@ #[test] fn test_if_let_with_match_let_chain() { - check_assist_not_applicable( + check_assist( replace_if_let_with_match, r#" +#![feature(if_let_guard)] +fn main() { + if $0let true = true && let Some(1) = None {} else { other() } +} +"#, + r#" +#![feature(if_let_guard)] +fn main() { + match true { + true if let Some(1) = None => {} + _ => other(), + } +} +"#, + ); + + check_assist( + replace_if_let_with_match, + r#" +#![feature(if_let_guard)] +fn main() { + if true { + $0if let ParenExpr(expr) = cond + && let Some(sub_expr) = expr.expr() + { + branch1( + "..." + ) + } else if let LetExpr(let_expr) = cond { + branch2( + "..." + ) + } else if let BinExpr(bin_expr) = cond + && let Some(kind) = bin_expr.op_kind() + && let Some(LetExpr(let_expr)) = foo(bin_expr) + { + branch3() + } else { + branch4( + "..." + ) + } + } +} +"#, + r#" +#![feature(if_let_guard)] +fn main() { + if true { + match cond { + ParenExpr(expr) if let Some(sub_expr) = expr.expr() => { + branch1( + "..." + ) + } + LetExpr(let_expr) => { + branch2( + "..." + ) + } + BinExpr(bin_expr) if let Some(kind) = bin_expr.op_kind() + && let Some(LetExpr(let_expr)) = foo(bin_expr) => branch3(), + _ => { + branch4( + "..." 
+ ) + } + } + } +} +"#, + ); + + check_assist( + replace_if_let_with_match, + r#" +fn main() { + if $0let true = true + && true + && false + { + code() + } else { + other() + } +} +"#, + r#" +fn main() { + match true { + true if true + && false => code(), + _ => other(), + } +} +"#, + ); + } + + #[test] + fn test_if_let_with_match_let_chain_no_else() { + check_assist( + replace_if_let_with_match, + r#" +#![feature(if_let_guard)] fn main() { if $0let true = true && let Some(1) = None {} } "#, - ) + r#" +#![feature(if_let_guard)] +fn main() { + match true { + true if let Some(1) = None => {} + _ => (), + } +} +"#, + ); + + check_assist( + replace_if_let_with_match, + r#" +fn main() { + if $0let true = true + && true + && false + { + code() + } +} +"#, + r#" +fn main() { + match true { + true if true + && false => code(), + _ => (), + } +} +"#, + ); } #[test] @@ -553,10 +777,10 @@ VariantData::Tuple(..) => false, _ if cond() => true, _ => { - bar( - 123 - ) - } + bar( + 123 + ) + } } } } @@ -587,11 +811,11 @@ if let VariantData::Struct(..) = *self { true } else { - match *self { - VariantData::Tuple(..) => false, - _ => false, + match *self { + VariantData::Tuple(..) 
=> false, + _ => false, + } } -} } } "#, @@ -706,9 +930,12 @@ fn main() { if true { $0if let Ok(rel_path) = path.strip_prefix(root_path) { - let rel_path = RelativePathBuf::from_path(rel_path).ok()?; + let rel_path = RelativePathBuf::from_path(rel_path) + .ok()?; Some((*id, rel_path)) } else { + let _ = some_code() + .clone(); None } } @@ -719,10 +946,52 @@ if true { match path.strip_prefix(root_path) { Ok(rel_path) => { - let rel_path = RelativePathBuf::from_path(rel_path).ok()?; + let rel_path = RelativePathBuf::from_path(rel_path) + .ok()?; Some((*id, rel_path)) } - _ => None, + _ => { + let _ = some_code() + .clone(); + None + } + } + } +} +"#, + ); + + check_assist( + replace_if_let_with_match, + r#" +fn main() { + if true { + $0if let Ok(rel_path) = path.strip_prefix(root_path) { + Foo { + x: 1 + } + } else { + Foo { + x: 2 + } + } + } +} +"#, + r#" +fn main() { + if true { + match path.strip_prefix(root_path) { + Ok(rel_path) => { + Foo { + x: 1 + } + } + _ => { + Foo { + x: 2 + } + } } } } @@ -1583,11 +1852,12 @@ fn main() { if true { $0match path.strip_prefix(root_path) { - Ok(rel_path) => { - let rel_path = RelativePathBuf::from_path(rel_path).ok()?; - Some((*id, rel_path)) + Ok(rel_path) => Foo { + x: 2 } - _ => None, + _ => Foo { + x: 3 + }, } } } @@ -1596,15 +1866,55 @@ fn main() { if true { if let Ok(rel_path) = path.strip_prefix(root_path) { - let rel_path = RelativePathBuf::from_path(rel_path).ok()?; + Foo { + x: 2 + } + } else { + Foo { + x: 3 + } + } + } +} +"#, + ); + + check_assist( + replace_match_with_if_let, + r#" +fn main() { + if true { + $0match path.strip_prefix(root_path) { + Ok(rel_path) => { + let rel_path = RelativePathBuf::from_path(rel_path) + .ok()?; + Some((*id, rel_path)) + } + _ => { + let _ = some_code() + .clone(); + None + }, + } + } +} +"#, + r#" +fn main() { + if true { + if let Ok(rel_path) = path.strip_prefix(root_path) { + let rel_path = RelativePathBuf::from_path(rel_path) + .ok()?; Some((*id, rel_path)) } else { + let 
_ = some_code() + .clone(); None } } } "#, - ) + ); } #[test]
diff --git a/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs b/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs index 14161d9..753f2ab 100644 --- a/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs +++ b/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
@@ -45,7 +45,6 @@ &scope, &scope.visible_traits().0, None, - None, |func| { let valid = func.name(ctx.sema.db).as_str() == &*method_name_lazy && func.num_params(ctx.sema.db) == n_params @@ -127,7 +126,6 @@ &scope, &scope.visible_traits().0, None, - None, |func| { let valid = func.name(ctx.sema.db).as_str() == method_name_eager && func.num_params(ctx.sema.db) == n_params;
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 7f0836a..160b31a 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs
@@ -183,9 +183,9 @@ "#####, r#####" fn main() { - 'l: loop { - break 'l; - continue 'l; + ${1:'l}: loop { + break ${2:'l}; + continue ${0:'l}; } } "#####,
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index e43516f..4350749 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs
@@ -57,6 +57,14 @@ }); non_trivial_children.next().is_some() }; + if stmt_list + .syntax() + .children_with_tokens() + .filter_map(NodeOrToken::into_token) + .any(|token| token.kind() == syntax::SyntaxKind::COMMENT) + { + return None; + } if let Some(expr) = stmt_list.tail_expr() { if has_anything_else(expr.syntax()) { @@ -1155,6 +1163,15 @@ } } +pub(crate) fn is_selected( + it: &impl AstNode, + selection: syntax::TextRange, + allow_empty: bool, +) -> bool { + selection.intersect(it.syntax().text_range()).is_some_and(|it| !it.is_empty()) + || allow_empty && it.syntax().text_range().contains_range(selection) +} + pub fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bool { let mut is_const = true; preorder_expr(expr, &mut |ev| {
diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs index 297ce33..20776f6 100644 --- a/crates/ide-completion/src/completions/attribute.rs +++ b/crates/ide-completion/src/completions/attribute.rs
@@ -231,7 +231,7 @@ macro_rules! attrs { // attributes applicable to all items [@ { item $($tt:tt)* } {$($acc:tt)*}] => { - attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "docinclude", "must_use", "no_mangle" }) + attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "docinclude", "must_use", "no_mangle", "unsafe" }) }; // attributes applicable to all adts [@ { adt $($tt:tt)* } {$($acc:tt)*}] => { @@ -395,6 +395,7 @@ attr("track_caller", None, None), attr("type_length_limit = …", Some("type_length_limit"), Some("type_length_limit = ${0:128}")) .prefer_inner(), + attr("unsafe(…)", Some("unsafe"), Some("unsafe($0)")), attr("used", None, None), attr("warn(…)", Some("warn"), Some("warn(${0:lint})")), attr(
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 511b593..c9f4405 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs
@@ -270,7 +270,6 @@ ctx.db, &ctx.scope, traits_in_scope, - Some(ctx.module), None, Callback { ctx, f, seen_methods: FxHashSet::default() }, ); @@ -597,7 +596,6 @@ } "#, expect![[r#" - me local_method() fn(&self) me pub_module_method() fn(&self) "#]], ); @@ -1526,6 +1524,8 @@ check_no_kw( r#" //- minicore: receiver +#![feature(arbitrary_self_types)] + use core::ops::Receiver; struct Foo;
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 5cae7bd..6bdf873 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs
@@ -126,13 +126,12 @@ ctx.db, &ctx.scope, &ctx.traits_in_scope(), - Some(ctx.module), None, PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() }, ); // Iterate assoc types separately - ty.iterate_assoc_items(ctx.db, ctx.krate, |item| { + ty.iterate_assoc_items(ctx.db, |item| { if let hir::AssocItem::TypeAlias(ty) = item { acc.add_type_alias(ctx, ty) } @@ -196,13 +195,12 @@ ctx.db, &ctx.scope, &ctx.traits_in_scope(), - Some(ctx.module), None, PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() }, ); // Iterate assoc types separately - ty.iterate_assoc_items(ctx.db, ctx.krate, |item| { + ty.iterate_assoc_items(ctx.db, |item| { if let hir::AssocItem::TypeAlias(ty) = item { acc.add_type_alias(ctx, ty) } @@ -232,7 +230,6 @@ ctx.db, &ctx.scope, &ctx.traits_in_scope(), - Some(ctx.module), None, PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() }, );
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index ab3f619..ba1fe64 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs
@@ -11,10 +11,11 @@ text_edit::TextEdit, ty_filter::TryEnum, }; +use itertools::Itertools; use stdx::never; use syntax::{ - SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR}, - T, TextRange, TextSize, + SyntaxKind::{EXPR_STMT, STMT_LIST}, + T, TextRange, TextSize, ToSmolStr, ast::{self, AstNode, AstToken}, match_ast, }; @@ -253,18 +254,15 @@ } } - let mut block_should_be_wrapped = true; - if dot_receiver.syntax().kind() == BLOCK_EXPR { - block_should_be_wrapped = false; - if let Some(parent) = dot_receiver.syntax().parent() - && matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) - { - block_should_be_wrapped = true; - } + let block_should_be_wrapped = if let ast::Expr::BlockExpr(block) = dot_receiver { + block.modifier().is_some() || !block.is_standalone() + } else { + true }; { let (open_brace, close_brace) = if block_should_be_wrapped { ("{ ", " }") } else { ("", "") }; + // FIXME: Why add parentheses let (open_paren, close_paren) = if is_in_cond { ("(", ")") } else { ("", "") }; let unsafe_completion_string = format!("{open_paren}unsafe {open_brace}{receiver_text}{close_brace}{close_paren}"); @@ -363,10 +361,18 @@ resulting_element.syntax().parent().and_then(ast::RefExpr::cast) { found_ref_or_deref = true; - let exclusive = parent_ref_element.mut_token().is_some(); + let last_child_or_token = parent_ref_element.syntax().last_child_or_token(); + prefix.insert_str( + 0, + parent_ref_element + .syntax() + .children_with_tokens() + .filter(|it| Some(it) != last_child_or_token.as_ref()) + .format("") + .to_smolstr() + .as_str(), + ); resulting_element = ast::Expr::from(parent_ref_element); - - prefix.insert_str(0, if exclusive { "&mut " } else { "&" }); } if !found_ref_or_deref { @@ -842,6 +848,20 @@ &format!("fn main() {{ let x = {kind} {{ if true {{1}} else {{2}} }} }}"), ); + if kind == "const" { + check_edit( + kind, + r#"fn main() { unsafe {1}.$0 }"#, + &format!("fn main() {{ {kind} {{ unsafe {{1}} }} 
}}"), + ); + } else { + check_edit( + kind, + r#"fn main() { const {1}.$0 }"#, + &format!("fn main() {{ {kind} {{ const {{1}} }} }}"), + ); + } + // completion will not be triggered check_edit( kind, @@ -995,6 +1015,20 @@ ); check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "ok", + r#"fn main() { &raw mut 42.$0 }"#, + r#"fn main() { Ok(&raw mut 42) }"#, + ); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "ok", + r#"fn main() { &raw const 42.$0 }"#, + r#"fn main() { Ok(&raw const 42) }"#, + ); + + check_edit_with_config( CompletionConfig { snippets: vec![snippet], ..TEST_CONFIG }, "ok", r#" @@ -1021,6 +1055,55 @@ } #[test] + fn postfix_custom_snippets_completion_for_reference_expr() { + // https://github.com/rust-lang/rust-analyzer/issues/21035 + let snippet = Snippet::new( + &[], + &["group".into()], + &["(${receiver})".into()], + "", + &[], + crate::SnippetScope::Expr, + ) + .unwrap(); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "group", + r#"fn main() { &[1, 2, 3].g$0 }"#, + r#"fn main() { (&[1, 2, 3]) }"#, + ); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "group", + r#"fn main() { &&foo(a, b, 1+1).$0 }"#, + r#"fn main() { (&&foo(a, b, 1+1)) }"#, + ); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "group", + r#"fn main() { &mut Foo { a: 1, b: 2, c: 3 }.$0 }"#, + r#"fn main() { (&mut Foo { a: 1, b: 2, c: 3 }) }"#, + ); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "group", + r#"fn main() { &raw mut Foo::new().$0 }"#, + r#"fn main() { (&raw mut Foo::new()) }"#, + ); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "group", + r#"fn main() { &raw const Foo::bar::SOME_CONST.$0 }"#, + r#"fn main() { 
(&raw const Foo::bar::SOME_CONST) }"#, + ); + } + + #[test] fn no_postfix_completions_in_if_block_that_has_an_else() { check( r#"
diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index 3465b73..abcf9fc 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs
@@ -67,7 +67,7 @@ }); // Iterate assoc types separately - ty.iterate_assoc_items(ctx.db, ctx.krate, |item| { + ty.iterate_assoc_items(ctx.db, |item| { if let hir::AssocItem::TypeAlias(ty) = item { acc.add_type_alias(ctx, ty) } @@ -110,7 +110,7 @@ }); // Iterate assoc types separately - ty.iterate_assoc_items(ctx.db, ctx.krate, |item| { + ty.iterate_assoc_items(ctx.db, |item| { if let hir::AssocItem::TypeAlias(ty) = item { acc.add_type_alias(ctx, ty) }
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 2f166b7..31a9a74 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs
@@ -615,21 +615,14 @@ mut cb: impl FnMut(hir::AssocItem), ) { let mut seen = FxHashSet::default(); - ty.iterate_path_candidates( - self.db, - &self.scope, - &self.traits_in_scope(), - Some(self.module), - None, - |item| { - // We might iterate candidates of a trait multiple times here, so deduplicate - // them. - if seen.insert(item) { - cb(item) - } - None::<()> - }, - ); + ty.iterate_path_candidates(self.db, &self.scope, &self.traits_in_scope(), None, |item| { + // We might iterate candidates of a trait multiple times here, so deduplicate + // them. + if seen.insert(item) { + cb(item) + } + None::<()> + }); } /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs index 2a62389..3538dac 100644 --- a/crates/ide-completion/src/tests/attribute.rs +++ b/crates/ide-completion/src/tests/attribute.rs
@@ -61,6 +61,7 @@ at target_feature(enable = "…") at test at track_caller + at unsafe(…) at used at warn(…) md mac @@ -95,6 +96,7 @@ at no_mangle at non_exhaustive at repr(…) + at unsafe(…) at warn(…) md proc_macros kw crate:: @@ -173,6 +175,7 @@ at no_std at recursion_limit = "…" at type_length_limit = … + at unsafe(…) at warn(…) at windows_subsystem = "…" kw crate:: @@ -201,6 +204,7 @@ at must_use at no_mangle at path = "…" + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -224,6 +228,7 @@ at must_use at no_implicit_prelude at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -252,6 +257,7 @@ at macro_use at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -277,6 +283,7 @@ at forbid(…) at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -303,6 +310,7 @@ at macro_use at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -328,6 +336,7 @@ at forbid(…) at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -353,6 +362,7 @@ at forbid(…) at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -387,6 +397,7 @@ at no_mangle at non_exhaustive at repr(…) + at unsafe(…) at warn(…) md core kw crate:: @@ -416,6 +427,7 @@ at no_mangle at non_exhaustive at repr(…) + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -441,6 +453,7 @@ at forbid(…) at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -470,6 +483,7 @@ at link_section = "…" at must_use at no_mangle + at unsafe(…) at used at warn(…) kw crate:: @@ -497,6 +511,7 @@ at forbid(…) at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -524,6 +539,7 @@ at forbid(…) at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -545,6 +561,7 @@ at forbid(…) at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -572,6 +589,7 @@ at forbid(…) at must_use at no_mangle + at unsafe(…) at warn(…) "#]], ); @@ -592,12 +610,44 @@ at must_use at 
no_mangle at on_unimplemented + at unsafe(…) at warn(…) "#]], ); } #[test] +fn attr_on_unsafe_attr() { + check( + r#"#[unsafe($0)] static FOO: () = ()"#, + expect![[r#" + at allow(…) + at cfg(…) + at cfg_attr(…) + at deny(…) + at deprecated + at doc = "…" + at doc = include_str!("…") + at doc(alias = "…") + at doc(hidden) + at expect(…) + at export_name = "…" + at forbid(…) + at global_allocator + at link_name = "…" + at link_section = "…" + at must_use + at no_mangle + at unsafe(…) + at used + at warn(…) + kw crate:: + kw self:: + "#]], + ); +} + +#[test] fn attr_diagnostic_on_unimplemented() { check( r#"#[diagnostic::on_unimplemented($0)] trait Foo {}"#, @@ -643,6 +693,7 @@ at link at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -665,6 +716,7 @@ at link at must_use at no_mangle + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -723,6 +775,7 @@ at target_feature(enable = "…") at test at track_caller + at unsafe(…) at warn(…) kw crate:: kw self:: @@ -773,6 +826,7 @@ at target_feature(enable = "…") at test at track_caller + at unsafe(…) at used at warn(…) kw crate::
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 4033aa5..034bc24 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs
@@ -3198,6 +3198,7 @@ fn ambiguous_float_literal() { check( r#" +//- /core.rs crate:core #![rustc_coherence_is_core] impl i32 { @@ -3232,6 +3233,7 @@ fn ambiguous_float_literal_in_ambiguous_method_call() { check( r#" +//- /core.rs crate:core #![rustc_coherence_is_core] impl i32 {
diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index 6eb0b81..df0c4e5 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs
@@ -653,6 +653,7 @@ check( r#" +//- /core.rs crate:core #![rustc_coherence_is_core] #[lang = "u32"] impl u32 {
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 008b6fd..6a85c6e 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs
@@ -3,10 +3,12 @@ use base_db::SourceRootId; use profile::Bytes; use rustc_hash::FxHashSet; -use salsa::{Database as _, Durability}; -use triomphe::Arc; +use salsa::{Database as _, Durability, Setter as _}; -use crate::{ChangeWithProcMacros, RootDatabase, symbol_index::SymbolsDatabase}; +use crate::{ + ChangeWithProcMacros, RootDatabase, + symbol_index::{LibraryRoots, LocalRoots}, +}; impl RootDatabase { pub fn request_cancellation(&mut self) { @@ -29,8 +31,8 @@ local_roots.insert(root_id); } } - self.set_local_roots_with_durability(Arc::new(local_roots), Durability::MEDIUM); - self.set_library_roots_with_durability(Arc::new(library_roots), Durability::MEDIUM); + LocalRoots::get(self).set_roots(self).to(local_roots); + LibraryRoots::get(self).set_roots(self).to(library_roots); } change.apply(self); }
diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs index 5eb7e92..7f4dad8 100644 --- a/crates/ide-db/src/generated/lints.rs +++ b/crates/ide-db/src/generated/lints.rs
@@ -10723,9 +10723,9 @@ label: "strict_provenance_lints", description: r##"# `strict_provenance_lints` -The tracking issue for this feature is: [#95228] +The tracking issue for this feature is: [#130351] -[#95228]: https://github.com/rust-lang/rust/issues/95228 +[#130351]: https://github.com/rust-lang/rust/issues/130351 ----- The `strict_provenance_lints` feature allows to enable the `fuzzy_provenance_casts` and `lossy_provenance_casts` lints.
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs index 50edfca..c49ade2 100644 --- a/crates/ide-db/src/imports/import_assets.rs +++ b/crates/ide-db/src/imports/import_assets.rs
@@ -5,7 +5,7 @@ use hir::{ AsAssocItem, AssocItem, AssocItemContainer, Complete, Crate, FindPathConfig, HasCrate, ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics, - SemanticsScope, Trait, TyFingerprint, Type, db::HirDatabase, + SemanticsScope, Trait, Type, }; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; @@ -500,44 +500,37 @@ ModuleDef::Adt(adt) => adt.ty(db), _ => return SmallVec::new(), }; - ty.iterate_path_candidates::<Infallible>( - db, - scope, - &FxHashSet::default(), - None, - None, - |assoc| { - // FIXME: Support extra trait imports - if assoc.container_or_implemented_trait(db).is_some() { - return None; + ty.iterate_path_candidates::<Infallible>(db, scope, &FxHashSet::default(), None, |assoc| { + // FIXME: Support extra trait imports + if assoc.container_or_implemented_trait(db).is_some() { + return None; + } + let name = assoc.name(db)?; + let is_match = match candidate { + NameToImport::Prefix(text, true) => name.as_str().starts_with(text), + NameToImport::Prefix(text, false) => { + name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| { + name_char.eq_ignore_ascii_case(&candidate_char) + }) } - let name = assoc.name(db)?; - let is_match = match candidate { - NameToImport::Prefix(text, true) => name.as_str().starts_with(text), - NameToImport::Prefix(text, false) => { - name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| { - name_char.eq_ignore_ascii_case(&candidate_char) - }) - } - NameToImport::Exact(text, true) => name.as_str() == text, - NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text), - NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)), - NameToImport::Fuzzy(text, false) => text.chars().all(|c| { - name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c)) - }), - }; - if !is_match { - return None; - } - result.push(LocatedImport::new( - import_path_candidate.clone(), - 
resolved_qualifier, - assoc_to_item(assoc), - complete_in_flyimport, - )); - None - }, - ); + NameToImport::Exact(text, true) => name.as_str() == text, + NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text), + NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)), + NameToImport::Fuzzy(text, false) => text + .chars() + .all(|c| name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c))), + }; + if !is_match { + return None; + } + result.push(LocatedImport::new( + import_path_candidate.clone(), + resolved_qualifier, + assoc_to_item(assoc), + complete_in_flyimport, + )); + None + }); result } @@ -608,7 +601,6 @@ deref_chain .into_iter() .filter_map(|ty| Some((ty.krate(db).into(), ty.fingerprint_for_trait_impl()?))) - .sorted() .unique() .collect::<Vec<_>>() }; @@ -619,11 +611,11 @@ } // in order to handle implied bounds through an associated type, keep all traits if any - // type in the deref chain matches `TyFingerprint::Unnameable`. This fingerprint + // type in the deref chain matches `SimplifiedType::Placeholder`. This fingerprint // won't be in `TraitImpls` anyways, as `TraitImpls` only contains actual implementations. if !autoderef_method_receiver .iter() - .any(|(_, fingerprint)| matches!(fingerprint, TyFingerprint::Unnameable)) + .any(|(_, fingerprint)| matches!(fingerprint, hir::SimplifiedType::Placeholder)) { trait_candidates.retain(|&candidate_trait_id| { // we care about the following cases: @@ -635,17 +627,18 @@ // a. 
This is recursive for fundamental types let defining_crate_for_trait = Trait::from(candidate_trait_id).krate(db); - let trait_impls_in_crate = db.trait_impls_in_crate(defining_crate_for_trait.into()); + let trait_impls_in_crate = + hir::TraitImpls::for_crate(db, defining_crate_for_trait.into()); let definitions_exist_in_trait_crate = - autoderef_method_receiver.iter().any(|&(_, fingerprint)| { + autoderef_method_receiver.iter().any(|(_, fingerprint)| { trait_impls_in_crate .has_impls_for_trait_and_self_ty(candidate_trait_id, fingerprint) }); // this is a closure for laziness: if `definitions_exist_in_trait_crate` is true, // we can avoid a second db lookup. let definitions_exist_in_receiver_crate = || { - autoderef_method_receiver.iter().any(|&(krate, fingerprint)| { - db.trait_impls_in_crate(krate) + autoderef_method_receiver.iter().any(|(krate, fingerprint)| { + hir::TraitImpls::for_crate(db, *krate) .has_impls_for_trait_and_self_ty(candidate_trait_id, fingerprint) }) }; @@ -663,7 +656,6 @@ scope, &trait_candidates, None, - None, |assoc| { if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) { let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?; @@ -688,7 +680,6 @@ scope, &trait_candidates, None, - None, |function| { let assoc = function.as_assoc_item(db)?; if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs index 4b0a84a..0d30553 100644 --- a/crates/ide-db/src/items_locator.rs +++ b/crates/ide-db/src/items_locator.rs
@@ -10,7 +10,7 @@ use crate::{ RootDatabase, imports::import_assets::NameToImport, - symbol_index::{self, SymbolsDatabase as _}, + symbol_index::{self, SymbolIndex}, }; /// A value to use, when uncertain which limit to pick. @@ -110,7 +110,7 @@ local_query } }; - local_query.search(&[db.module_symbols(module)], |local_candidate| { + local_query.search(&[SymbolIndex::module_symbols(db, module)], |local_candidate| { cb(match local_candidate.def { hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def), def => ItemInNs::from(def),
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 7efa97b..0301b50 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs
@@ -64,7 +64,7 @@ }; use triomphe::Arc; -use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase}; +use crate::line_index::LineIndex; pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; pub use ::line_index; @@ -195,8 +195,12 @@ db.set_all_crates(Arc::new(Box::new([]))); CrateGraphBuilder::default().set_in_db(&mut db); db.set_proc_macros_with_durability(Default::default(), Durability::MEDIUM); - db.set_local_roots_with_durability(Default::default(), Durability::MEDIUM); - db.set_library_roots_with_durability(Default::default(), Durability::MEDIUM); + _ = crate::symbol_index::LibraryRoots::builder(Default::default()) + .durability(Durability::MEDIUM) + .new(&db); + _ = crate::symbol_index::LocalRoots::builder(Default::default()) + .durability(Durability::MEDIUM) + .new(&db); db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH); db.update_base_query_lru_capacities(lru_capacity); db
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs index e661857..4f8dc4a 100644 --- a/crates/ide-db/src/prime_caches.rs +++ b/crates/ide-db/src/prime_caches.rs
@@ -11,7 +11,7 @@ use crate::{ FxIndexMap, RootDatabase, base_db::{Crate, RootQueryDb}, - symbol_index::SymbolsDatabase, + symbol_index::SymbolIndex, }; /// We're indexing many crates. @@ -83,7 +83,12 @@ crate_name, })?; - let cancelled = Cancelled::catch(|| _ = hir::crate_def_map(&db, crate_id)); + let cancelled = Cancelled::catch(|| { + _ = hir::crate_def_map(&db, crate_id); + // we compute the lang items here as the work for them is also highly recursive and will be trigger by the module symbols query + // slowing down leaf crate analysis tremendously as we go back to being blocked on a single thread + _ = hir::crate_lang_items(&db, crate_id); + }); match cancelled { Ok(()) => progress_sender @@ -107,8 +112,9 @@ Ok::<_, crossbeam_channel::SendError<_>>(()) }; let handle_symbols = |module| { - let cancelled = - Cancelled::catch(AssertUnwindSafe(|| _ = db.module_symbols(module))); + let cancelled = Cancelled::catch(AssertUnwindSafe(|| { + _ = SymbolIndex::module_symbols(&db, module) + })); match cancelled { Ok(()) => progress_sender
diff --git a/crates/ide-db/src/ra_fixture.rs b/crates/ide-db/src/ra_fixture.rs index 1f056a8..a9d596d 100644 --- a/crates/ide-db/src/ra_fixture.rs +++ b/crates/ide-db/src/ra_fixture.rs
@@ -2,7 +2,7 @@ use std::hash::{BuildHasher, Hash}; -use hir::{CfgExpr, FilePositionWrapper, FileRangeWrapper, Semantics}; +use hir::{CfgExpr, FilePositionWrapper, FileRangeWrapper, Semantics, Symbol}; use smallvec::SmallVec; use span::{TextRange, TextSize}; use syntax::{ @@ -524,6 +524,7 @@ f64, &str, String, + Symbol, SmolStr, Documentation, SymbolKind,
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index c5ea9bc..ef67fbf 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs
@@ -27,7 +27,7 @@ ops::ControlFlow, }; -use base_db::{RootQueryDb, SourceDatabase, SourceRootId}; +use base_db::{RootQueryDb, SourceRootId}; use fst::{Automaton, Streamer, raw::IndexedValue}; use hir::{ Crate, Module, @@ -37,7 +37,6 @@ }; use rayon::prelude::*; use rustc_hash::FxHashSet; -use triomphe::Arc; use crate::RootDatabase; @@ -102,63 +101,26 @@ } } -#[query_group::query_group] -pub trait SymbolsDatabase: HirDatabase + SourceDatabase { - /// The symbol index for a given module. These modules should only be in source roots that - /// are inside local_roots. - // FIXME: Is it worth breaking the encapsulation boundary of `hir`, and make this take a `ModuleId`, - // in order for it to be a non-interned query? - #[salsa::invoke_interned(module_symbols)] - fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>; - - /// The symbol index for a given source root within library_roots. - #[salsa::invoke_interned(library_symbols)] - fn library_symbols(&self, source_root_id: SourceRootId) -> Arc<SymbolIndex>; - - #[salsa::transparent] - /// The symbol indices of modules that make up a given crate. - fn crate_symbols(&self, krate: Crate) -> Box<[Arc<SymbolIndex>]>; - - /// The set of "local" (that is, from the current workspace) roots. - /// Files in local roots are assumed to change frequently. - #[salsa::input] - fn local_roots(&self) -> Arc<FxHashSet<SourceRootId>>; - - /// The set of roots for crates.io libraries. - /// Files in libraries are assumed to never change. - #[salsa::input] - fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>; +/// The set of roots for crates.io libraries. +/// Files in libraries are assumed to never change. 
+#[salsa::input(singleton, debug)] +pub struct LibraryRoots { + #[returns(ref)] + pub roots: FxHashSet<SourceRootId>, } -fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc<SymbolIndex> { - let _p = tracing::info_span!("library_symbols").entered(); - - // We call this without attaching because this runs in parallel, so we need to attach here. - hir::attach_db(db, || { - let mut symbol_collector = SymbolCollector::new(db); - - db.source_root_crates(source_root_id) - .iter() - .flat_map(|&krate| Crate::from(krate).modules(db)) - // we specifically avoid calling other SymbolsDatabase queries here, even though they do the same thing, - // as the index for a library is not going to really ever change, and we do not want to store each - // the module or crate indices for those in salsa unless we need to. - .for_each(|module| symbol_collector.collect(module)); - - Arc::new(SymbolIndex::new(symbol_collector.finish())) - }) +/// The set of "local" (that is, from the current workspace) roots. +/// Files in local roots are assumed to change frequently. +#[salsa::input(singleton, debug)] +pub struct LocalRoots { + #[returns(ref)] + pub roots: FxHashSet<SourceRootId>, } -fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> { - let _p = tracing::info_span!("module_symbols").entered(); - - // We call this without attaching because this runs in parallel, so we need to attach here. - hir::attach_db(db, || Arc::new(SymbolIndex::new(SymbolCollector::new_module(db, module)))) -} - -pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolIndex>]> { +/// The symbol indices of modules that make up a given crate. 
+pub fn crate_symbols(db: &dyn HirDatabase, krate: Crate) -> Box<[&SymbolIndex]> { let _p = tracing::info_span!("crate_symbols").entered(); - krate.modules(db).into_iter().map(|module| db.module_symbols(module)).collect() + krate.modules(db).into_iter().map(|module| SymbolIndex::module_symbols(db, module)).collect() } // Feature: Workspace Symbol @@ -190,20 +152,26 @@ let _p = tracing::info_span!("world_symbols", query = ?query.query).entered(); let indices: Vec<_> = if query.libs { - db.library_roots() + LibraryRoots::get(db) + .roots(db) .par_iter() - .map_with(db.clone(), |snap, &root| snap.library_symbols(root)) + .for_each_with(db.clone(), |snap, &root| _ = SymbolIndex::library_symbols(snap, root)); + LibraryRoots::get(db) + .roots(db) + .iter() + .map(|&root| SymbolIndex::library_symbols(db, root)) .collect() } else { let mut crates = Vec::new(); - for &root in db.local_roots().iter() { + for &root in LocalRoots::get(db).roots(db).iter() { crates.extend(db.source_root_crates(root).iter().copied()) } - let indices: Vec<_> = crates - .into_par_iter() - .map_with(db.clone(), |snap, krate| snap.crate_symbols(krate.into())) - .collect(); + crates + .par_iter() + .for_each_with(db.clone(), |snap, &krate| _ = crate_symbols(snap, krate.into())); + let indices: Vec<_> = + crates.into_iter().map(|krate| crate_symbols(db, krate.into())).collect(); indices.iter().flat_map(|indices| indices.iter().cloned()).collect() }; @@ -221,6 +189,67 @@ map: fst::Map<Vec<u8>>, } +impl SymbolIndex { + /// The symbol index for a given source root within library_roots. 
+ pub fn library_symbols(db: &dyn HirDatabase, source_root_id: SourceRootId) -> &SymbolIndex { + // FIXME: + #[salsa::interned] + struct InternedSourceRootId { + id: SourceRootId, + } + #[salsa::tracked(returns(ref))] + fn library_symbols( + db: &dyn HirDatabase, + source_root_id: InternedSourceRootId<'_>, + ) -> SymbolIndex { + let _p = tracing::info_span!("library_symbols").entered(); + + // We call this without attaching because this runs in parallel, so we need to attach here. + hir::attach_db(db, || { + let mut symbol_collector = SymbolCollector::new(db, true); + + db.source_root_crates(source_root_id.id(db)) + .iter() + .flat_map(|&krate| Crate::from(krate).modules(db)) + // we specifically avoid calling other SymbolsDatabase queries here, even though they do the same thing, + // as the index for a library is not going to really ever change, and we do not want to store + // the module or crate indices for those in salsa unless we need to. + .for_each(|module| symbol_collector.collect(module)); + + SymbolIndex::new(symbol_collector.finish()) + }) + } + library_symbols(db, InternedSourceRootId::new(db, source_root_id)) + } + + /// The symbol index for a given module. These modules should only be in source roots that + /// are inside local_roots. + pub fn module_symbols(db: &dyn HirDatabase, module: Module) -> &SymbolIndex { + // FIXME: + #[salsa::interned] + struct InternedModuleId { + id: hir::ModuleId, + } + + #[salsa::tracked(returns(ref))] + fn module_symbols(db: &dyn HirDatabase, module: InternedModuleId<'_>) -> SymbolIndex { + let _p = tracing::info_span!("module_symbols").entered(); + + // We call this without attaching because this runs in parallel, so we need to attach here. 
+ hir::attach_db(db, || { + let module: Module = module.id(db).into(); + SymbolIndex::new(SymbolCollector::new_module( + db, + module, + !module.krate().origin(db).is_local(), + )) + }) + } + + module_symbols(db, InternedModuleId::new(db, hir::ModuleId::from(module))) + } +} + impl fmt::Debug for SymbolIndex { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish() @@ -309,7 +338,7 @@ impl Query { pub(crate) fn search<'sym, T>( self, - indices: &'sym [Arc<SymbolIndex>], + indices: &'sym [&SymbolIndex], cb: impl FnMut(&'sym FileSymbol) -> ControlFlow<T>, ) -> Option<T> { let _p = tracing::info_span!("symbol_index::Query::search").entered(); @@ -344,7 +373,7 @@ fn search_maps<'sym, T>( &self, - indices: &'sym [Arc<SymbolIndex>], + indices: &'sym [&SymbolIndex], mut stream: fst::map::Union<'_>, mut cb: impl FnMut(&'sym FileSymbol) -> ControlFlow<T>, ) -> Option<T> { @@ -397,7 +426,7 @@ mod tests { use expect_test::expect_file; - use salsa::Durability; + use salsa::Setter; use test_fixture::{WORKSPACE, WithFixture}; use super::*; @@ -484,7 +513,7 @@ .modules(&db) .into_iter() .map(|module_id| { - let mut symbols = SymbolCollector::new_module(&db, module_id); + let mut symbols = SymbolCollector::new_module(&db, module_id, false); symbols.sort_by_key(|it| it.name.as_str().to_owned()); (module_id, symbols) }) @@ -511,7 +540,7 @@ .modules(&db) .into_iter() .map(|module_id| { - let mut symbols = SymbolCollector::new_module(&db, module_id); + let mut symbols = SymbolCollector::new_module(&db, module_id, false); symbols.sort_by_key(|it| it.name.as_str().to_owned()); (module_id, symbols) }) @@ -535,7 +564,7 @@ let mut local_roots = FxHashSet::default(); local_roots.insert(WORKSPACE); - db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); let mut query = Query::new("Foo".to_owned()); let mut symbols = 
world_symbols(&db, query.clone());
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index 519ff19..fdc426c 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -439,10 +439,27 @@ #[test] fn ignores_no_mangle_items() { cov_mark::check!(extern_func_no_mangle_ignored); + cov_mark::check!(no_mangle_static_incorrect_case_ignored); check_diagnostics( r#" #[no_mangle] extern "C" fn NonSnakeCaseName(some_var: u8) -> u8; +#[no_mangle] +static lower_case: () = (); + "#, + ); + } + + #[test] + fn ignores_unsafe_no_mangle_items() { + cov_mark::check!(extern_func_no_mangle_ignored); + cov_mark::check!(no_mangle_static_incorrect_case_ignored); + check_diagnostics( + r#" +#[unsafe(no_mangle)] +extern "C" fn NonSnakeCaseName(some_var: u8) -> u8; +#[unsafe(no_mangle)] +static lower_case: () = (); "#, ); }
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index 49f925e..ab7256d 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -226,7 +226,7 @@ // Look for a ::new() associated function let has_new_func = ty - .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| { + .iterate_assoc_items(ctx.sema.db, |assoc_item| { if let AssocItem::Function(func) = assoc_item && func.name(ctx.sema.db) == sym::new && func.assoc_fn_params(ctx.sema.db).is_empty()
diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 1fc96b7..d52fc73 100644 --- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -390,7 +390,6 @@ #[test] fn expr_diverges() { - cov_mark::check_count!(validate_match_bailed_out, 2); check_diagnostics( r#" enum Either { A, B } @@ -401,6 +400,7 @@ Either::B => (), } match loop {} { + // ^^^^^^^ error: missing match arm: `B` not covered Either::A => (), } match loop { break Either::A } {
diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index eefa1ac..18280a4 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -806,7 +806,7 @@ _ = (x, y); let x = Foo; let y = &mut *x; - //^^ 💡 error: cannot mutate immutable variable `x` + // ^ 💡 error: cannot mutate immutable variable `x` _ = (x, y); let x = Foo; //^ 💡 warn: unused variable @@ -815,13 +815,13 @@ //^^^^^^ 💡 error: cannot mutate immutable variable `x` _ = (x, y); let ref mut y = *x; - //^^ 💡 error: cannot mutate immutable variable `x` + // ^ 💡 error: cannot mutate immutable variable `x` _ = y; let (ref mut y, _) = *x; - //^^ 💡 error: cannot mutate immutable variable `x` + // ^ 💡 error: cannot mutate immutable variable `x` _ = y; match *x { - //^^ 💡 error: cannot mutate immutable variable `x` + // ^ 💡 error: cannot mutate immutable variable `x` (ref y, 5) => _ = y, (_, ref mut y) => _ = y, } @@ -1130,7 +1130,7 @@ //^^^^^^^ 💡 error: cannot mutate immutable variable `x` let x = Box::new(5); let closure = || *x = 2; - //^ 💡 error: cannot mutate immutable variable `x` + //^^^^^^ 💡 error: cannot mutate immutable variable `x` _ = closure; } "#,
diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs index 69cd0d2..23f0460 100644 --- a/crates/ide-diagnostics/src/handlers/private_field.rs +++ b/crates/ide-diagnostics/src/handlers/private_field.rs
@@ -1,6 +1,9 @@ use hir::{EditionedFileId, FileRange, HasCrate, HasSource, Semantics}; use ide_db::{RootDatabase, assists::Assist, source_change::SourceChange, text_edit::TextEdit}; -use syntax::{AstNode, TextRange, TextSize, ast::HasVisibility}; +use syntax::{ + AstNode, TextRange, + ast::{HasName, HasVisibility}, +}; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix}; @@ -8,7 +11,6 @@ // // This diagnostic is triggered if the accessed field is not visible from the current module. pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField) -> Diagnostic { - // FIXME: add quickfix Diagnostic::new_with_syntax_node_ptr( ctx, DiagnosticCode::RustcHardError("E0616"), @@ -50,11 +52,19 @@ source.with_value(visibility.syntax()).original_file_range_opt(sema.db)?.0 } None => { - let (range, _) = source.syntax().original_file_range_opt(sema.db)?; - FileRange { - file_id: range.file_id, - range: TextRange::at(range.range.start(), TextSize::new(0)), - } + let (range, _) = source + .map(|it| { + Some(match it { + hir::FieldSource::Named(it) => { + it.unsafe_token().or(it.name()?.ident_token())?.text_range() + } + hir::FieldSource::Pos(it) => it.ty()?.syntax().text_range(), + }) + }) + .transpose()? 
+ .original_node_file_range_opt(sema.db)?; + + FileRange { file_id: range.file_id, range: TextRange::empty(range.range.start()) } } }; let source_change = SourceChange::from_text_edit( @@ -229,4 +239,186 @@ "#, ); } + + #[test] + fn change_visibility_of_field_with_doc_comment() { + check_fix( + r#" +pub mod foo { + pub struct Foo { + /// This is a doc comment + bar: u32, + } +} + +fn main() { + let x = foo::Foo { bar: 0 }; + x.bar$0; +} + "#, + r#" +pub mod foo { + pub struct Foo { + /// This is a doc comment + pub(crate) bar: u32, + } +} + +fn main() { + let x = foo::Foo { bar: 0 }; + x.bar; +} + "#, + ); + } + + #[test] + fn change_visibility_of_field_with_line_comment() { + check_fix( + r#" +pub mod foo { + pub struct Foo { + // This is a line comment + bar: u32, + } +} + +fn main() { + let x = foo::Foo { bar: 0 }; + x.bar$0; +} + "#, + r#" +pub mod foo { + pub struct Foo { + // This is a line comment + pub(crate) bar: u32, + } +} + +fn main() { + let x = foo::Foo { bar: 0 }; + x.bar; +} + "#, + ); + } + + #[test] + fn change_visibility_of_field_with_multiple_doc_comments() { + check_fix( + r#" +pub mod foo { + pub struct Foo { + /// First line + /// Second line + bar: u32, + } +} + +fn main() { + let x = foo::Foo { bar: 0 }; + x.bar$0; +} + "#, + r#" +pub mod foo { + pub struct Foo { + /// First line + /// Second line + pub(crate) bar: u32, + } +} + +fn main() { + let x = foo::Foo { bar: 0 }; + x.bar; +} + "#, + ); + } + + #[test] + fn change_visibility_of_field_with_attr_and_comment() { + check_fix( + r#" +mod foo { + pub struct Foo { + #[rustfmt::skip] + /// First line + /// Second line + bar: u32, + } +} +fn main() { + foo::Foo { $0bar: 42 }; +} + "#, + r#" +mod foo { + pub struct Foo { + #[rustfmt::skip] + /// First line + /// Second line + pub(crate) bar: u32, + } +} +fn main() { + foo::Foo { bar: 42 }; +} + "#, + ); + } + + #[test] + fn change_visibility_of_field_with_macro() { + check_fix( + r#" +macro_rules! 
allow_unused { + ($vis:vis $struct:ident $name:ident { $($fvis:vis $field:ident : $ty:ty,)* }) => { + $vis $struct $name { + $( + #[allow(unused)] + $fvis $field : $ty, + )* + } + }; +} +mod foo { + allow_unused!( + pub struct Foo { + x: i32, + } + ); +} +fn main() { + let foo = foo::Foo { x: 2 }; + let _ = foo.$0x +} + "#, + r#" +macro_rules! allow_unused { + ($vis:vis $struct:ident $name:ident { $($fvis:vis $field:ident : $ty:ty,)* }) => { + $vis $struct $name { + $( + #[allow(unused)] + $fvis $field : $ty, + )* + } + }; +} +mod foo { + allow_unused!( + pub struct Foo { + pub(crate) x: i32, + } + ); +} +fn main() { + let foo = foo::Foo { x: 2 }; + let _ = foo.x +} + "#, + ); + } }
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs b/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs index 4ae528b..f181021 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs
@@ -49,4 +49,25 @@ "#, ); } + + #[test] + fn dyn_super_trait_assoc_type() { + check_diagnostics( + r#" +//- minicore: future, send + +use core::{future::Future, marker::Send, pin::Pin}; + +trait FusedFuture: Future { + fn is_terminated(&self) -> bool; +} + +struct Box<T: ?Sized>(*const T); + +fn main() { + let _fut: Pin<Box<dyn FusedFuture<Output = ()> + Send>> = loop {}; +} +"#, + ); + } }
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs index 43ad12c1..66ece4e 100644 --- a/crates/ide-ssr/src/lib.rs +++ b/crates/ide-ssr/src/lib.rs
@@ -80,7 +80,7 @@ use crate::{errors::bail, matching::MatchFailureReason}; use hir::{FileRange, Semantics}; -use ide_db::symbol_index::SymbolsDatabase; +use ide_db::symbol_index::LocalRoots; use ide_db::text_edit::TextEdit; use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase, base_db::SourceDatabase}; use resolving::ResolvedRule; @@ -138,8 +138,8 @@ /// Constructs an instance using the start of the first file in `db` as the lookup context. pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> { - if let Some(first_file_id) = db - .local_roots() + if let Some(first_file_id) = LocalRoots::get(db) + .roots(db) .iter() .next() .and_then(|root| db.source_root(*root).source_root(db).iter().next())
diff --git a/crates/ide-ssr/src/resolving.rs b/crates/ide-ssr/src/resolving.rs index a48c0f8..461de40 100644 --- a/crates/ide-ssr/src/resolving.rs +++ b/crates/ide-ssr/src/resolving.rs
@@ -228,12 +228,10 @@ let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?; if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier { let name = path.segment()?.name_ref()?; - let module = self.scope.module(); adt.ty(self.scope.db).iterate_path_candidates( self.scope.db, &self.scope, &self.scope.visible_traits().0, - Some(module), None, |assoc_item| { let item_name = assoc_item.name(self.scope.db)?;
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs index 72f857c..56484ae 100644 --- a/crates/ide-ssr/src/search.rs +++ b/crates/ide-ssr/src/search.rs
@@ -9,6 +9,7 @@ EditionedFileId, FileId, FxHashSet, defs::Definition, search::{SearchScope, UsageSearchResult}, + symbol_index::LocalRoots, }; use syntax::{AstNode, SyntaxKind, SyntaxNode, ast}; @@ -156,8 +157,7 @@ if self.restrict_ranges.is_empty() { // Unrestricted search. use ide_db::base_db::SourceDatabase; - use ide_db::symbol_index::SymbolsDatabase; - for &root in self.sema.db.local_roots().iter() { + for &root in LocalRoots::get(self.sema.db).roots(self.sema.db).iter() { let sr = self.sema.db.source_root(root).source_root(self.sema.db); for file_id in sr.iter() { callback(file_id);
diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs index 1bb435f..8520335 100644 --- a/crates/ide-ssr/src/tests.rs +++ b/crates/ide-ssr/src/tests.rs
@@ -2,10 +2,10 @@ use hir::{FilePosition, FileRange}; use ide_db::{ EditionedFileId, FxHashSet, - base_db::{SourceDatabase, salsa::Durability}, + base_db::{SourceDatabase, salsa::Setter}, + symbol_index::LocalRoots, }; use test_utils::RangeOrOffset; -use triomphe::Arc; use crate::{MatchFinder, SsrRule}; @@ -66,7 +66,6 @@ /// `code` may optionally contain a cursor marker `$0`. If it doesn't, then the position will be /// the start of the file. If there's a second cursor marker, then we'll return a single range. pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec<FileRange>) { - use ide_db::symbol_index::SymbolsDatabase; use test_fixture::{WORKSPACE, WithFixture}; let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) { ide_db::RootDatabase::with_range_or_offset(code) @@ -88,7 +87,7 @@ } let mut local_roots = FxHashSet::default(); local_roots.insert(WORKSPACE); - db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); (db, position, selections) }
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 0ee9795..d663b70 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs
@@ -245,7 +245,7 @@ Some(NavigationTarget { file_id, full_range: TextRange::new(0.into(), size), - name: path.into(), + name: hir::Symbol::intern(&path), alias: None, focus_range: None, kind: None, @@ -598,7 +598,13 @@ let value_range = value.syntax().text_range(); let navs = navigation_target::orig_range_with_focus_r(db, file_id, value_range, focus_range); navs.map(|(hir::FileRangeWrapper { file_id, range }, focus_range)| { - NavigationTarget::from_syntax(file_id, "<expr>".into(), focus_range, range, kind) + NavigationTarget::from_syntax( + file_id, + hir::Symbol::intern("<expr>"), + focus_range, + range, + kind, + ) }) } @@ -607,7 +613,6 @@ use crate::{GotoDefinitionConfig, fixture}; use ide_db::{FileRange, MiniCore}; use itertools::Itertools; - use syntax::SmolStr; const TEST_CONFIG: GotoDefinitionConfig<'_> = GotoDefinitionConfig { minicore: MiniCore::default() }; @@ -658,7 +663,7 @@ let Some(target) = navs.into_iter().next() else { panic!("expected single navigation target but encountered none"); }; - assert_eq!(target.name, SmolStr::new_inline(expected_name)); + assert_eq!(target.name, hir::Symbol::intern(expected_name)); } #[test]
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 3a19531..071eacf 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs
@@ -350,7 +350,7 @@ fn hover_closure() { check( r#" -//- minicore: copy +//- minicore: copy, add, builtin_impls fn main() { let x = 2; let y = $0|z| x + z; @@ -3280,7 +3280,7 @@ check_hover_no_memory_layout( r#" -//- minicore: copy +//- minicore: copy, add, builtin_impls fn main() { let x = 2; let y = $0|z| x + z;
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 21550d5..2b4fe54 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs
@@ -316,6 +316,7 @@ pub closure_capture_hints: bool, pub binding_mode_hints: bool, pub implicit_drop_hints: bool, + pub implied_dyn_trait_hints: bool, pub lifetime_elision_hints: LifetimeElisionHints, pub param_names_for_lifetime_elision_hints: bool, pub hide_named_constructor_hints: bool, @@ -907,6 +908,7 @@ closing_brace_hints_min_lines: None, fields_to_resolve: InlayFieldsToResolve::empty(), implicit_drop_hints: false, + implied_dyn_trait_hints: false, range_exclusive_hints: false, minicore: MiniCore::default(), };
diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index ebb0d57..b2584b6 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs
@@ -352,7 +352,7 @@ check_with_config( InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, r#" -//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn +//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn, builtin_impls fn main() { let _: u32 = loop {}; //^^^^^^^<never-to-any> @@ -466,9 +466,8 @@ ..DISABLED_CONFIG }, r#" -//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn +//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn, builtin_impls fn main() { - Struct.consume(); Struct.by_ref(); //^^^^^^.&
diff --git a/crates/ide/src/inlay_hints/implied_dyn_trait.rs b/crates/ide/src/inlay_hints/implied_dyn_trait.rs index 562eb1e..4fbc88a 100644 --- a/crates/ide/src/inlay_hints/implied_dyn_trait.rs +++ b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
@@ -14,6 +14,10 @@ config: &InlayHintsConfig<'_>, path: Either<ast::PathType, ast::DynTraitType>, ) -> Option<()> { + if !config.implied_dyn_trait_hints { + return None; + } + let parent = path.syntax().parent()?; let range = match path { Either::Left(path) => { @@ -76,7 +80,14 @@ #[track_caller] fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { - check_with_config(InlayHintsConfig { sized_bound: true, ..DISABLED_CONFIG }, ra_fixture); + check_with_config( + InlayHintsConfig { + sized_bound: true, + implied_dyn_trait_hints: true, + ..DISABLED_CONFIG + }, + ra_fixture, + ); } #[test] @@ -125,7 +136,7 @@ #[test] fn edit() { check_edit( - DISABLED_CONFIG, + InlayHintsConfig { implied_dyn_trait_hints: true, ..DISABLED_CONFIG }, r#" trait T {} fn foo(
diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs index 4122c16..8d03487 100644 --- a/crates/ide/src/inlay_hints/param_name.rs +++ b/crates/ide/src/inlay_hints/param_name.rs
@@ -111,7 +111,8 @@ } const INSIGNIFICANT_METHOD_NAMES: &[&str] = &["clone", "as_ref", "into"]; -const INSIGNIFICANT_PARAMETER_NAMES: &[&str] = &["predicate", "value", "pat", "rhs", "other"]; +const INSIGNIFICANT_PARAMETER_NAMES: &[&str] = + &["predicate", "value", "pat", "rhs", "other", "msg", "op"]; fn should_hide_param_name_hint( sema: &Semantics<'_, RootDatabase>,
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index b222ff3..7d5d905 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs
@@ -6,7 +6,8 @@ use either::Either; use hir::{ AssocItem, Crate, FieldSource, HasContainer, HasCrate, HasSource, HirDisplay, HirFileId, - InFile, LocalSource, ModuleSource, Semantics, db::ExpandDatabase, symbols::FileSymbol, + InFile, LocalSource, ModuleSource, Semantics, Symbol, db::ExpandDatabase, sym, + symbols::FileSymbol, }; use ide_db::{ FileId, FileRange, RootDatabase, SymbolKind, @@ -16,12 +17,10 @@ famous_defs::FamousDefs, ra_fixture::UpmapFromRaFixture, }; -use span::Edition; use stdx::never; use syntax::{ - AstNode, SmolStr, SyntaxNode, TextRange, ToSmolStr, + AstNode, SyntaxNode, TextRange, ast::{self, HasName}, - format_smolstr, }; /// `NavigationTarget` represents an element in the editor's UI which you can @@ -48,17 +47,14 @@ /// /// This range must be contained within [`Self::full_range`]. pub focus_range: Option<TextRange>, - // FIXME: Symbol - pub name: SmolStr, + pub name: Symbol, pub kind: Option<SymbolKind>, - // FIXME: Symbol - pub container_name: Option<SmolStr>, + pub container_name: Option<Symbol>, pub description: Option<String>, pub docs: Option<Documentation>, /// In addition to a `name` field, a `NavigationTarget` may also be aliased /// In such cases we want a `NavigationTarget` to be accessible by its alias - // FIXME: Symbol - pub alias: Option<SmolStr>, + pub alias: Option<Symbol>, } impl fmt::Debug for NavigationTarget { @@ -149,9 +145,7 @@ db: &RootDatabase, module: hir::Module, ) -> UpmappingResult<NavigationTarget> { - let edition = module.krate().edition(db); - let name = - module.name(db).map(|it| it.display_no_db(edition).to_smolstr()).unwrap_or_default(); + let name = module.name(db).map(|it| it.symbol().clone()).unwrap_or_else(|| sym::underscore); match module.declaration_source(db) { Some(InFile { value, file_id }) => { orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( @@ -199,7 +193,8 @@ InFile { file_id, value }: InFile<&dyn ast::HasName>, kind: SymbolKind, ) -> UpmappingResult<NavigationTarget> { - 
let name: SmolStr = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into()); + let name = + value.name().map(|it| Symbol::intern(&it.text())).unwrap_or_else(|| sym::underscore); orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( |(FileRange { file_id, range: full_range }, focus_range)| { @@ -210,7 +205,7 @@ pub(crate) fn from_syntax( file_id: FileId, - name: SmolStr, + name: Symbol, focus_range: Option<TextRange>, full_range: TextRange, kind: SymbolKind, @@ -235,8 +230,6 @@ sema: &Semantics<'_, RootDatabase>, ) -> Option<UpmappingResult<NavigationTarget>> { let db = sema.db; - let edition = - self.def.module(db).map(|it| it.krate().edition(db)).unwrap_or(Edition::CURRENT); let display_target = self.def.krate(db).to_display_target(db); Some( orig_range_with_focus_r( @@ -248,11 +241,12 @@ .map(|(FileRange { file_id, range: full_range }, focus_range)| { NavigationTarget { file_id, - name: self.is_alias.then(|| self.def.name(db)).flatten().map_or_else( - || self.name.as_str().into(), - |it| it.display_no_db(edition).to_smolstr(), - ), - alias: self.is_alias.then(|| self.name.as_str().into()), + name: self + .is_alias + .then(|| self.def.name(db)) + .flatten() + .map_or_else(|| self.name.clone(), |it| it.symbol().clone()), + alias: self.is_alias.then(|| self.name.clone()), kind: Some(self.def.into()), full_range, focus_range, @@ -349,52 +343,50 @@ pub(crate) trait ToNavFromAst: Sized { const KIND: SymbolKind; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { + fn container_name(self, db: &RootDatabase) -> Option<Symbol> { _ = db; None } } -fn container_name(db: &RootDatabase, t: impl HasContainer, edition: Edition) -> Option<SmolStr> { +fn container_name(db: &RootDatabase, t: impl HasContainer) -> Option<Symbol> { match t.container(db) { - hir::ItemContainer::Trait(it) => Some(it.name(db).display_no_db(edition).to_smolstr()), + hir::ItemContainer::Trait(it) => Some(it.name(db).symbol().clone()), // FIXME: Handle owners of 
blocks correctly here - hir::ItemContainer::Module(it) => { - it.name(db).map(|name| name.display_no_db(edition).to_smolstr()) - } + hir::ItemContainer::Module(it) => it.name(db).map(|name| name.symbol().clone()), _ => None, } } impl ToNavFromAst for hir::Function { const KIND: SymbolKind = SymbolKind::Function; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { - container_name(db, self, self.krate(db).edition(db)) + fn container_name(self, db: &RootDatabase) -> Option<Symbol> { + container_name(db, self) } } impl ToNavFromAst for hir::Const { const KIND: SymbolKind = SymbolKind::Const; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { - container_name(db, self, self.krate(db).edition(db)) + fn container_name(self, db: &RootDatabase) -> Option<Symbol> { + container_name(db, self) } } impl ToNavFromAst for hir::Static { const KIND: SymbolKind = SymbolKind::Static; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { - container_name(db, self, self.krate(db).edition(db)) + fn container_name(self, db: &RootDatabase) -> Option<Symbol> { + container_name(db, self) } } impl ToNavFromAst for hir::Struct { const KIND: SymbolKind = SymbolKind::Struct; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { - container_name(db, self, self.krate(db).edition(db)) + fn container_name(self, db: &RootDatabase) -> Option<Symbol> { + container_name(db, self) } } impl ToNavFromAst for hir::Enum { const KIND: SymbolKind = SymbolKind::Enum; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { - container_name(db, self, self.krate(db).edition(db)) + fn container_name(self, db: &RootDatabase) -> Option<Symbol> { + container_name(db, self) } } impl ToNavFromAst for hir::Variant { @@ -402,20 +394,20 @@ } impl ToNavFromAst for hir::Union { const KIND: SymbolKind = SymbolKind::Union; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { - container_name(db, self, self.krate(db).edition(db)) + fn 
container_name(self, db: &RootDatabase) -> Option<Symbol> { + container_name(db, self) } } impl ToNavFromAst for hir::TypeAlias { const KIND: SymbolKind = SymbolKind::TypeAlias; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { - container_name(db, self, self.krate(db).edition(db)) + fn container_name(self, db: &RootDatabase) -> Option<Symbol> { + container_name(db, self) } } impl ToNavFromAst for hir::Trait { const KIND: SymbolKind = SymbolKind::Trait; - fn container_name(self, db: &RootDatabase) -> Option<SmolStr> { - container_name(db, self, self.krate(db).edition(db)) + fn container_name(self, db: &RootDatabase) -> Option<Symbol> { + container_name(db, self) } } @@ -451,10 +443,8 @@ impl ToNav for hir::Module { fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> { let InFile { file_id, value } = self.definition_source(db); - let edition = self.krate(db).edition(db); - let name = - self.name(db).map(|it| it.display_no_db(edition).to_smolstr()).unwrap_or_default(); + let name = self.name(db).map(|it| it.symbol().clone()).unwrap_or_else(|| sym::underscore); let (syntax, focus) = match &value { ModuleSource::SourceFile(node) => (node.syntax(), None), ModuleSource::Module(node) => (node.syntax(), node.name()), @@ -499,7 +489,7 @@ |(FileRange { file_id, range: full_range }, focus_range)| { NavigationTarget::from_syntax( file_id, - "impl".into(), + sym::kw_impl, focus_range, full_range, SymbolKind::Impl, @@ -521,16 +511,12 @@ .rename() .map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right)); let krate = self.module(db).krate(); - let edition = krate.edition(db); Some(orig_range_with_focus(db, file_id, value.syntax(), focus).map( |(FileRange { file_id, range: full_range }, focus_range)| { let mut res = NavigationTarget::from_syntax( file_id, - self.alias_or_name(db) - .unwrap_or_else(|| self.name(db)) - .display_no_db(edition) - .to_smolstr(), + self.alias_or_name(db).unwrap_or_else(|| 
self.name(db)).symbol().clone(), focus_range, full_range, SymbolKind::Module, @@ -538,7 +524,7 @@ res.docs = self.docs(db); res.description = Some(self.display(db, krate.to_display_target(db)).to_string()); - res.container_name = container_name(db, *self, edition); + res.container_name = container_name(db, *self); res }, )) @@ -570,7 +556,7 @@ |(FileRange { file_id, range: full_range }, focus_range)| { NavigationTarget::from_syntax( file_id, - format_smolstr!("{}", self.index()), + Symbol::integer(self.index()), focus_range, full_range, SymbolKind::Field, @@ -655,11 +641,10 @@ Either::Left(bind_pat) => (bind_pat.syntax(), bind_pat.name()), Either::Right(it) => (it.syntax(), it.name()), }; - let edition = self.local.parent(db).module(db).krate().edition(db); orig_range_with_focus(db, file_id, node, name).map( |(FileRange { file_id, range: full_range }, focus_range)| { - let name = local.name(db).display_no_db(edition).to_smolstr(); + let name = local.name(db).symbol().clone(); let kind = if local.is_self(db) { SymbolKind::SelfParam } else if local.is_param(db) { @@ -696,8 +681,7 @@ ) -> Option<UpmappingResult<NavigationTarget>> { let db = sema.db; let InFile { file_id, value } = self.source(db)?; - // Labels can't be keywords, so no escaping needed. 
- let name = self.name(db).display_no_db(Edition::Edition2015).to_smolstr(); + let name = self.name(db).symbol().clone(); Some(orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map( |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { @@ -722,8 +706,7 @@ ) -> Option<UpmappingResult<NavigationTarget>> { let db = sema.db; let InFile { file_id, value } = self.merge().source(db)?; - let edition = self.module(db).krate().edition(db); - let name = self.name(db).display_no_db(edition).to_smolstr(); + let name = self.name(db).symbol().clone(); let value = match value { Either::Left(ast::TypeOrConstParam::Type(x)) => Either::Left(x), @@ -772,8 +755,7 @@ ) -> Option<UpmappingResult<NavigationTarget>> { let db = sema.db; let InFile { file_id, value } = self.source(db)?; - // Lifetimes cannot be keywords, so not escaping needed. - let name = self.name(db).display_no_db(Edition::Edition2015).to_smolstr(); + let name = self.name(db).symbol().clone(); Some(orig_range(db, file_id, value.syntax()).map( |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { @@ -798,8 +780,7 @@ ) -> Option<UpmappingResult<NavigationTarget>> { let db = sema.db; let InFile { file_id, value } = self.merge().source(db)?; - let edition = self.module(db).krate().edition(db); - let name = self.name(db).display_no_db(edition).to_smolstr(); + let name = self.name(db).symbol().clone(); let value = match value { Either::Left(ast::TypeOrConstParam::Const(x)) => x, @@ -834,21 +815,17 @@ let InFile { file_id, value } = &self.source(db)?; let file_id = *file_id; Some(orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( - |(FileRange { file_id, range: full_range }, focus_range)| { - let edition = self.parent(db).module(db).krate().edition(db); - NavigationTarget { - file_id, - name: self - .name(db) - .map_or_else(|| "_".into(), |it| it.display(db, edition).to_smolstr()), - alias: None, - kind: Some(SymbolKind::Local), - full_range, - 
focus_range, - container_name: None, - description: None, - docs: None, - } + |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { + file_id, + name: + self.name(db).map_or_else(|| sym::underscore.clone(), |it| it.symbol().clone()), + alias: None, + kind: Some(SymbolKind::Local), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, }, )) }
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index a53a192..516cc7f 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs
@@ -1058,7 +1058,7 @@ use self$0; "#, expect![[r#" - Module FileId(0) 0..10 + _ Module FileId(0) 0..10 FileId(0) 4..8 import "#]], @@ -3130,7 +3130,7 @@ } "#, expect![[r#" - 'r#break Label FileId(0) 87..96 87..95 + 'break Label FileId(0) 87..96 87..95 FileId(0) 113..121 "#]], @@ -3146,7 +3146,7 @@ } "#, expect![[r#" - 'r#fn LifetimeParam FileId(0) 7..12 + 'fn LifetimeParam FileId(0) 7..12 FileId(0) 18..23 FileId(0) 44..49
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 494701d..2086a19 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs
@@ -231,7 +231,7 @@ .cmp(&nav_b.focus_range.map_or_else(t_0, |it| it.start())) }) .then_with(|| kind.disc().cmp(&kind_b.disc())) - .then_with(|| nav.name.cmp(&nav_b.name)) + .then_with(|| nav.name.as_str().cmp(nav_b.name.as_str())) } fn find_related_tests( @@ -817,7 +817,7 @@ "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..331, name: \"\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..331, name: \"_\", kind: Module })", "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", "(Bin, NavigationTarget { file_id: FileId(0), full_range: 15..76, focus_range: 42..71, name: \"__cortex_m_rt_main_trampoline\", kind: Function })", "(Bin, NavigationTarget { file_id: FileId(0), full_range: 78..154, focus_range: 113..149, name: \"__cortex_m_rt_main_trampoline_unsafe\", kind: Function })", @@ -1138,7 +1138,7 @@ "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..51, name: \"\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..51, name: \"_\", kind: Module })", "(Test, NavigationTarget { file_id: FileId(0), full_range: 1..50, focus_range: 36..45, name: \"test_foo1\", kind: Function }, Atom(KeyValue { key: \"feature\", value: \"foo\" }))", ] "#]], @@ -1157,7 +1157,7 @@ "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..73, name: \"\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..73, name: \"_\", kind: Module })", "(Test, NavigationTarget { file_id: FileId(0), full_range: 1..72, focus_range: 58..67, name: \"test_foo1\", kind: Function }, All([Atom(KeyValue { key: \"feature\", value: \"foo\" }), Atom(KeyValue { key: \"feature\", value: \"bar\" })]))", ] "#]], @@ -1236,7 +1236,7 @@ "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })", + 
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"_\", kind: Module })", "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })", "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })", "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)", @@ -1679,10 +1679,10 @@ "#, expect![[r#" [ - "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..461, focus_range: 5..10, name: \"r#mod\", kind: Module, description: \"mod r#mod\" })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..461, focus_range: 5..10, name: \"mod\", kind: Module, description: \"mod r#mod\" })", "(Test, NavigationTarget { file_id: FileId(0), full_range: 17..41, focus_range: 32..36, name: \"r#fn\", kind: Function })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 47..84, name: \"r#for\", container_name: \"r#mod\" })", - "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 90..146, name: \"r#struct\", container_name: \"r#mod\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 47..84, name: \"r#for\", container_name: \"mod\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 90..146, name: \"r#struct\", container_name: \"mod\" })", "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 152..266, focus_range: 189..205, name: \"impl\", kind: Impl })", "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 216..260, name: \"r#fn\" })", "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 323..367, name: \"r#fn\" })",
diff --git a/crates/ide/src/ssr.rs b/crates/ide/src/ssr.rs index 7df4499..dc8f343 100644 --- a/crates/ide/src/ssr.rs +++ b/crates/ide/src/ssr.rs
@@ -59,11 +59,9 @@ use expect_test::expect; use ide_assists::{Assist, AssistResolveStrategy}; use ide_db::{ - FileRange, FxHashSet, RootDatabase, base_db::salsa::Durability, - symbol_index::SymbolsDatabase, + FileRange, FxHashSet, RootDatabase, base_db::salsa::Setter as _, symbol_index::LocalRoots, }; use test_fixture::WithFixture; - use triomphe::Arc; use super::ssr_assists; @@ -74,7 +72,7 @@ let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture); let mut local_roots = FxHashSet::default(); local_roots.insert(test_fixture::WORKSPACE); - db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); + LocalRoots::get(&db).set_roots(&mut db).to(local_roots); ssr_assists( &db, &resolve,
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index e261928..052de0f 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs
@@ -173,6 +173,7 @@ adjustment_hints_mode: AdjustmentHintsMode::Prefix, adjustment_hints_hide_outside_unsafe: false, implicit_drop_hints: false, + implied_dyn_trait_hints: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, hide_closure_parameter_hints: false,
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs index 756377f..0624467 100644 --- a/crates/intern/src/symbol/symbols.rs +++ b/crates/intern/src/symbol/symbols.rs
@@ -85,6 +85,7 @@ false_ = "false", let_ = "let", const_ = "const", + kw_impl = "impl", proc_dash_macro = "proc-macro", aapcs_dash_unwind = "aapcs-unwind", avr_dash_interrupt = "avr-interrupt", @@ -519,4 +520,7 @@ never_type_fallback, specialization, min_specialization, + arbitrary_self_types, + arbitrary_self_types_pointers, + supertrait_item_shadowing, }
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index cde62e0..ab18309 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -283,7 +283,7 @@ if p.eat(T![,]) { while !p.at(EOF) && !p.at(T![')']) { let m = p.start(); - if p.at(IDENT) && p.nth_at(1, T![=]) { + if p.at(IDENT) && p.nth_at(1, T![=]) && !p.nth_at(2, T![=]) { name(p); p.bump(T![=]); }
diff --git a/crates/parser/src/grammar/items/traits.rs b/crates/parser/src/grammar/items/traits.rs index c1b1a3f..4e48a4c 100644 --- a/crates/parser/src/grammar/items/traits.rs +++ b/crates/parser/src/grammar/items/traits.rs
@@ -54,12 +54,13 @@ // impl const Send for S {} p.eat(T![const]); - // FIXME: never type + // test impl_item_never_type // impl ! {} - - // test impl_item_neg - // impl !Send for S {} - p.eat(T![!]); + if p.at(T![!]) && !p.nth_at(1, T!['{']) { + // test impl_item_neg + // impl !Send for S {} + p.eat(T![!]); + } impl_type(p); if p.eat(T![for]) { impl_type(p);
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 93e02a9..6a38044 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs
@@ -1006,7 +1006,149 @@ } } #[macro_export] -macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; 
[macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [global_asm] => { $ crate :: SyntaxKind :: GLOBAL_ASM_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] 
=> { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [naked_asm] => { $ crate :: SyntaxKind :: NAKED_ASM_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; } +macro_rules ! 
T_ { + [$] => { $ crate :: SyntaxKind :: DOLLAR }; + [;] => { $ crate :: SyntaxKind :: SEMICOLON }; + [,] => { $ crate :: SyntaxKind :: COMMA }; + ['('] => { $ crate :: SyntaxKind :: L_PAREN }; + [')'] => { $ crate :: SyntaxKind :: R_PAREN }; + ['{'] => { $ crate :: SyntaxKind :: L_CURLY }; + ['}'] => { $ crate :: SyntaxKind :: R_CURLY }; + ['['] => { $ crate :: SyntaxKind :: L_BRACK }; + [']'] => { $ crate :: SyntaxKind :: R_BRACK }; + [<] => { $ crate :: SyntaxKind :: L_ANGLE }; + [>] => { $ crate :: SyntaxKind :: R_ANGLE }; + [@] => { $ crate :: SyntaxKind :: AT }; + [#] => { $ crate :: SyntaxKind :: POUND }; + [~] => { $ crate :: SyntaxKind :: TILDE }; + [?] => { $ crate :: SyntaxKind :: QUESTION }; + [&] => { $ crate :: SyntaxKind :: AMP }; + [|] => { $ crate :: SyntaxKind :: PIPE }; + [+] => { $ crate :: SyntaxKind :: PLUS }; + [*] => { $ crate :: SyntaxKind :: STAR }; + [/] => { $ crate :: SyntaxKind :: SLASH }; + [^] => { $ crate :: SyntaxKind :: CARET }; + [%] => { $ crate :: SyntaxKind :: PERCENT }; + [_] => { $ crate :: SyntaxKind :: UNDERSCORE }; + [.] => { $ crate :: SyntaxKind :: DOT }; + [..] => { $ crate :: SyntaxKind :: DOT2 }; + [...] => { $ crate :: SyntaxKind :: DOT3 }; + [..=] => { $ crate :: SyntaxKind :: DOT2EQ }; + [:] => { $ crate :: SyntaxKind :: COLON }; + [::] => { $ crate :: SyntaxKind :: COLON2 }; + [=] => { $ crate :: SyntaxKind :: EQ }; + [==] => { $ crate :: SyntaxKind :: EQ2 }; + [=>] => { $ crate :: SyntaxKind :: FAT_ARROW }; + [!] 
=> { $ crate :: SyntaxKind :: BANG }; + [!=] => { $ crate :: SyntaxKind :: NEQ }; + [-] => { $ crate :: SyntaxKind :: MINUS }; + [->] => { $ crate :: SyntaxKind :: THIN_ARROW }; + [<=] => { $ crate :: SyntaxKind :: LTEQ }; + [>=] => { $ crate :: SyntaxKind :: GTEQ }; + [+=] => { $ crate :: SyntaxKind :: PLUSEQ }; + [-=] => { $ crate :: SyntaxKind :: MINUSEQ }; + [|=] => { $ crate :: SyntaxKind :: PIPEEQ }; + [&=] => { $ crate :: SyntaxKind :: AMPEQ }; + [^=] => { $ crate :: SyntaxKind :: CARETEQ }; + [/=] => { $ crate :: SyntaxKind :: SLASHEQ }; + [*=] => { $ crate :: SyntaxKind :: STAREQ }; + [%=] => { $ crate :: SyntaxKind :: PERCENTEQ }; + [&&] => { $ crate :: SyntaxKind :: AMP2 }; + [||] => { $ crate :: SyntaxKind :: PIPE2 }; + [<<] => { $ crate :: SyntaxKind :: SHL }; + [>>] => { $ crate :: SyntaxKind :: SHR }; + [<<=] => { $ crate :: SyntaxKind :: SHLEQ }; + [>>=] => { $ crate :: SyntaxKind :: SHREQ }; + [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW }; + [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW }; + [as] => { $ crate :: SyntaxKind :: AS_KW }; + [become] => { $ crate :: SyntaxKind :: BECOME_KW }; + [box] => { $ crate :: SyntaxKind :: BOX_KW }; + [break] => { $ crate :: SyntaxKind :: BREAK_KW }; + [const] => { $ crate :: SyntaxKind :: CONST_KW }; + [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW }; + [crate] => { $ crate :: SyntaxKind :: CRATE_KW }; + [do] => { $ crate :: SyntaxKind :: DO_KW }; + [else] => { $ crate :: SyntaxKind :: ELSE_KW }; + [enum] => { $ crate :: SyntaxKind :: ENUM_KW }; + [extern] => { $ crate :: SyntaxKind :: EXTERN_KW }; + [false] => { $ crate :: SyntaxKind :: FALSE_KW }; + [final] => { $ crate :: SyntaxKind :: FINAL_KW }; + [fn] => { $ crate :: SyntaxKind :: FN_KW }; + [for] => { $ crate :: SyntaxKind :: FOR_KW }; + [if] => { $ crate :: SyntaxKind :: IF_KW }; + [impl] => { $ crate :: SyntaxKind :: IMPL_KW }; + [in] => { $ crate :: SyntaxKind :: IN_KW }; + [let] => { $ crate :: SyntaxKind :: LET_KW }; + [loop] => { 
$ crate :: SyntaxKind :: LOOP_KW }; + [macro] => { $ crate :: SyntaxKind :: MACRO_KW }; + [match] => { $ crate :: SyntaxKind :: MATCH_KW }; + [mod] => { $ crate :: SyntaxKind :: MOD_KW }; + [move] => { $ crate :: SyntaxKind :: MOVE_KW }; + [mut] => { $ crate :: SyntaxKind :: MUT_KW }; + [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW }; + [priv] => { $ crate :: SyntaxKind :: PRIV_KW }; + [pub] => { $ crate :: SyntaxKind :: PUB_KW }; + [ref] => { $ crate :: SyntaxKind :: REF_KW }; + [return] => { $ crate :: SyntaxKind :: RETURN_KW }; + [self] => { $ crate :: SyntaxKind :: SELF_KW }; + [static] => { $ crate :: SyntaxKind :: STATIC_KW }; + [struct] => { $ crate :: SyntaxKind :: STRUCT_KW }; + [super] => { $ crate :: SyntaxKind :: SUPER_KW }; + [trait] => { $ crate :: SyntaxKind :: TRAIT_KW }; + [true] => { $ crate :: SyntaxKind :: TRUE_KW }; + [type] => { $ crate :: SyntaxKind :: TYPE_KW }; + [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW }; + [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW }; + [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW }; + [use] => { $ crate :: SyntaxKind :: USE_KW }; + [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW }; + [where] => { $ crate :: SyntaxKind :: WHERE_KW }; + [while] => { $ crate :: SyntaxKind :: WHILE_KW }; + [yield] => { $ crate :: SyntaxKind :: YIELD_KW }; + [asm] => { $ crate :: SyntaxKind :: ASM_KW }; + [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW }; + [auto] => { $ crate :: SyntaxKind :: AUTO_KW }; + [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW }; + [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW }; + [default] => { $ crate :: SyntaxKind :: DEFAULT_KW }; + [dyn] => { $ crate :: SyntaxKind :: DYN_KW }; + [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW }; + [global_asm] => { $ crate :: SyntaxKind :: GLOBAL_ASM_KW }; + [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW }; + [inout] => { $ crate :: SyntaxKind :: INOUT_KW }; + [label] => { $ crate :: SyntaxKind :: 
LABEL_KW }; + [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW }; + [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW }; + [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW }; + [naked_asm] => { $ crate :: SyntaxKind :: NAKED_ASM_KW }; + [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW }; + [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW }; + [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW }; + [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW }; + [options] => { $ crate :: SyntaxKind :: OPTIONS_KW }; + [out] => { $ crate :: SyntaxKind :: OUT_KW }; + [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW }; + [pure] => { $ crate :: SyntaxKind :: PURE_KW }; + [raw] => { $ crate :: SyntaxKind :: RAW_KW }; + [readonly] => { $ crate :: SyntaxKind :: READONLY_KW }; + [safe] => { $ crate :: SyntaxKind :: SAFE_KW }; + [sym] => { $ crate :: SyntaxKind :: SYM_KW }; + [union] => { $ crate :: SyntaxKind :: UNION_KW }; + [yeet] => { $ crate :: SyntaxKind :: YEET_KW }; + [async] => { $ crate :: SyntaxKind :: ASYNC_KW }; + [await] => { $ crate :: SyntaxKind :: AWAIT_KW }; + [dyn] => { $ crate :: SyntaxKind :: DYN_KW }; + [gen] => { $ crate :: SyntaxKind :: GEN_KW }; + [try] => { $ crate :: SyntaxKind :: TRY_KW }; + [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT }; + [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER }; + [ident] => { $ crate :: SyntaxKind :: IDENT }; + [string] => { $ crate :: SyntaxKind :: STRING }; + [shebang] => { $ crate :: SyntaxKind :: SHEBANG }; + [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER }; +} + impl ::core::marker::Copy for SyntaxKind {} impl ::core::clone::Clone for SyntaxKind { #[inline]
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs index 7f5ff0e..7b0d32d 100644 --- a/crates/parser/test_data/generated/runner.rs +++ b/crates/parser/test_data/generated/runner.rs
@@ -322,6 +322,10 @@ #[test] fn impl_item_neg() { run_and_expect_no_errors("test_data/parser/inline/ok/impl_item_neg.rs"); } #[test] + fn impl_item_never_type() { + run_and_expect_no_errors("test_data/parser/inline/ok/impl_item_never_type.rs"); + } + #[test] fn impl_trait_type() { run_and_expect_no_errors("test_data/parser/inline/ok/impl_trait_type.rs"); }
diff --git a/crates/parser/test_data/parser/inline/ok/impl_item_never_type.rast b/crates/parser/test_data/parser/inline/ok/impl_item_never_type.rast new file mode 100644 index 0000000..fa4575e --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/impl_item_never_type.rast
@@ -0,0 +1,11 @@ +SOURCE_FILE + IMPL + IMPL_KW "impl" + WHITESPACE " " + NEVER_TYPE + BANG "!" + WHITESPACE " " + ASSOC_ITEM_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/impl_item_never_type.rs b/crates/parser/test_data/parser/inline/ok/impl_item_never_type.rs new file mode 100644 index 0000000..ed8057b --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/impl_item_never_type.rs
@@ -0,0 +1 @@ +impl ! {}
diff --git a/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/Cargo.toml index bc04482..78630dd 100644 --- a/crates/proc-macro-srv/proc-macro-test/Cargo.toml +++ b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
@@ -10,4 +10,4 @@ doctest = false [build-dependencies] -cargo_metadata = "0.20.0" +cargo_metadata = "0.23.0"
diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml index 0dbb309..ec44369 100644 --- a/crates/project-model/Cargo.toml +++ b/crates/project-model/Cargo.toml
@@ -21,6 +21,7 @@ serde.workspace = true serde_derive.workspace = true temp-dir.workspace = true +toml.workspace = true tracing.workspace = true triomphe.workspace = true la-arena.workspace = true
diff --git a/crates/project-model/src/cargo_config_file.rs b/crates/project-model/src/cargo_config_file.rs index a1e7ed0..5d6e5fd 100644 --- a/crates/project-model/src/cargo_config_file.rs +++ b/crates/project-model/src/cargo_config_file.rs
@@ -1,37 +1,135 @@ -//! Read `.cargo/config.toml` as a JSON object -use paths::{Utf8Path, Utf8PathBuf}; +//! Read `.cargo/config.toml` as a TOML table +use paths::{AbsPath, Utf8Path, Utf8PathBuf}; use rustc_hash::FxHashMap; +use toml::{ + Spanned, + de::{DeTable, DeValue}, +}; use toolchain::Tool; use crate::{ManifestPath, Sysroot, utf8_stdout}; -pub(crate) type CargoConfigFile = serde_json::Map<String, serde_json::Value>; +#[derive(Clone)] +pub struct CargoConfigFile(String); -pub(crate) fn read( - manifest: &ManifestPath, - extra_env: &FxHashMap<String, Option<String>>, - sysroot: &Sysroot, -) -> Option<CargoConfigFile> { - let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env); - cargo_config - .args(["-Z", "unstable-options", "config", "get", "--format", "json"]) - .env("RUSTC_BOOTSTRAP", "1"); - if manifest.is_rust_manifest() { - cargo_config.arg("-Zscript"); +impl CargoConfigFile { + pub(crate) fn load( + manifest: &ManifestPath, + extra_env: &FxHashMap<String, Option<String>>, + sysroot: &Sysroot, + ) -> Option<Self> { + let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env); + cargo_config + .args(["-Z", "unstable-options", "config", "get", "--format", "toml", "--show-origin"]) + .env("RUSTC_BOOTSTRAP", "1"); + if manifest.is_rust_manifest() { + cargo_config.arg("-Zscript"); + } + + tracing::debug!("Discovering cargo config by {cargo_config:?}"); + utf8_stdout(&mut cargo_config) + .inspect(|toml| { + tracing::debug!("Discovered cargo config: {toml:?}"); + }) + .inspect_err(|err| { + tracing::debug!("Failed to discover cargo config: {err:?}"); + }) + .ok() + .map(CargoConfigFile) } - tracing::debug!("Discovering cargo config by {:?}", cargo_config); - let json: serde_json::Map<String, serde_json::Value> = utf8_stdout(&mut cargo_config) - .inspect(|json| { - tracing::debug!("Discovered cargo config: {:?}", json); - }) - .inspect_err(|err| { - tracing::debug!("Failed to discover cargo config: {:?}", err); - }) - 
.ok() - .and_then(|stdout| serde_json::from_str(&stdout).ok())?; + pub(crate) fn read<'a>(&'a self) -> Option<CargoConfigFileReader<'a>> { + CargoConfigFileReader::new(&self.0) + } - Some(json) + #[cfg(test)] + pub(crate) fn from_string_for_test(s: String) -> Self { + CargoConfigFile(s) + } +} + +pub(crate) struct CargoConfigFileReader<'a> { + toml_str: &'a str, + line_ends: Vec<usize>, + table: Spanned<DeTable<'a>>, +} + +impl<'a> CargoConfigFileReader<'a> { + fn new(toml_str: &'a str) -> Option<Self> { + let toml = DeTable::parse(toml_str) + .inspect_err(|err| tracing::debug!("Failed to parse cargo config into toml: {err:?}")) + .ok()?; + let mut last_line_end = 0; + let line_ends = toml_str + .lines() + .map(|l| { + last_line_end += l.len() + 1; + last_line_end + }) + .collect(); + + Some(CargoConfigFileReader { toml_str, table: toml, line_ends }) + } + + pub(crate) fn get_spanned( + &self, + accessor: impl IntoIterator<Item = &'a str>, + ) -> Option<&Spanned<DeValue<'a>>> { + let mut keys = accessor.into_iter(); + let mut val = self.table.get_ref().get(keys.next()?)?; + for key in keys { + let DeValue::Table(map) = val.get_ref() else { return None }; + val = map.get(key)?; + } + Some(val) + } + + pub(crate) fn get(&self, accessor: impl IntoIterator<Item = &'a str>) -> Option<&DeValue<'a>> { + self.get_spanned(accessor).map(|it| it.as_ref()) + } + + pub(crate) fn get_origin_root(&self, spanned: &Spanned<DeValue<'a>>) -> Option<&AbsPath> { + let span = spanned.span(); + + for &line_end in &self.line_ends { + if line_end < span.end { + continue; + } + + let after_span = &self.toml_str[span.end..line_end]; + + // table.key = "value" # /parent/.cargo/config.toml + // | | + // span.end line_end + let origin_path = after_span + .strip_prefix([',']) // strip trailing comma + .unwrap_or(after_span) + .trim_start() + .strip_prefix(['#']) + .and_then(|path| { + let path = path.trim(); + if path.starts_with("environment variable") + || path.starts_with("--config cli 
option") + { + None + } else { + Some(path) + } + }); + + return origin_path.and_then(|path| { + <&Utf8Path>::from(path) + .try_into() + .ok() + // Two levels up to the config file. + // See https://doc.rust-lang.org/cargo/reference/config.html#config-relative-paths + .and_then(AbsPath::parent) + .and_then(AbsPath::parent) + }); + } + + None + } } pub(crate) fn make_lockfile_copy( @@ -54,3 +152,74 @@ } } } + +#[test] +fn cargo_config_file_reader_works() { + #[cfg(target_os = "windows")] + let root = "C://ROOT"; + + #[cfg(not(target_os = "windows"))] + let root = "/ROOT"; + + let toml = format!( + r##" +alias.foo = "abc" +alias.bar = "🙂" # {root}/home/.cargo/config.toml +alias.sub-example = [ + "sub", # {root}/foo/.cargo/config.toml + "example", # {root}/❤️💛💙/💝/.cargo/config.toml +] +build.rustflags = [ + "--flag", # {root}/home/.cargo/config.toml + "env", # environment variable `CARGO_BUILD_RUSTFLAGS` + "cli", # --config cli option +] +env.CARGO_WORKSPACE_DIR.relative = true # {root}/home/.cargo/config.toml +env.CARGO_WORKSPACE_DIR.value = "" # {root}/home/.cargo/config.toml +"## + ); + + let reader = CargoConfigFileReader::new(&toml).unwrap(); + + let alias_foo = reader.get_spanned(["alias", "foo"]).unwrap(); + assert_eq!(alias_foo.as_ref().as_str().unwrap(), "abc"); + assert!(reader.get_origin_root(alias_foo).is_none()); + + let alias_bar = reader.get_spanned(["alias", "bar"]).unwrap(); + assert_eq!(alias_bar.as_ref().as_str().unwrap(), "🙂"); + assert_eq!(reader.get_origin_root(alias_bar).unwrap().as_str(), format!("{root}/home")); + + let alias_sub_example = reader.get_spanned(["alias", "sub-example"]).unwrap(); + assert!(reader.get_origin_root(alias_sub_example).is_none()); + let alias_sub_example = alias_sub_example.as_ref().as_array().unwrap(); + + assert_eq!(alias_sub_example[0].get_ref().as_str().unwrap(), "sub"); + assert_eq!( + reader.get_origin_root(&alias_sub_example[0]).unwrap().as_str(), + format!("{root}/foo") + ); + + 
assert_eq!(alias_sub_example[1].get_ref().as_str().unwrap(), "example"); + assert_eq!( + reader.get_origin_root(&alias_sub_example[1]).unwrap().as_str(), + format!("{root}/❤️💛💙/💝") + ); + + let build_rustflags = reader.get(["build", "rustflags"]).unwrap().as_array().unwrap(); + assert_eq!( + reader.get_origin_root(&build_rustflags[0]).unwrap().as_str(), + format!("{root}/home") + ); + assert!(reader.get_origin_root(&build_rustflags[1]).is_none()); + assert!(reader.get_origin_root(&build_rustflags[2]).is_none()); + + let env_cargo_workspace_dir = + reader.get(["env", "CARGO_WORKSPACE_DIR"]).unwrap().as_table().unwrap(); + let env_relative = &env_cargo_workspace_dir["relative"]; + assert!(env_relative.as_ref().as_bool().unwrap()); + assert_eq!(reader.get_origin_root(env_relative).unwrap().as_str(), format!("{root}/home")); + + let env_val = &env_cargo_workspace_dir["value"]; + assert_eq!(env_val.as_ref().as_str().unwrap(), ""); + assert_eq!(reader.get_origin_root(env_val).unwrap().as_str(), format!("{root}/home")); +}
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs index ae0458a..51c4479 100644 --- a/crates/project-model/src/env.rs +++ b/crates/project-model/src/env.rs
@@ -2,9 +2,8 @@ use base_db::Env; use paths::Utf8Path; use rustc_hash::FxHashMap; -use toolchain::Tool; -use crate::{ManifestPath, PackageData, TargetKind, cargo_config_file::CargoConfigFile}; +use crate::{PackageData, TargetKind, cargo_config_file::CargoConfigFile}; /// Recreates the compile-time environment variables that Cargo sets. /// @@ -48,8 +47,8 @@ ); } -pub(crate) fn inject_cargo_env(env: &mut Env) { - env.set("CARGO", Tool::Cargo.path().to_string()); +pub(crate) fn inject_cargo_env(env: &mut Env, cargo_path: &Utf8Path) { + env.set("CARGO", cargo_path.as_str()); } pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: TargetKind) { @@ -62,46 +61,48 @@ } pub(crate) fn cargo_config_env( - manifest: &ManifestPath, config: &Option<CargoConfigFile>, extra_env: &FxHashMap<String, Option<String>>, ) -> Env { + use toml::de::*; + let mut env = Env::default(); env.extend(extra_env.iter().filter_map(|(k, v)| v.as_ref().map(|v| (k.clone(), v.clone())))); - let Some(serde_json::Value::Object(env_json)) = config.as_ref().and_then(|c| c.get("env")) - else { + let Some(config_reader) = config.as_ref().and_then(|c| c.read()) else { + return env; + }; + let Some(env_toml) = config_reader.get(["env"]).and_then(|it| it.as_table()) else { return env; }; - // FIXME: The base here should be the parent of the `.cargo/config` file, not the manifest. - // But cargo does not provide this information. - let base = <_ as AsRef<Utf8Path>>::as_ref(manifest.parent()); - - for (key, entry) in env_json { - let value = match entry { - serde_json::Value::String(s) => s.clone(), - serde_json::Value::Object(entry) => { + for (key, entry) in env_toml { + let key = key.as_ref().as_ref(); + let value = match entry.as_ref() { + DeValue::String(s) => String::from(s.clone()), + DeValue::Table(entry) => { // Each entry MUST have a `value` key. 
- let Some(value) = entry.get("value").and_then(|v| v.as_str()) else { + let Some(map) = entry.get("value").and_then(|v| v.as_ref().as_str()) else { continue; }; // If the entry already exists in the environment AND the `force` key is not set to // true, then don't overwrite the value. if extra_env.get(key).is_some_and(Option::is_some) - && !entry.get("force").and_then(|v| v.as_bool()).unwrap_or(false) + && !entry.get("force").and_then(|v| v.as_ref().as_bool()).unwrap_or(false) { continue; } - if entry - .get("relative") - .and_then(|v| v.as_bool()) - .is_some_and(std::convert::identity) - { - base.join(value).to_string() + if let Some(base) = entry.get("relative").and_then(|v| { + if v.as_ref().as_bool().is_some_and(std::convert::identity) { + config_reader.get_origin_root(v) + } else { + None + } + }) { + base.join(map).to_string() } else { - value.to_owned() + map.to_owned() } } _ => continue, @@ -115,43 +116,30 @@ #[test] fn parse_output_cargo_config_env_works() { + use itertools::Itertools; + + let cwd = paths::AbsPathBuf::try_from( + paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap(), + ) + .unwrap(); + let config_path = cwd.join(".cargo").join("config.toml"); let raw = r#" -{ - "env": { - "CARGO_WORKSPACE_DIR": { - "relative": true, - "value": "" - }, - "INVALID": { - "relative": "invalidbool", - "value": "../relative" - }, - "RELATIVE": { - "relative": true, - "value": "../relative" - }, - "TEST": { - "value": "test" - }, - "FORCED": { - "value": "test", - "force": true - }, - "UNFORCED": { - "value": "test", - "force": false - }, - "OVERWRITTEN": { - "value": "test" - }, - "NOT_AN_OBJECT": "value" - } -} +env.CARGO_WORKSPACE_DIR.relative = true +env.CARGO_WORKSPACE_DIR.value = "" +env.INVALID.relative = "invalidbool" +env.INVALID.value = "../relative" +env.RELATIVE.relative = true +env.RELATIVE.value = "../relative" +env.TEST.value = "test" +env.FORCED.value = "test" +env.FORCED.force = true +env.UNFORCED.value = "test" 
+env.UNFORCED.forced = false +env.OVERWRITTEN.value = "test" +env.NOT_AN_OBJECT = "value" "#; - let config: CargoConfigFile = serde_json::from_str(raw).unwrap(); - let cwd = paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap(); - let manifest = paths::AbsPathBuf::assert(cwd.join("Cargo.toml")); - let manifest = ManifestPath::try_from(manifest).unwrap(); + let raw = raw.lines().map(|l| format!("{l} # {config_path}")).join("\n"); + let config = CargoConfigFile::from_string_for_test(raw); let extra_env = [ ("FORCED", Some("ignored")), ("UNFORCED", Some("newvalue")), @@ -161,7 +149,7 @@ .iter() .map(|(k, v)| (k.to_string(), v.map(ToString::to_string))) .collect(); - let env = cargo_config_env(&manifest, &Some(config), &extra_env); + let env = cargo_config_env(&Some(config), &extra_env); assert_eq!(env.get("CARGO_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str())); assert_eq!(env.get("RELATIVE").as_deref(), Some(cwd.join("../relative").as_str())); assert_eq!(env.get("INVALID").as_deref(), Some("../relative"));
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 920afe6..1b31138 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs
@@ -8,6 +8,7 @@ use std::{env, fs, ops::Not, path::Path, process::Command}; use anyhow::{Result, format_err}; +use base_db::Env; use itertools::Itertools; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; @@ -172,6 +173,32 @@ } } + pub fn tool_path(&self, tool: Tool, current_dir: impl AsRef<Path>, envs: &Env) -> Utf8PathBuf { + match self.root() { + Some(root) => { + let mut cmd = toolchain::command( + Tool::Rustup.path(), + current_dir, + &envs + .into_iter() + .map(|(k, v)| (k.clone(), Some(v.clone()))) + .collect::<FxHashMap<_, _>>(), + ); + if !envs.contains_key("RUSTUP_TOOLCHAIN") + && std::env::var_os("RUSTUP_TOOLCHAIN").is_none() + { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(root)); + } + + cmd.arg("which"); + cmd.arg(tool.name()); + (|| Some(Utf8PathBuf::from(String::from_utf8(cmd.output().ok()?.stdout).ok()?)))() + .unwrap_or_else(|| Utf8PathBuf::from(tool.name())) + } + _ => tool.path(), + } + } + pub fn discover_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> { let root = self.root()?; Some(
diff --git a/crates/project-model/src/toolchain_info/target_tuple.rs b/crates/project-model/src/toolchain_info/target_tuple.rs index 9f12ede..12c64b5 100644 --- a/crates/project-model/src/toolchain_info/target_tuple.rs +++ b/crates/project-model/src/toolchain_info/target_tuple.rs
@@ -53,7 +53,7 @@ } fn cargo_config_build_target(config: &CargoConfigFile) -> Option<Vec<String>> { - match parse_json_cargo_config_build_target(config) { + match parse_toml_cargo_config_build_target(config) { Ok(v) => v, Err(e) => { tracing::debug!("Failed to discover cargo config build target {e:?}"); @@ -63,18 +63,44 @@ } // Parses `"build.target = [target-tuple, target-tuple, ...]"` or `"build.target = "target-tuple"` -fn parse_json_cargo_config_build_target( +fn parse_toml_cargo_config_build_target( config: &CargoConfigFile, ) -> anyhow::Result<Option<Vec<String>>> { - let target = config.get("build").and_then(|v| v.as_object()).and_then(|m| m.get("target")); - match target { - Some(serde_json::Value::String(s)) => Ok(Some(vec![s.to_owned()])), - Some(v) => serde_json::from_value(v.clone()) - .map(Option::Some) - .context("Failed to parse `build.target` as an array of target"), - // t`error: config value `build.target` is not set`, in which case we - // don't wanna log the error - None => Ok(None), + let Some(config_reader) = config.read() else { + return Ok(None); + }; + let Some(target) = config_reader.get_spanned(["build", "target"]) else { + return Ok(None); + }; + + // if the target ends with `.json`, join it to the config file's parent dir. 
+ // See https://github.com/rust-lang/cargo/blob/f7acf448fc127df9a77c52cc2bba027790ac4931/src/cargo/core/compiler/compile_kind.rs#L171-L192 + let join_to_origin_if_json_path = |s: &str, spanned: &toml::Spanned<toml::de::DeValue<'_>>| { + if s.ends_with(".json") { + config_reader + .get_origin_root(spanned) + .map(|p| p.join(s).to_string()) + .unwrap_or_else(|| s.to_owned()) + } else { + s.to_owned() + } + }; + + let parse_err = "Failed to parse `build.target` as an array of target"; + + match target.as_ref() { + toml::de::DeValue::String(s) => { + Ok(Some(vec![join_to_origin_if_json_path(s.as_ref(), target)])) + } + toml::de::DeValue::Array(arr) => arr + .iter() + .map(|v| { + let s = v.as_ref().as_str().context(parse_err)?; + Ok(join_to_origin_if_json_path(s, v)) + }) + .collect::<anyhow::Result<_>>() + .map(Option::Some), + _ => Err(anyhow::anyhow!(parse_err)), } }
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index aa2e159..4d56668 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs
@@ -2,6 +2,7 @@ //! metadata` or `rust-project.json`) into representation stored in the salsa //! database -- `CrateGraph`. +use std::thread::Builder; use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread}; use anyhow::Context; @@ -12,7 +13,7 @@ }; use cfg::{CfgAtom, CfgDiff, CfgOptions}; use intern::{Symbol, sym}; -use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; use span::{Edition, FileId}; @@ -26,7 +27,7 @@ ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, build_dependencies::{BuildScriptOutput, ProcMacroDylibPath}, - cargo_config_file, + cargo_config_file::CargoConfigFile, cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource}, env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env}, project_json::{Crate, CrateArrayIdx}, @@ -267,7 +268,7 @@ tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot"); progress("querying project metadata".to_owned()); - let config_file = cargo_config_file::read(cargo_toml, extra_env, &sysroot); + let config_file = CargoConfigFile::load(cargo_toml, extra_env, &sysroot); let config_file_ = config_file.clone(); let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml, &config_file_); let targets = @@ -301,31 +302,39 @@ // We can speed up loading a bit by spawning all of these processes in parallel (especially // on systems were process spawning is delayed) let join = thread::scope(|s| { - let rustc_cfg = s.spawn(|| { - rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env) - }); - let target_data = s.spawn(|| { - target_data::get( - toolchain_config, - targets.first().map(Deref::deref), - extra_env, - ).inspect_err(|e| { - tracing::error!(%e, "failed fetching data layout for 
{cargo_toml:?} workspace") + let rustc_cfg = Builder::new() + .name("ProjectWorkspace::rustc_cfg".to_owned()) + .spawn_scoped(s, || { + rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env) }) - }); - - let rustc_dir = s.spawn(|| { - let rustc_dir = match rustc_source { - Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) - .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), - Some(RustLibSource::Discover) => { - sysroot.discover_rustc_src().ok_or_else(|| { - Some("Failed to discover rustc source for sysroot.".to_owned()) + .expect("failed to spawn thread"); + let target_data = Builder::new() + .name("ProjectWorkspace::target_data".to_owned()) + .spawn_scoped(s, || { + target_data::get(toolchain_config, targets.first().map(Deref::deref), extra_env) + .inspect_err(|e| { + tracing::error!(%e, + "failed fetching data layout for \ + {cargo_toml:?} workspace" + ) }) - } - None => Err(None), - }; - rustc_dir.and_then(|rustc_dir| { + }) + .expect("failed to spawn thread"); + + let rustc_dir = Builder::new() + .name("ProjectWorkspace::rustc_dir".to_owned()) + .spawn_scoped(s, || { + let rustc_dir = match rustc_source { + Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) + .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), + Some(RustLibSource::Discover) => { + sysroot.discover_rustc_src().ok_or_else(|| { + Some("Failed to discover rustc source for sysroot.".to_owned()) + }) + } + None => Err(None), + }; + rustc_dir.and_then(|rustc_dir| { info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source"); match FetchMetadata::new( &rustc_dir, @@ -359,31 +368,41 @@ Err(e) => { tracing::error!( %e, - "Failed to read Cargo metadata from rustc source at {rustc_dir}", + "Failed to read Cargo metadata from rustc source \ + at {rustc_dir}", ); Err(Some(format!( - "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}" + "Failed to read Cargo metadata 
from rustc source \ + at {rustc_dir}: {e}" ))) } } }) - }); + }) + .expect("failed to spawn thread"); - let cargo_metadata = s.spawn(|| fetch_metadata.exec(false, progress)); - let loaded_sysroot = s.spawn(|| { - sysroot.load_workspace( - &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config( - config, - workspace_dir, - &targets, - toolchain.clone(), - )), - config.no_deps, - progress, - ) - }); - let cargo_env = - s.spawn(move || cargo_config_env(cargo_toml, &config_file, &config.extra_env)); + let cargo_metadata = Builder::new() + .name("ProjectWorkspace::cargo_metadata".to_owned()) + .spawn_scoped(s, || fetch_metadata.exec(false, progress)) + .expect("failed to spawn thread"); + let loaded_sysroot = Builder::new() + .name("ProjectWorkspace::loaded_sysroot".to_owned()) + .spawn_scoped(s, || { + sysroot.load_workspace( + &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config( + config, + &targets, + toolchain.clone(), + )), + config.no_deps, + progress, + ) + }) + .expect("failed to spawn thread"); + let cargo_env = Builder::new() + .name("ProjectWorkspace::cargo_env".to_owned()) + .spawn_scoped(s, move || cargo_config_env(&config_file, &config.extra_env)) + .expect("failed to spawn thread"); thread::Result::Ok(( rustc_cfg.join()?, target_data.join()?, @@ -481,7 +500,6 @@ sysroot.load_workspace( &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config( config, - project_json.project_root(), &targets, toolchain.clone(), )), @@ -526,7 +544,7 @@ None => Sysroot::empty(), }; - let config_file = cargo_config_file::read(detached_file, &config.extra_env, &sysroot); + let config_file = CargoConfigFile::load(detached_file, &config.extra_env, &sysroot); let query_config = QueryConfig::Cargo(&sysroot, detached_file, &config_file); let toolchain = version::get(query_config, &config.extra_env).ok().flatten(); let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env) @@ -537,7 +555,6 @@ let loaded_sysroot = 
sysroot.load_workspace( &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config( config, - dir, &targets, toolchain.clone(), )), @@ -563,8 +580,7 @@ config.no_deps, ); let cargo_script = fetch_metadata.exec(false, &|_| ()).ok().map(|(ws, error)| { - let cargo_config_extra_env = - cargo_config_env(detached_file, &config_file, &config.extra_env); + let cargo_config_extra_env = cargo_config_env(&config_file, &config.extra_env); ( CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false), WorkspaceBuildScripts::default(), @@ -1194,6 +1210,7 @@ load, crate_ws_data.clone(), ); + let cargo_path = sysroot.tool_path(Tool::Cargo, cargo.workspace_root(), cargo.env()); let cfg_options = CfgOptions::from_iter(rustc_cfg); @@ -1268,6 +1285,7 @@ } else { Arc::new(pkg_data.manifest.parent().to_path_buf()) }, + &cargo_path, ); if let TargetKind::Lib { .. } = kind { lib_tgt = Some((crate_id, name.clone())); @@ -1375,6 +1393,7 @@ }, // FIXME: This looks incorrect but I don't think this causes problems. 
crate_ws_data, + &cargo_path, ); } } @@ -1453,6 +1472,7 @@ override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, crate_ws_data: Arc<CrateWorkspaceData>, + cargo_path: &Utf8Path, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -1503,6 +1523,7 @@ } else { Arc::new(pkg_data.manifest.parent().to_path_buf()) }, + cargo_path, ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate @@ -1560,11 +1581,12 @@ build_data: Option<(&BuildScriptOutput, bool)>, cfg_options: CfgOptions, file_id: FileId, - cargo_name: &str, + cargo_crate_name: &str, kind: TargetKind, origin: CrateOrigin, crate_ws_data: Arc<CrateWorkspaceData>, proc_macro_cwd: Arc<AbsPathBuf>, + cargo_path: &Utf8Path, ) -> CrateBuilderId { let edition = pkg.edition; let potential_cfg_options = if pkg.features.is_empty() { @@ -1591,8 +1613,8 @@ let mut env = cargo.env().clone(); inject_cargo_package_env(&mut env, pkg); - inject_cargo_env(&mut env); - inject_rustc_tool_env(&mut env, cargo_name, kind); + inject_cargo_env(&mut env, cargo_path); + inject_rustc_tool_env(&mut env, cargo_crate_name, kind); if let Some(envs) = build_data.map(|(it, _)| &it.envs) { env.extend_from_other(envs); @@ -1600,7 +1622,7 @@ let crate_id = crate_graph.add_crate_root( file_id, edition, - Some(CrateDisplayName::from_canonical_name(cargo_name)), + Some(CrateDisplayName::from_canonical_name(cargo_crate_name)), Some(pkg.version.to_string()), cfg_options, potential_cfg_options, @@ -1614,7 +1636,9 @@ let proc_macro = match build_data { Some((BuildScriptOutput { proc_macro_dylib_path, .. 
}, has_errors)) => { match proc_macro_dylib_path { - ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())), + ProcMacroDylibPath::Path(path) => { + Ok((cargo_crate_name.to_owned(), path.clone())) + } ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt), ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound if has_errors => @@ -1867,29 +1891,12 @@ fn sysroot_metadata_config( config: &CargoConfig, - current_dir: &AbsPath, targets: &[String], toolchain_version: Option<Version>, ) -> CargoMetadataConfig { - // We run `cargo metadata` on sysroot with sysroot dir as a working directory, but still pass - // the `targets` from the cargo config evaluated from the workspace's `current_dir`. - // So, we need to *canonicalize* those *might-be-relative-paths-to-custom-target-json-files*. - // - // See https://github.com/rust-lang/cargo/blob/f7acf448fc127df9a77c52cc2bba027790ac4931/src/cargo/core/compiler/compile_kind.rs#L171-L192 - let targets = targets - .iter() - .map(|target| { - if target.ends_with(".json") { - current_dir.join(target).to_string() - } else { - target.to_owned() - } - }) - .collect(); - CargoMetadataConfig { features: Default::default(), - targets, + targets: targets.to_vec(), extra_args: Default::default(), extra_env: config.extra_env.clone(), toolchain_version,
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index b9dfe1f..782ec55 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml
@@ -47,7 +47,7 @@ tracing-subscriber.workspace = true tracing-tree.workspace = true triomphe.workspace = true -toml = "0.8.23" +toml.workspace = true nohash-hasher.workspace = true walkdir = "2.5.0" semver.workspace = true
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index cc8db1b..44c442f 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs
@@ -307,6 +307,11 @@ config.rediscover_workspaces(); } + rayon::ThreadPoolBuilder::new() + .thread_name(|ix| format!("RayonWorker{}", ix)) + .build_global() + .unwrap(); + // If the io_threads have an error, there's usually an error on the main // loop too because the channels are closed. Ensure we report both errors. match (rust_analyzer::main_loop(config, connection), io_threads.join()) {
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 5e4a277..395a59c 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -1186,6 +1186,7 @@ closure_capture_hints: true, binding_mode_hints: true, implicit_drop_hints: true, + implied_dyn_trait_hints: true, lifetime_elision_hints: ide::LifetimeElisionHints::Always, param_names_for_lifetime_elision_hints: true, hide_named_constructor_hints: false,
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs index e3e3a14..975e81a 100644 --- a/crates/rust-analyzer/src/cli/ssr.rs +++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -50,7 +50,6 @@ /// for much else. pub fn run(self) -> anyhow::Result<()> { use ide_db::base_db::SourceDatabase; - use ide_db::symbol_index::SymbolsDatabase; let cargo_config = CargoConfig { all_targets: true, set_test: true, ..CargoConfig::default() }; let load_cargo_config = LoadCargoConfig { @@ -69,7 +68,7 @@ match_finder.add_search_pattern(pattern)?; } if let Some(debug_snippet) = &self.debug { - for &root in db.local_roots().iter() { + for &root in ide_db::symbol_index::LocalRoots::get(db).roots(db).iter() { let sr = db.source_root(root).source_root(db); for file_id in sr.iter() { for debug_info in match_finder.debug_where_text_equal(
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index b8c4b9f..1b15d83 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs
@@ -263,6 +263,9 @@ /// Show inlay hints for the implied type parameter `Sized` bound. inlayHints_implicitSizedBoundHints_enable: bool = false, + /// Show inlay hints for the implied `dyn` keyword in trait object types. + inlayHints_impliedDynTraitHints_enable: bool = true, + /// Show inlay type hints for elided lifetimes in function signatures. inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = LifetimeElisionDef::Never, @@ -1983,6 +1986,7 @@ &client_capability_fields, ), implicit_drop_hints: self.inlayHints_implicitDrops_enable().to_owned(), + implied_dyn_trait_hints: self.inlayHints_impliedDynTraitHints_enable().to_owned(), range_exclusive_hints: self.inlayHints_rangeExclusiveHints_enable().to_owned(), minicore, }
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index c2b887c..099eed9 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs
@@ -452,7 +452,11 @@ // Project has loaded properly, kick off initial flycheck self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None)); } - if self.config.prefill_caches() { + // delay initial cache priming until proc macros are loaded, or we will load up a bunch of garbage into salsa + let proc_macros_loaded = self.config.prefill_caches() + && !self.config.expand_proc_macros() + || self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false); + if proc_macros_loaded { self.prime_caches_queue.request_op("became quiescent".to_owned(), ()); } }
diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs index bb09933..83b2413 100644 --- a/crates/span/src/map.rs +++ b/crates/span/src/map.rs
@@ -156,6 +156,43 @@ } } +impl<S> Drop for SpanMap<S> { + fn drop(&mut self) { + struct SendPtr(*mut [()]); + unsafe impl Send for SendPtr {} + static SPAN_MAP_DROP_THREAD: std::sync::OnceLock< + std::sync::mpsc::Sender<(SendPtr, fn(SendPtr))>, + > = std::sync::OnceLock::new(); + SPAN_MAP_DROP_THREAD + .get_or_init(|| { + let (sender, receiver) = std::sync::mpsc::channel::<(SendPtr, fn(SendPtr))>(); + std::thread::Builder::new() + .name("SpanMapDropper".to_owned()) + .spawn(move || receiver.iter().for_each(|(b, drop)| drop(b))) + .unwrap(); + sender + }) + .send(( + unsafe { + SendPtr(std::mem::transmute::<*mut [(TextSize, SpanData<S>)], *mut [()]>( + Box::<[(TextSize, SpanData<S>)]>::into_raw( + std::mem::take(&mut self.spans).into_boxed_slice(), + ), + )) + }, + |b: SendPtr| { + _ = unsafe { + Box::from_raw(std::mem::transmute::< + *mut [()], + *mut [(TextSize, SpanData<S>)], + >(b.0)) + } + }, + )) + .unwrap(); + } +} + #[derive(PartialEq, Eq, Hash, Debug)] pub struct RealSpanMap { file_id: EditionedFileId,
diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs index a3f6ab8..b82eeaa 100644 --- a/crates/stdx/src/anymap.rs +++ b/crates/stdx/src/anymap.rs
@@ -117,7 +117,7 @@ #[inline] #[must_use] pub fn get<T: IntoBox<A>>(&self) -> Option<&T> { - self.raw.get(&TypeId::of::<T>()).map(|any| unsafe { any.downcast_ref_unchecked::<T>() }) + self.raw.get(&TypeId::of::<T>()).map(|any| unsafe { any.downcast_unchecked_ref::<T>() }) } /// Gets the entry for the given type in the collection for in-place manipulation @@ -172,7 +172,7 @@ #[inline] #[must_use] pub fn into_mut(self) -> &'map mut V { - unsafe { self.inner.into_mut().downcast_mut_unchecked() } + unsafe { self.inner.into_mut().downcast_unchecked_mut() } } } @@ -181,7 +181,7 @@ /// and returns a mutable reference to it #[inline] pub fn insert(self, value: V) -> &'map mut V { - unsafe { self.inner.insert(value.into_box()).downcast_mut_unchecked() } + unsafe { self.inner.insert(value.into_box()).downcast_unchecked_mut() } } } @@ -244,14 +244,14 @@ /// # Safety /// /// The caller must ensure that `T` matches the trait object, on pain of *undefined behavior*. - unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T; + unsafe fn downcast_unchecked_ref<T: 'static>(&self) -> &T; /// Downcast from `&mut Any` to `&mut T`, without checking the type matches. /// /// # Safety /// /// The caller must ensure that `T` matches the trait object, on pain of *undefined behavior*. - unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T; + unsafe fn downcast_unchecked_mut<T: 'static>(&mut self) -> &mut T; } /// A trait for the conversion of an object into a boxed trait object. @@ -269,12 +269,12 @@ } #[inline] - unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T { + unsafe fn downcast_unchecked_ref<T: 'static>(&self) -> &T { unsafe { &*std::ptr::from_ref::<Self>(self).cast::<T>() } } #[inline] - unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T { + unsafe fn downcast_unchecked_mut<T: 'static>(&mut self) -> &mut T { unsafe { &mut *std::ptr::from_mut::<Self>(self).cast::<T>() } } }
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index a3c19f7..27d2889 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs
@@ -67,7 +67,7 @@ /// files. #[derive(Debug, PartialEq, Eq)] pub struct Parse<T> { - green: GreenNode, + green: Option<GreenNode>, errors: Option<Arc<[SyntaxError]>>, _ty: PhantomData<fn() -> T>, } @@ -81,14 +81,14 @@ impl<T> Parse<T> { fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> { Parse { - green, + green: Some(green), errors: if errors.is_empty() { None } else { Some(errors.into()) }, _ty: PhantomData, } } pub fn syntax_node(&self) -> SyntaxNode { - SyntaxNode::new_root(self.green.clone()) + SyntaxNode::new_root(self.green.as_ref().unwrap().clone()) } pub fn errors(&self) -> Vec<SyntaxError> { @@ -100,8 +100,10 @@ impl<T: AstNode> Parse<T> { /// Converts this parse result into a parse result for an untyped syntax tree. - pub fn to_syntax(self) -> Parse<SyntaxNode> { - Parse { green: self.green, errors: self.errors, _ty: PhantomData } + pub fn to_syntax(mut self) -> Parse<SyntaxNode> { + let green = self.green.take(); + let errors = self.errors.take(); + Parse { green, errors, _ty: PhantomData } } /// Gets the parsed syntax tree as a typed ast node. 
@@ -124,9 +126,9 @@ } impl Parse<SyntaxNode> { - pub fn cast<N: AstNode>(self) -> Option<Parse<N>> { + pub fn cast<N: AstNode>(mut self) -> Option<Parse<N>> { if N::cast(self.syntax_node()).is_some() { - Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData }) + Some(Parse { green: self.green.take(), errors: self.errors.take(), _ty: PhantomData }) } else { None } @@ -162,7 +164,7 @@ edition, ) .map(|(green_node, errors, _reparsed_range)| Parse { - green: green_node, + green: Some(green_node), errors: if errors.is_empty() { None } else { Some(errors.into()) }, _ty: PhantomData, }) @@ -198,6 +200,27 @@ } } +impl<T> Drop for Parse<T> { + fn drop(&mut self) { + let Some(green) = self.green.take() else { + return; + }; + static PARSE_DROP_THREAD: std::sync::OnceLock<std::sync::mpsc::Sender<GreenNode>> = + std::sync::OnceLock::new(); + PARSE_DROP_THREAD + .get_or_init(|| { + let (sender, receiver) = std::sync::mpsc::channel::<GreenNode>(); + std::thread::Builder::new() + .name("ParseNodeDropper".to_owned()) + .spawn(move || receiver.iter().for_each(drop)) + .unwrap(); + sender + }) + .send(green) + .unwrap(); + } +} + /// `SourceFile` represents a parse tree for a single Rust file. pub use crate::ast::SourceFile;
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index d3afac8..62867fd 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs
@@ -43,7 +43,7 @@ #[macro_export] macro_rules! assert_eq_text { ($left:expr, $right:expr) => { - assert_eq_text!($left, $right,) + $crate::assert_eq_text!($left, $right,) }; ($left:expr, $right:expr, $($tt:tt)*) => {{ let left = $left;
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 696928b..d5905af 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs
@@ -68,6 +68,7 @@ //! transmute: //! try: infallible //! tuple: +//! unary_ops: //! unpin: sized //! unsize: sized //! write: fmt @@ -591,13 +592,13 @@ impl<T: PointeeSized> Deref for &T { type Target = T; fn deref(&self) -> &T { - loop {} + *self } } impl<T: PointeeSized> Deref for &mut T { type Target = T; fn deref(&self) -> &T { - loop {} + *self } } // region:deref_mut @@ -1056,6 +1057,9 @@ type Output = $t; fn add(self, other: $t) -> $t { self + other } } + impl AddAssign for $t { + fn add_assign(&mut self, other: $t) { *self += other; } + } )*) } @@ -1063,6 +1067,24 @@ // endregion:builtin_impls // endregion:add + // region:unary_ops + #[lang = "not"] + pub const trait Not { + type Output; + + #[must_use] + fn not(self) -> Self::Output; + } + + #[lang = "neg"] + pub const trait Neg { + type Output; + + #[must_use = "this returns the result of the operation, without modifying the original"] + fn neg(self) -> Self::Output; + } + // endregion:unary_ops + // region:coroutine mod coroutine { use crate::pin::Pin; @@ -1118,6 +1140,12 @@ pub trait Eq: PartialEq<Self> + PointeeSized {} + // region:builtin_impls + impl PartialEq for () { + fn eq(&self, other: &()) -> bool { true } + } + // endregion:builtin_impls + // region:derive #[rustc_builtin_macro] pub macro PartialEq($item:item) {}
diff --git a/docs/book/book.toml b/docs/book/book.toml index edf11fa..c77eabd 100644 --- a/docs/book/book.toml +++ b/docs/book/book.toml
@@ -1,7 +1,6 @@ [book] authors = ["The rust-analyzer authors"] language = "en" -multilingual = false src = "src" title = "rust-analyzer"
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md index 21e199c..f0da2bd 100644 --- a/docs/book/src/configuration_generated.md +++ b/docs/book/src/configuration_generated.md
@@ -1033,6 +1033,13 @@ Show inlay hints for the implied type parameter `Sized` bound. +## rust-analyzer.inlayHints.impliedDynTraitHints.enable {#inlayHints.impliedDynTraitHints.enable} + +Default: `true` + +Show inlay hints for the implied `dyn` keyword in trait object types. + + ## rust-analyzer.inlayHints.lifetimeElisionHints.enable {#inlayHints.lifetimeElisionHints.enable} Default: `"never"`
diff --git a/docs/book/src/contributing/testing.md b/docs/book/src/contributing/testing.md index ccee9b8..b94bc16 100644 --- a/docs/book/src/contributing/testing.md +++ b/docs/book/src/contributing/testing.md
@@ -1,4 +1,4 @@ -rust-analyzer's testing is based on *snapshot tests*: a test is a piece of input text, usually a Rust code, and some output text. There is then some testing helper that runs the feature on the input text and compares the result to the output text. +rust-analyzer's testing is based on *snapshot tests*: a test is a piece of input text, usually Rust code, and some output text. There is then some testing helper that runs the feature on the input text and compares the result to the output text. rust-analyzer uses a combination of the crate [`expect-test`](https://docs.rs/expect-test) and a custom testing framework. @@ -20,7 +20,7 @@ } ``` -Sometimes we want to check more that there are no type mismatches. For that we use other helpers. For example, often we want to assert that the type of some expression is some specific type. For that we use the `check_types()` function. It takes a Rust code string with custom annotation, that are common in our test suite. The general scheme of annotation is: +Sometimes we want to check more than that there are no type mismatches. For that we use other helpers. For example, often we want to assert that the type of some expression is some specific type. For that we use the `check_types()` function. It takes a Rust code string with custom annotation, which are common in our test suite. The general scheme of annotation is: - `$0` marks a position. What to do with it is determined by the testing helper. Commonly it denotes the cursor position in IDE tests (for example, hover). - `$0...$0` marks a range, commonly a selection in IDE tests. @@ -31,7 +31,7 @@ // ^^^^ hey ``` -`check_types()` uses labels to assert type: when you attach a label to a range, `check_types()` assert that the type of this range will be what written in the label. +`check_types()` uses labels to assert types: when you attach a label to a range, `check_types()` asserts that the type of this range will be what is written in the label. 
It's all too abstract without an example: ```rust @@ -67,9 +67,9 @@ ); } ``` -The text inside the `expect![[]]` is determined by the helper, `check_infer()` in this case. For `check_infer()`, each line is a range in the source code (the range is counted in bytes and the source is trimmed, indentation is stripped), next to it there is the text in that range, or some part of it with `...` if it's too long, and finally comes the type of that range. +The text inside the `expect![[]]` is determined by the helper, `check_infer()` in this case. For `check_infer()`, each line is a range in the source code (the range is counted in bytes and the source is trimmed, so indentation is stripped); next to it there is the text in that range, or some part of it with `...` if it's too long, and finally comes the type of that range. -The important feature of `expect-test` is that it allows easy update of the expectation. Say you changed something in the code, maybe fixed a bug, and the output in `expect![[]]` needs to change. Or maybe you are writing it from scratch. Writing it by hand is very tedious and prone to mistakes. But `expect-trait` has a magic. You can set the environment variable `UPDATE_EXPECT=1`, then run the test, and it will update automatically! Some editors (e.g. VSCode) make it even more convenient: on them, on the top of every test that uses `expect-test`, next to the usual `Run | Debug` buttons, rust-analyzer also shows an `Update Expect` button. Clicking it will run that test in updating mode. +The important feature of `expect-test` is that it allows easy update of the expectation. Say you changed something in the code, maybe fixed a bug, and the output in `expect![[]]` needs to change. Or maybe you are writing it from scratch. Writing it by hand is very tedious and prone to mistakes. But `expect-test` has some magic. You can set the environment variable `UPDATE_EXPECT=1`, then run the test, and it will update automatically! Some editors (e.g. 
VSCode) make it even more convenient: on them, at the top of every test that uses `expect-test`, next to the usual `Run | Debug` buttons, rust-analyzer also shows an `Update Expect` button. Clicking it will run that test in updating mode. ## Rust code in the tests @@ -77,13 +77,13 @@ The syntax highlighting is very important, not just because it's nice to the eye: it's very easy to make mistakes in test code, and debugging that can be very hard. Often the test will just fail, printing an `{unknown}` type, and you'll have no clue what's going wrong. The syntax is the clue; if something isn't highlighted correctly, that probably means there is an error (there is one exception to this, which we'll discuss later). You can even set the semantic highlighting tag `unresolved_reference` to e.g. red, so you will see such things clearly. -Still, often you won't know what's going wrong. Why you can't fix the test, or worse, you expect it to fail but it doesn't. You can try the code on a real IDE to be sure it works. Later we'll give some tips to fix the test. +Still, often you won't know what's going wrong. Why you can't fix the test, or worse, you expect it to fail but it doesn't. You can try the code on a real IDE to be sure it works. Later we'll give some tips for fixing the test. ### The fixture -The Rust code in a test is not, a fact, a single Rust file. It has a mini-language that allows you to express multiple files, multiple crates, different configs, and more. All options are documented in `crates/test-utils/src/fixture.rs`, but here are some of the common ones: +The Rust code in a test is not, in fact, a single Rust file. It uses a mini-language that allows you to express multiple files, multiple crates, different configs, and more. All options are documented in `crates/test-utils/src/fixture.rs`, but here are some of the common ones: - - `//- minicore: flag1, flag2, ...`. This is by far the most common flag. 
Tests in rust-analyzer don't have access by default to any other type - not `Option`, not `Iterator`, not even `Sized`. This flag allows you to include parts of the `crates/test-utils/src/minicore.rs` file, which mimics `core`. All possible flags are listed at the top of `minicore` along with the flags they imply, then later you can see by `// region:flag` and `// endregion:flag` what code each flag enables. + - `//- minicore: flag1, flag2, ...`. This is by far the most common option. Tests in rust-analyzer don't have access by default to any other type - not `Option`, not `Iterator`, not even `Sized`. This option allows you to include parts of the `crates/test-utils/src/minicore.rs` file, which mimics `core`. All possible flags are listed at the top of `minicore` along with the flags they imply, then later you can see by `// region:flag` and `// endregion:flag` what code each flag enables. - `// /path/to/file.rs crate:crate deps:dep_a,dep_b`. The first component is the filename of the code that follows (until the next file). It is required, but only if you supply this line. Other components in this line are optional. They include `crate:crate_name`, to start a new crate, or `deps:dep_a,dep_b`, to declare dependencies between crates. You can also declare modules as usual in Rust - just name your paths `/foo.rs` or `/foo/mod.rs`, declare `mod foo` and that's it! So the following snippet: @@ -96,11 +96,11 @@ // /main.rs crate:main deps:foo use foo::Bar; ``` -Declares two crates `foo` and `main` where `main` depends on `foo`, with dependency in `Sized` and the `FnX` traits from `core`, and a module of `foo` called `bar`. +declares two crates `foo` and `main`, where `main` depends on `foo`, with dependencies on the `Sized` and `FnX` traits from `core`, and a module of `foo` called `bar`. 
-And as promised, here are some tips to make your test work: +And as promised, here are some tips for making your test work: - - If you use some type/trait, you must *always* include it in `minicore`. Note - not all types from core/std are available there, you can add new (under flags) if you need. And import them if they are not in the prelude. + - If you use some type/trait, you must *always* include it in `minicore`. Note - not all types from core/std are available there, but you can add new ones (under flags) if you need. And import them if they are not in the prelude. - If you use unsized types (`dyn Trait`/slices), you may want to include some or all of the following `minicore` flags: `sized`, `unsize`, `coerce_unsized`, `dispatch_from_dyn`. - If you use closures, consider including the `fn` minicore flag. Async closures need the `async_fn` flag. - - `sized` is commonly needed, consider adding it if you're stuck. + - `sized` is commonly needed, so consider adding it if you're stuck.
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json index 6dd4485..d49d19f 100644 --- a/editors/code/package-lock.json +++ b/editors/code/package-lock.json
@@ -934,6 +934,29 @@ "url": "https://github.com/sponsors/nzakas" } }, + "node_modules/@isaacs/balanced-match": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", + "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/brace-expansion": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", + "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "engines": { + "node": "20 || >=22" + } + }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", @@ -1296,9 +1319,9 @@ "license": "MIT" }, "node_modules/@textlint/linter-formatter/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz", + "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==", "dev": true, "license": "MIT", "dependencies": { @@ -3725,13 +3748,13 @@ } }, "node_modules/foreground-child": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", - "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": 
"sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", "dev": true, "license": "ISC", "dependencies": { - "cross-spawn": "^7.0.0", + "cross-spawn": "^7.0.6", "signal-exit": "^4.0.1" }, "engines": { @@ -3848,15 +3871,15 @@ "optional": true }, "node_modules/glob": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.1.tgz", - "integrity": "sha512-zrQDm8XPnYEKawJScsnM0QzobJxlT/kHOOlRTio8IH/GrmxRE5fjllkzdaHclIuNjUQTJYH2xHNIGfdpJkDJUw==", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-11.1.0.tgz", + "integrity": "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw==", "dev": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^4.0.1", - "minimatch": "^10.0.0", + "foreground-child": "^3.3.1", + "jackspeak": "^4.1.1", + "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" @@ -3885,13 +3908,13 @@ } }, "node_modules/glob/node_modules/minimatch": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz", - "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==", + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", + "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", "dev": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "brace-expansion": "^2.0.1" + "@isaacs/brace-expansion": "^5.0.0" }, "engines": { "node": "20 || >=22" @@ -4363,9 +4386,9 @@ } }, "node_modules/jackspeak": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.0.tgz", - "integrity": 
"sha512-9DDdhb5j6cpeitCbvLO7n7J4IxnbM6hoF6O1g4HQ5TfhvvKN8ywDM7668ZhMHRqVmxqhps/F6syWK2KcPxYlkw==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz", + "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -4395,9 +4418,9 @@ "license": "MIT" }, "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "dev": true, "license": "MIT", "dependencies": { @@ -6976,9 +6999,9 @@ } }, "node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", "dev": true, "license": "MIT", "engines": {
diff --git a/editors/code/package.json b/editors/code/package.json index 7db4986..4d1ae48 100644 --- a/editors/code/package.json +++ b/editors/code/package.json
@@ -2346,6 +2346,16 @@ { "title": "Inlay Hints", "properties": { + "rust-analyzer.inlayHints.impliedDynTraitHints.enable": { + "markdownDescription": "Show inlay hints for the implied `dyn` keyword in trait object types.", + "default": true, + "type": "boolean" + } + } + }, + { + "title": "Inlay Hints", + "properties": { "rust-analyzer.inlayHints.lifetimeElisionHints.enable": { "markdownDescription": "Show inlay type hints for elided lifetimes in function signatures.", "default": "never",
diff --git a/lib/smol_str/CHANGELOG.md b/lib/smol_str/CHANGELOG.md new file mode 100644 index 0000000..fb65d88 --- /dev/null +++ b/lib/smol_str/CHANGELOG.md
@@ -0,0 +1,38 @@ +# Changelog + +## Unreleased + +## 0.3.4 - 2025-10-23 + +- Added `rust-version` field to `Cargo.toml` + +## 0.3.3 - 2025-10-23 + +- Optimise `StrExt::to_ascii_lowercase_smolstr`, `StrExt::to_ascii_uppercase_smolstr` + ~2x speedup inline, ~4-22x for heap. +- Optimise `StrExt::to_lowercase_smolstr`, `StrExt::to_uppercase_smolstr` ~2x speedup inline, ~5-50x for heap. +- Optimise `StrExt::replace_smolstr`, `StrExt::replacen_smolstr` for single ascii replace, + ~3x speedup inline & heap. + +## 0.3.2 - 2024-10-23 + +- Fix `SmolStrBuilder::push` incorrectly padding null bytes when spilling onto the heap on a + multibyte character push + +## 0.3.1 - 2024-09-04 + +- Fix `SmolStrBuilder` leaking implementation details + +## 0.3.0 - 2024-09-04 + +- Remove deprecated `SmolStr::new_inline_from_ascii` function +- Remove `SmolStr::to_string` in favor of `ToString::to_string` +- Add `impl AsRef<[u8]> for SmolStr` impl +- Add `impl AsRef<OsStr> for SmolStr` impl +- Add `impl AsRef<Path> for SmolStr` impl +- Add `SmolStrBuilder` + +## 0.2.2 - 2024-05-14 + +- Add `StrExt` trait providing `to_lowercase_smolstr`, `replace_smolstr` and similar +- Add `PartialEq` optimization for `ptr_eq`-able representations
diff --git a/lib/smol_str/Cargo.toml b/lib/smol_str/Cargo.toml new file mode 100644 index 0000000..118b259 --- /dev/null +++ b/lib/smol_str/Cargo.toml
@@ -0,0 +1,37 @@ +[package] +name = "smol_str" +version = "0.3.4" +description = "small-string optimized string type with O(1) clone" +license = "MIT OR Apache-2.0" +repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/smol_str" +authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>", "Lukas Wirth <lukastw97@gmail.com>"] +edition = "2024" +rust-version = "1.89" + +[package.metadata.docs.rs] +rustdoc-args = ["--cfg", "docsrs"] +all-features = true + +[dependencies] +serde_core = { version = "1.0.220", optional = true, default-features = false } +borsh = { version = "1.4.0", optional = true, default-features = false } +arbitrary = { version = "1.3", optional = true } + +[dev-dependencies] +proptest = "1.5" +serde_json = "1.0" +serde = { version = "1.0", features = ["derive"] } +criterion = "0.7" +rand = "0.9.2" + +[features] +default = ["std"] +std = ["serde_core?/std", "borsh?/std"] +serde = ["dep:serde_core"] + +[[bench]] +name = "bench" +harness = false + +[lints] +workspace = true
diff --git a/lib/smol_str/LICENSE-APACHE b/lib/smol_str/LICENSE-APACHE new file mode 100644 index 0000000..16fe87b --- /dev/null +++ b/lib/smol_str/LICENSE-APACHE
@@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the 
following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.
diff --git a/lib/smol_str/LICENSE-MIT b/lib/smol_str/LICENSE-MIT new file mode 100644 index 0000000..31aa793 --- /dev/null +++ b/lib/smol_str/LICENSE-MIT
@@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE.
diff --git a/lib/smol_str/README.md b/lib/smol_str/README.md new file mode 100644 index 0000000..56296fb --- /dev/null +++ b/lib/smol_str/README.md
@@ -0,0 +1,35 @@ +# smol_str + +[](https://github.com/rust-analyzer/smol_str/actions?query=branch%3Amaster+workflow%3ACI) +[](https://crates.io/crates/smol_str) +[](https://docs.rs/smol_str/) + + +A `SmolStr` is a string type that has the following properties: + +* `size_of::<SmolStr>() == 24` (therefore `== size_of::<String>()` on 64 bit platforms) +* `Clone` is `O(1)` +* Strings are stack-allocated if they are: + * Up to 23 bytes long + * Longer than 23 bytes, but substrings of `WS` (see `src/lib.rs`). Such strings consist + solely of consecutive newlines, followed by consecutive spaces +* If a string does not satisfy the aforementioned conditions, it is heap-allocated +* Additionally, a `SmolStr` can be explicitly created from a `&'static str` without allocation + +Unlike `String`, however, `SmolStr` is immutable. The primary use case for +`SmolStr` is a good enough default storage for tokens of typical programming +languages. Strings consisting of a series of newlines, followed by a series of +whitespace are a typical pattern in computer programs because of indentation. +Note that a specialized interner might be a better solution for some use cases. + +## Benchmarks +Run criterion benches with +```sh +cargo bench --bench \* -- --quick +``` + +## MSRV Policy + +Minimal Supported Rust Version: latest stable. + +Bumping MSRV is not considered a semver-breaking change.
diff --git a/lib/smol_str/benches/bench.rs b/lib/smol_str/benches/bench.rs new file mode 100644 index 0000000..092ee35 --- /dev/null +++ b/lib/smol_str/benches/bench.rs
@@ -0,0 +1,118 @@ +use criterion::{Criterion, criterion_group, criterion_main}; +use rand::distr::{Alphanumeric, SampleString}; +use smol_str::{SmolStr, StrExt, ToSmolStr, format_smolstr}; +use std::hint::black_box; + +/// 12: small (inline) +/// 50: medium (heap) +/// 1000: large (heap) +const TEST_LENS: [usize; 3] = [12, 50, 1000]; + +fn format_bench(c: &mut Criterion) { + for len in TEST_LENS { + let n = rand::random_range(10000..99999); + let str_len = len.checked_sub(n.to_smolstr().len()).unwrap(); + let str = Alphanumeric.sample_string(&mut rand::rng(), str_len); + + c.bench_function(&format!("format_smolstr! len={len}"), |b| { + let mut v = <_>::default(); + b.iter(|| v = format_smolstr!("{str}-{n}")); + assert_eq!(v, format!("{str}-{n}")); + }); + } +} + +fn from_str_bench(c: &mut Criterion) { + for len in TEST_LENS { + let str = Alphanumeric.sample_string(&mut rand::rng(), len); + + c.bench_function(&format!("SmolStr::from len={len}"), |b| { + let mut v = <_>::default(); + b.iter(|| v = SmolStr::from(black_box(&str))); + assert_eq!(v, str); + }); + } +} + +fn clone_bench(c: &mut Criterion) { + for len in TEST_LENS { + let str = Alphanumeric.sample_string(&mut rand::rng(), len); + let smolstr = SmolStr::new(&str); + + c.bench_function(&format!("SmolStr::clone len={len}"), |b| { + let mut v = <_>::default(); + b.iter(|| v = smolstr.clone()); + assert_eq!(v, str); + }); + } +} + +fn eq_bench(c: &mut Criterion) { + for len in TEST_LENS { + let str = Alphanumeric.sample_string(&mut rand::rng(), len); + let smolstr = SmolStr::new(&str); + + c.bench_function(&format!("SmolStr::eq len={len}"), |b| { + let mut v = false; + b.iter(|| v = smolstr == black_box(&str)); + assert!(v); + }); + } +} + +fn to_lowercase_bench(c: &mut Criterion) { + const END_CHAR: char = 'İ'; + + for len in TEST_LENS { + // mostly ascii seq with some non-ascii at the end + let mut str = Alphanumeric.sample_string(&mut rand::rng(), len - END_CHAR.len_utf8()); + str.push(END_CHAR); + let str = 
str.as_str(); + + c.bench_function(&format!("to_lowercase_smolstr len={len}"), |b| { + let mut v = <_>::default(); + b.iter(|| v = str.to_lowercase_smolstr()); + assert_eq!(v, str.to_lowercase()); + }); + } +} + +fn to_ascii_lowercase_bench(c: &mut Criterion) { + for len in TEST_LENS { + let str = Alphanumeric.sample_string(&mut rand::rng(), len); + let str = str.as_str(); + + c.bench_function(&format!("to_ascii_lowercase_smolstr len={len}"), |b| { + let mut v = <_>::default(); + b.iter(|| v = str.to_ascii_lowercase_smolstr()); + assert_eq!(v, str.to_ascii_lowercase()); + }); + } +} + +fn replace_bench(c: &mut Criterion) { + for len in TEST_LENS { + let s_dash_s = Alphanumeric.sample_string(&mut rand::rng(), len / 2) + + "-" + + &Alphanumeric.sample_string(&mut rand::rng(), len - 1 - len / 2); + let str = s_dash_s.as_str(); + + c.bench_function(&format!("replace_smolstr len={len}"), |b| { + let mut v = <_>::default(); + b.iter(|| v = str.replace_smolstr("-", "_")); + assert_eq!(v, str.replace("-", "_")); + }); + } +} + +criterion_group!( + benches, + format_bench, + from_str_bench, + clone_bench, + eq_bench, + to_lowercase_bench, + to_ascii_lowercase_bench, + replace_bench, +); +criterion_main!(benches);
diff --git a/lib/smol_str/src/borsh.rs b/lib/smol_str/src/borsh.rs new file mode 100644 index 0000000..527ce85 --- /dev/null +++ b/lib/smol_str/src/borsh.rs
@@ -0,0 +1,40 @@ +use crate::{INLINE_CAP, Repr, SmolStr}; +use alloc::string::{String, ToString}; +use borsh::{ + BorshDeserialize, BorshSerialize, + io::{Error, ErrorKind, Read, Write}, +}; +use core::mem::transmute; + +impl BorshSerialize for SmolStr { + fn serialize<W: Write>(&self, writer: &mut W) -> borsh::io::Result<()> { + self.as_str().serialize(writer) + } +} + +impl BorshDeserialize for SmolStr { + #[inline] + fn deserialize_reader<R: Read>(reader: &mut R) -> borsh::io::Result<Self> { + let len = u32::deserialize_reader(reader)?; + if (len as usize) < INLINE_CAP { + let mut buf = [0u8; INLINE_CAP]; + reader.read_exact(&mut buf[..len as usize])?; + _ = core::str::from_utf8(&buf[..len as usize]).map_err(|err| { + let msg = err.to_string(); + Error::new(ErrorKind::InvalidData, msg) + })?; + Ok(SmolStr(Repr::Inline { + len: unsafe { transmute::<u8, crate::InlineSize>(len as u8) }, + buf, + })) + } else { + // u8::vec_from_reader always returns Some on success in current implementation + let vec = u8::vec_from_reader(len, reader)? + .ok_or_else(|| Error::other("u8::vec_from_reader unexpectedly returned None"))?; + Ok(SmolStr::from(String::from_utf8(vec).map_err(|err| { + let msg = err.to_string(); + Error::new(ErrorKind::InvalidData, msg) + })?)) + } + } +}
diff --git a/lib/smol_str/src/lib.rs b/lib/smol_str/src/lib.rs new file mode 100644 index 0000000..31695b8 --- /dev/null +++ b/lib/smol_str/src/lib.rs
@@ -0,0 +1,970 @@ +#![cfg_attr(not(feature = "std"), no_std)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +extern crate alloc; + +use alloc::{borrow::Cow, boxed::Box, string::String, sync::Arc}; +use core::{ + borrow::Borrow, + cmp::{self, Ordering}, + convert::Infallible, + fmt, hash, iter, mem, ops, + str::FromStr, +}; + +/// A `SmolStr` is a string type that has the following properties: +/// +/// * `size_of::<SmolStr>() == 24` (therefore `== size_of::<String>()` on 64 bit platforms) +/// * `Clone` is `O(1)` +/// * Strings are stack-allocated if they are: +/// * Up to 23 bytes long +/// * Longer than 23 bytes, but substrings of `WS` (see below). Such strings consist +/// solely of consecutive newlines, followed by consecutive spaces +/// * If a string does not satisfy the aforementioned conditions, it is heap-allocated +/// * Additionally, a `SmolStr` can be explicitly created from a `&'static str` without allocation +/// +/// Unlike `String`, however, `SmolStr` is immutable. The primary use case for +/// `SmolStr` is a good enough default storage for tokens of typical programming +/// languages. Strings consisting of a series of newlines, followed by a series of +/// whitespace are a typical pattern in computer programs because of indentation. +/// Note that a specialized interner might be a better solution for some use cases. +/// +/// `WS`: A string of 32 newlines followed by 128 spaces. +pub struct SmolStr(Repr); + +impl SmolStr { + /// Constructs an inline variant of `SmolStr`. + /// + /// This never allocates. + /// + /// # Panics + /// + /// Panics if `text.len() > 23`. 
+ #[inline] + pub const fn new_inline(text: &str) -> SmolStr { + assert!(text.len() <= INLINE_CAP); // avoids bounds checks in loop + + let text = text.as_bytes(); + let mut buf = [0; INLINE_CAP]; + let mut i = 0; + while i < text.len() { + buf[i] = text[i]; + i += 1 + } + SmolStr(Repr::Inline { + // SAFETY: We know that `len` is less than or equal to the maximum value of `InlineSize` + // as we asserted it. + len: unsafe { InlineSize::transmute_from_u8(text.len() as u8) }, + buf, + }) + } + + /// Constructs a `SmolStr` from a statically allocated string. + /// + /// This never allocates. + #[inline(always)] + pub const fn new_static(text: &'static str) -> SmolStr { + // NOTE: this never uses the inline storage; if a canonical + // representation is needed, we could check for `len() < INLINE_CAP` + // and call `new_inline`, but this would mean an extra branch. + SmolStr(Repr::Static(text)) + } + + /// Constructs a `SmolStr` from a `str`, heap-allocating if necessary. + #[inline(always)] + pub fn new(text: impl AsRef<str>) -> SmolStr { + SmolStr(Repr::new(text.as_ref())) + } + + /// Returns a `&str` slice of this `SmolStr`. + #[inline(always)] + pub fn as_str(&self) -> &str { + self.0.as_str() + } + + /// Returns the length of `self` in bytes. + #[inline(always)] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns `true` if `self` has a length of zero bytes. + #[inline(always)] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Returns `true` if `self` is heap-allocated. 
+ #[inline(always)] + pub const fn is_heap_allocated(&self) -> bool { + matches!(self.0, Repr::Heap(..)) + } +} + +impl Clone for SmolStr { + #[inline] + fn clone(&self) -> Self { + if !self.is_heap_allocated() { + // SAFETY: We verified that the payload of `Repr` is a POD + return unsafe { core::ptr::read(self as *const SmolStr) }; + } + Self(self.0.clone()) + } +} + +impl Default for SmolStr { + #[inline(always)] + fn default() -> SmolStr { + SmolStr(Repr::Inline { len: InlineSize::_V0, buf: [0; INLINE_CAP] }) + } +} + +impl ops::Deref for SmolStr { + type Target = str; + + #[inline(always)] + fn deref(&self) -> &str { + self.as_str() + } +} + +// region: PartialEq implementations + +impl Eq for SmolStr {} +impl PartialEq<SmolStr> for SmolStr { + fn eq(&self, other: &SmolStr) -> bool { + self.0.ptr_eq(&other.0) || self.as_str() == other.as_str() + } +} + +impl PartialEq<str> for SmolStr { + #[inline(always)] + fn eq(&self, other: &str) -> bool { + self.as_str() == other + } +} + +impl PartialEq<SmolStr> for str { + #[inline(always)] + fn eq(&self, other: &SmolStr) -> bool { + other == self + } +} + +impl<'a> PartialEq<&'a str> for SmolStr { + #[inline(always)] + fn eq(&self, other: &&'a str) -> bool { + self == *other + } +} + +impl PartialEq<SmolStr> for &str { + #[inline(always)] + fn eq(&self, other: &SmolStr) -> bool { + *self == other + } +} + +impl PartialEq<String> for SmolStr { + #[inline(always)] + fn eq(&self, other: &String) -> bool { + self.as_str() == other + } +} + +impl PartialEq<SmolStr> for String { + #[inline(always)] + fn eq(&self, other: &SmolStr) -> bool { + other == self + } +} + +impl<'a> PartialEq<&'a String> for SmolStr { + #[inline(always)] + fn eq(&self, other: &&'a String) -> bool { + self == *other + } +} + +impl PartialEq<SmolStr> for &String { + #[inline(always)] + fn eq(&self, other: &SmolStr) -> bool { + *self == other + } +} +// endregion: PartialEq implementations + +impl Ord for SmolStr { + fn cmp(&self, other: &SmolStr) -> 
Ordering { + self.as_str().cmp(other.as_str()) + } +} + +impl PartialOrd for SmolStr { + fn partial_cmp(&self, other: &SmolStr) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +impl hash::Hash for SmolStr { + fn hash<H: hash::Hasher>(&self, hasher: &mut H) { + self.as_str().hash(hasher); + } +} + +impl fmt::Debug for SmolStr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(self.as_str(), f) + } +} + +impl fmt::Display for SmolStr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self.as_str(), f) + } +} + +impl iter::FromIterator<char> for SmolStr { + fn from_iter<I: iter::IntoIterator<Item = char>>(iter: I) -> SmolStr { + from_char_iter(iter.into_iter()) + } +} + +#[inline] +fn from_char_iter(iter: impl Iterator<Item = char>) -> SmolStr { + from_buf_and_chars([0; _], 0, iter) +} + +fn from_buf_and_chars( + mut buf: [u8; INLINE_CAP], + buf_len: usize, + mut iter: impl Iterator<Item = char>, +) -> SmolStr { + let min_size = iter.size_hint().0 + buf_len; + if min_size > INLINE_CAP { + let heap: String = + core::str::from_utf8(&buf[..buf_len]).unwrap().chars().chain(iter).collect(); + if heap.len() <= INLINE_CAP { + // size hint lied + return SmolStr::new_inline(&heap); + } + return SmolStr(Repr::Heap(heap.into_boxed_str().into())); + } + let mut len = buf_len; + while let Some(ch) = iter.next() { + let size = ch.len_utf8(); + if size + len > INLINE_CAP { + let (min_remaining, _) = iter.size_hint(); + let mut heap = String::with_capacity(size + len + min_remaining); + heap.push_str(core::str::from_utf8(&buf[..len]).unwrap()); + heap.push(ch); + heap.extend(iter); + return SmolStr(Repr::Heap(heap.into_boxed_str().into())); + } + ch.encode_utf8(&mut buf[len..]); + len += size; + } + SmolStr(Repr::Inline { + // SAFETY: We know that `len` is less than or equal to the maximum value of `InlineSize` + // as we otherwise return early. 
+ len: unsafe { InlineSize::transmute_from_u8(len as u8) }, + buf, + }) +} + +fn build_from_str_iter<T>(mut iter: impl Iterator<Item = T>) -> SmolStr +where + T: AsRef<str>, + String: iter::Extend<T>, +{ + let mut len = 0; + let mut buf = [0u8; INLINE_CAP]; + while let Some(slice) = iter.next() { + let slice = slice.as_ref(); + let size = slice.len(); + if size + len > INLINE_CAP { + let mut heap = String::with_capacity(size + len); + heap.push_str(core::str::from_utf8(&buf[..len]).unwrap()); + heap.push_str(slice); + heap.extend(iter); + return SmolStr(Repr::Heap(heap.into_boxed_str().into())); + } + buf[len..][..size].copy_from_slice(slice.as_bytes()); + len += size; + } + SmolStr(Repr::Inline { + // SAFETY: We know that `len` is less than or equal to the maximum value of `InlineSize` + // as we otherwise return early. + len: unsafe { InlineSize::transmute_from_u8(len as u8) }, + buf, + }) +} + +impl iter::FromIterator<String> for SmolStr { + fn from_iter<I: iter::IntoIterator<Item = String>>(iter: I) -> SmolStr { + build_from_str_iter(iter.into_iter()) + } +} + +impl<'a> iter::FromIterator<&'a String> for SmolStr { + fn from_iter<I: iter::IntoIterator<Item = &'a String>>(iter: I) -> SmolStr { + SmolStr::from_iter(iter.into_iter().map(|x| x.as_str())) + } +} + +impl<'a> iter::FromIterator<&'a str> for SmolStr { + fn from_iter<I: iter::IntoIterator<Item = &'a str>>(iter: I) -> SmolStr { + build_from_str_iter(iter.into_iter()) + } +} + +impl AsRef<str> for SmolStr { + #[inline(always)] + fn as_ref(&self) -> &str { + self.as_str() + } +} + +impl AsRef<[u8]> for SmolStr { + #[inline(always)] + fn as_ref(&self) -> &[u8] { + self.as_str().as_bytes() + } +} + +#[cfg(feature = "std")] +#[cfg_attr(docsrs, doc(cfg(feature = "std")))] +impl AsRef<std::ffi::OsStr> for SmolStr { + #[inline(always)] + fn as_ref(&self) -> &std::ffi::OsStr { + AsRef::<std::ffi::OsStr>::as_ref(self.as_str()) + } +} + +#[cfg(feature = "std")] +#[cfg_attr(docsrs, doc(cfg(feature = "std")))] +impl 
AsRef<std::path::Path> for SmolStr { + #[inline(always)] + fn as_ref(&self) -> &std::path::Path { + AsRef::<std::path::Path>::as_ref(self.as_str()) + } +} + +impl From<&str> for SmolStr { + #[inline] + fn from(s: &str) -> SmolStr { + SmolStr::new(s) + } +} + +impl From<&mut str> for SmolStr { + #[inline] + fn from(s: &mut str) -> SmolStr { + SmolStr::new(s) + } +} + +impl From<&String> for SmolStr { + #[inline] + fn from(s: &String) -> SmolStr { + SmolStr::new(s) + } +} + +impl From<String> for SmolStr { + #[inline(always)] + fn from(text: String) -> Self { + Self::new(text) + } +} + +impl From<Box<str>> for SmolStr { + #[inline] + fn from(s: Box<str>) -> SmolStr { + SmolStr::new(s) + } +} + +impl From<Arc<str>> for SmolStr { + #[inline] + fn from(s: Arc<str>) -> SmolStr { + let repr = Repr::new_on_stack(s.as_ref()).unwrap_or(Repr::Heap(s)); + Self(repr) + } +} + +impl<'a> From<Cow<'a, str>> for SmolStr { + #[inline] + fn from(s: Cow<'a, str>) -> SmolStr { + SmolStr::new(s) + } +} + +impl From<SmolStr> for Arc<str> { + #[inline(always)] + fn from(text: SmolStr) -> Self { + match text.0 { + Repr::Heap(data) => data, + _ => text.as_str().into(), + } + } +} + +impl From<SmolStr> for String { + #[inline(always)] + fn from(text: SmolStr) -> Self { + text.as_str().into() + } +} + +impl Borrow<str> for SmolStr { + #[inline(always)] + fn borrow(&self) -> &str { + self.as_str() + } +} + +impl FromStr for SmolStr { + type Err = Infallible; + + #[inline] + fn from_str(s: &str) -> Result<SmolStr, Self::Err> { + Ok(SmolStr::from(s)) + } +} + +const INLINE_CAP: usize = InlineSize::_V23 as usize; +const N_NEWLINES: usize = 32; +const N_SPACES: usize = 128; +const WS: &str = "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n "; +const _: () = { + assert!(WS.len() == N_NEWLINES + N_SPACES); + assert!(WS.as_bytes()[N_NEWLINES - 1] == b'\n'); + assert!(WS.as_bytes()[N_NEWLINES] == b' '); +}; + +/// A [`u8`] with a bunch of niches. 
+#[derive(Clone, Copy, Debug, PartialEq)] +#[repr(u8)] +enum InlineSize { + _V0 = 0, + _V1, + _V2, + _V3, + _V4, + _V5, + _V6, + _V7, + _V8, + _V9, + _V10, + _V11, + _V12, + _V13, + _V14, + _V15, + _V16, + _V17, + _V18, + _V19, + _V20, + _V21, + _V22, + _V23, +} + +impl InlineSize { + /// SAFETY: `value` must be less than or equal to [`INLINE_CAP`] + #[inline(always)] + const unsafe fn transmute_from_u8(value: u8) -> Self { + debug_assert!(value <= InlineSize::_V23 as u8); + // SAFETY: The caller is responsible to uphold this invariant + unsafe { mem::transmute::<u8, Self>(value) } + } +} + +#[derive(Clone, Debug)] +enum Repr { + Inline { len: InlineSize, buf: [u8; INLINE_CAP] }, + Static(&'static str), + Heap(Arc<str>), +} + +impl Repr { + /// This function tries to create a new Repr::Inline or Repr::Static + /// If it isn't possible, this function returns None + fn new_on_stack<T>(text: T) -> Option<Self> + where + T: AsRef<str>, + { + let text = text.as_ref(); + + let len = text.len(); + if len <= INLINE_CAP { + let mut buf = [0; INLINE_CAP]; + buf[..len].copy_from_slice(text.as_bytes()); + return Some(Repr::Inline { + // SAFETY: We know that `len` is less than or equal to the maximum value of `InlineSize` + len: unsafe { InlineSize::transmute_from_u8(len as u8) }, + buf, + }); + } + + if len <= N_NEWLINES + N_SPACES { + let bytes = text.as_bytes(); + let possible_newline_count = cmp::min(len, N_NEWLINES); + let newlines = + bytes[..possible_newline_count].iter().take_while(|&&b| b == b'\n').count(); + let possible_space_count = len - newlines; + if possible_space_count <= N_SPACES && bytes[newlines..].iter().all(|&b| b == b' ') { + let spaces = possible_space_count; + let substring = &WS[N_NEWLINES - newlines..N_NEWLINES + spaces]; + return Some(Repr::Static(substring)); + } + } + None + } + + fn new(text: &str) -> Self { + Self::new_on_stack(text).unwrap_or_else(|| Repr::Heap(Arc::from(text))) + } + + #[inline(always)] + fn len(&self) -> usize { + match self { 
+ Repr::Heap(data) => data.len(), + Repr::Static(data) => data.len(), + Repr::Inline { len, .. } => *len as usize, + } + } + + #[inline(always)] + fn is_empty(&self) -> bool { + match self { + Repr::Heap(data) => data.is_empty(), + Repr::Static(data) => data.is_empty(), + &Repr::Inline { len, .. } => len as u8 == 0, + } + } + + #[inline] + fn as_str(&self) -> &str { + match self { + Repr::Heap(data) => data, + Repr::Static(data) => data, + Repr::Inline { len, buf } => { + let len = *len as usize; + // SAFETY: len is guaranteed to be <= INLINE_CAP + let buf = unsafe { buf.get_unchecked(..len) }; + // SAFETY: buf is guaranteed to be valid utf8 for ..len bytes + unsafe { ::core::str::from_utf8_unchecked(buf) } + } + } + } + + fn ptr_eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Heap(l0), Self::Heap(r0)) => Arc::ptr_eq(l0, r0), + (Self::Static(l0), Self::Static(r0)) => core::ptr::eq(l0, r0), + (Self::Inline { len: l_len, buf: l_buf }, Self::Inline { len: r_len, buf: r_buf }) => { + l_len == r_len && l_buf == r_buf + } + _ => false, + } + } +} + +/// Convert value to [`SmolStr`] using [`fmt::Display`], potentially without allocating. +/// +/// Almost identical to [`ToString`], but converts to `SmolStr` instead. +pub trait ToSmolStr { + fn to_smolstr(&self) -> SmolStr; +} + +/// [`str`] methods producing [`SmolStr`]s. +pub trait StrExt: private::Sealed { + /// Returns the lowercase equivalent of this string slice as a new [`SmolStr`], + /// potentially without allocating. + /// + /// See [`str::to_lowercase`]. + #[must_use = "this returns a new SmolStr without modifying the original"] + fn to_lowercase_smolstr(&self) -> SmolStr; + + /// Returns the uppercase equivalent of this string slice as a new [`SmolStr`], + /// potentially without allocating. + /// + /// See [`str::to_uppercase`]. 
+ #[must_use = "this returns a new SmolStr without modifying the original"] + fn to_uppercase_smolstr(&self) -> SmolStr; + + /// Returns the ASCII lowercase equivalent of this string slice as a new [`SmolStr`], + /// potentially without allocating. + /// + /// See [`str::to_ascii_lowercase`]. + #[must_use = "this returns a new SmolStr without modifying the original"] + fn to_ascii_lowercase_smolstr(&self) -> SmolStr; + + /// Returns the ASCII uppercase equivalent of this string slice as a new [`SmolStr`], + /// potentially without allocating. + /// + /// See [`str::to_ascii_uppercase`]. + #[must_use = "this returns a new SmolStr without modifying the original"] + fn to_ascii_uppercase_smolstr(&self) -> SmolStr; + + /// Replaces all matches of a &str with another &str returning a new [`SmolStr`], + /// potentially without allocating. + /// + /// See [`str::replace`]. + #[must_use = "this returns a new SmolStr without modifying the original"] + fn replace_smolstr(&self, from: &str, to: &str) -> SmolStr; + + /// Replaces first N matches of a &str with another &str returning a new [`SmolStr`], + /// potentially without allocating. + /// + /// See [`str::replacen`]. 
+ #[must_use = "this returns a new SmolStr without modifying the original"] + fn replacen_smolstr(&self, from: &str, to: &str, count: usize) -> SmolStr; +} + +impl StrExt for str { + #[inline] + fn to_lowercase_smolstr(&self) -> SmolStr { + let len = self.len(); + if len <= INLINE_CAP { + let (buf, rest) = inline_convert_while_ascii(self, u8::to_ascii_lowercase); + from_buf_and_chars(buf, len - rest.len(), rest.chars().flat_map(|c| c.to_lowercase())) + } else { + self.to_lowercase().into() + } + } + + #[inline] + fn to_uppercase_smolstr(&self) -> SmolStr { + let len = self.len(); + if len <= INLINE_CAP { + let (buf, rest) = inline_convert_while_ascii(self, u8::to_ascii_uppercase); + from_buf_and_chars(buf, len - rest.len(), rest.chars().flat_map(|c| c.to_uppercase())) + } else { + self.to_uppercase().into() + } + } + + #[inline] + fn to_ascii_lowercase_smolstr(&self) -> SmolStr { + let len = self.len(); + if len <= INLINE_CAP { + let mut buf = [0u8; INLINE_CAP]; + buf[..len].copy_from_slice(self.as_bytes()); + buf[..len].make_ascii_lowercase(); + SmolStr(Repr::Inline { + // SAFETY: `len` is in bounds + len: unsafe { InlineSize::transmute_from_u8(len as u8) }, + buf, + }) + } else { + self.to_ascii_lowercase().into() + } + } + + #[inline] + fn to_ascii_uppercase_smolstr(&self) -> SmolStr { + let len = self.len(); + if len <= INLINE_CAP { + let mut buf = [0u8; INLINE_CAP]; + buf[..len].copy_from_slice(self.as_bytes()); + buf[..len].make_ascii_uppercase(); + SmolStr(Repr::Inline { + // SAFETY: `len` is in bounds + len: unsafe { InlineSize::transmute_from_u8(len as u8) }, + buf, + }) + } else { + self.to_ascii_uppercase().into() + } + } + + #[inline] + fn replace_smolstr(&self, from: &str, to: &str) -> SmolStr { + self.replacen_smolstr(from, to, usize::MAX) + } + + #[inline] + fn replacen_smolstr(&self, from: &str, to: &str, mut count: usize) -> SmolStr { + // Fast path for replacing a single ASCII character with another inline. 
+ if let [from_u8] = from.as_bytes() + && let [to_u8] = to.as_bytes() + { + return if self.len() <= count { + // SAFETY: `from_u8` & `to_u8` are ascii + unsafe { replacen_1_ascii(self, |b| if b == from_u8 { *to_u8 } else { *b }) } + } else { + unsafe { + replacen_1_ascii(self, |b| { + if b == from_u8 && count != 0 { + count -= 1; + *to_u8 + } else { + *b + } + }) + } + }; + } + + let mut result = SmolStrBuilder::new(); + let mut last_end = 0; + for (start, part) in self.match_indices(from).take(count) { + // SAFETY: `start` is guaranteed to be within the bounds of `self` as per + // `match_indices` and last_end is always less than or equal to `start` + result.push_str(unsafe { self.get_unchecked(last_end..start) }); + result.push_str(to); + last_end = start + part.len(); + } + // SAFETY: `self.len()` is guaranteed to be within the bounds of `self` and last_end is + // always less than or equal to `self.len()` + result.push_str(unsafe { self.get_unchecked(last_end..self.len()) }); + SmolStr::from(result) + } +} + +/// SAFETY: `map` fn must only replace ascii with ascii or return unchanged bytes. +#[inline] +unsafe fn replacen_1_ascii(src: &str, mut map: impl FnMut(&u8) -> u8) -> SmolStr { + if src.len() <= INLINE_CAP { + let mut buf = [0u8; INLINE_CAP]; + for (idx, b) in src.as_bytes().iter().enumerate() { + buf[idx] = map(b); + } + SmolStr(Repr::Inline { + // SAFETY: `len` is in bounds + len: unsafe { InlineSize::transmute_from_u8(src.len() as u8) }, + buf, + }) + } else { + let out = src.as_bytes().iter().map(map).collect(); + // SAFETY: We replaced ascii with ascii on valid utf8 strings. + unsafe { String::from_utf8_unchecked(out).into() } + } +} + +/// Inline version of std fn `convert_while_ascii`. `s` must have len <= 23. +#[inline] +fn inline_convert_while_ascii(s: &str, convert: fn(&u8) -> u8) -> ([u8; INLINE_CAP], &str) { + // Process the input in chunks of 16 bytes to enable auto-vectorization. 
+ // Previously the chunk size depended on the size of `usize`, + // but on 32-bit platforms with sse or neon is also the better choice. + // The only downside on other platforms would be a bit more loop-unrolling. + const N: usize = 16; + + debug_assert!(s.len() <= INLINE_CAP, "only for inline-able strings"); + + let mut slice = s.as_bytes(); + let mut out = [0u8; INLINE_CAP]; + let mut out_slice = &mut out[..slice.len()]; + let mut is_ascii = [false; N]; + + while slice.len() >= N { + // SAFETY: checked in loop condition + let chunk = unsafe { slice.get_unchecked(..N) }; + // SAFETY: out_slice has at least same length as input slice and gets sliced with the same offsets + let out_chunk = unsafe { out_slice.get_unchecked_mut(..N) }; + + for j in 0..N { + is_ascii[j] = chunk[j] <= 127; + } + + // Auto-vectorization for this check is a bit fragile, sum and comparing against the chunk + // size gives the best result, specifically a pmovmsk instruction on x86. + // See https://github.com/llvm/llvm-project/issues/96395 for why llvm currently does not + // currently recognize other similar idioms. + if is_ascii.iter().map(|x| *x as u8).sum::<u8>() as usize != N { + break; + } + + for j in 0..N { + out_chunk[j] = convert(&chunk[j]); + } + + slice = unsafe { slice.get_unchecked(N..) }; + out_slice = unsafe { out_slice.get_unchecked_mut(N..) }; + } + + // handle the remainder as individual bytes + while !slice.is_empty() { + let byte = slice[0]; + if byte > 127 { + break; + } + // SAFETY: out_slice has at least same length as input slice + unsafe { + *out_slice.get_unchecked_mut(0) = convert(&byte); + } + slice = unsafe { slice.get_unchecked(1..) }; + out_slice = unsafe { out_slice.get_unchecked_mut(1..) 
}; + } + + unsafe { + // SAFETY: we know this is a valid char boundary + // since we only skipped over leading ascii bytes + let rest = core::str::from_utf8_unchecked(slice); + (out, rest) + } +} + +impl<T> ToSmolStr for T +where + T: fmt::Display + ?Sized, +{ + fn to_smolstr(&self) -> SmolStr { + format_smolstr!("{}", self) + } +} + +mod private { + /// No downstream impls allowed. + pub trait Sealed {} + impl Sealed for str {} +} + +/// Formats arguments to a [`SmolStr`], potentially without allocating. +/// +/// See [`alloc::format!`] or [`format_args!`] for syntax documentation. +#[macro_export] +macro_rules! format_smolstr { + ($($tt:tt)*) => {{ + let mut w = $crate::SmolStrBuilder::new(); + ::core::fmt::Write::write_fmt(&mut w, format_args!($($tt)*)).expect("a formatting trait implementation returned an error"); + w.finish() + }}; +} + +/// A builder that can be used to efficiently build a [`SmolStr`]. +/// +/// This won't allocate if the final string fits into the inline buffer. +#[derive(Clone, Default, Debug, PartialEq, Eq)] +pub struct SmolStrBuilder(SmolStrBuilderRepr); + +#[derive(Clone, Debug, PartialEq, Eq)] +enum SmolStrBuilderRepr { + Inline { len: usize, buf: [u8; INLINE_CAP] }, + Heap(String), +} + +impl Default for SmolStrBuilderRepr { + #[inline] + fn default() -> Self { + SmolStrBuilderRepr::Inline { buf: [0; INLINE_CAP], len: 0 } + } +} + +impl SmolStrBuilder { + /// Creates a new empty [`SmolStrBuilder`]. + #[must_use] + pub const fn new() -> Self { + Self(SmolStrBuilderRepr::Inline { buf: [0; INLINE_CAP], len: 0 }) + } + + /// Builds a [`SmolStr`] from `self`. 
+ #[must_use] + pub fn finish(&self) -> SmolStr { + SmolStr(match &self.0 { + &SmolStrBuilderRepr::Inline { len, buf } => { + debug_assert!(len <= INLINE_CAP); + Repr::Inline { + // SAFETY: We know that `value.len` is less than or equal to the maximum value of `InlineSize` + len: unsafe { InlineSize::transmute_from_u8(len as u8) }, + buf, + } + } + SmolStrBuilderRepr::Heap(heap) => Repr::new(heap), + }) + } + + /// Appends the given [`char`] to the end of `self`'s buffer. + pub fn push(&mut self, c: char) { + match &mut self.0 { + SmolStrBuilderRepr::Inline { len, buf } => { + let char_len = c.len_utf8(); + let new_len = *len + char_len; + if new_len <= INLINE_CAP { + c.encode_utf8(&mut buf[*len..]); + *len += char_len; + } else { + let mut heap = String::with_capacity(new_len); + // copy existing inline bytes over to the heap + // SAFETY: inline data is guaranteed to be valid utf8 for `old_len` bytes + unsafe { heap.as_mut_vec().extend_from_slice(&buf[..*len]) }; + heap.push(c); + self.0 = SmolStrBuilderRepr::Heap(heap); + } + } + SmolStrBuilderRepr::Heap(h) => h.push(c), + } + } + + /// Appends a given string slice onto the end of `self`'s buffer. 
+ pub fn push_str(&mut self, s: &str) { + match &mut self.0 { + SmolStrBuilderRepr::Inline { len, buf } => { + let old_len = *len; + *len += s.len(); + + // if the new length will fit on the stack (even if it fills it entirely) + if *len <= INLINE_CAP { + buf[old_len..*len].copy_from_slice(s.as_bytes()); + return; // skip the heap push below + } + + let mut heap = String::with_capacity(*len); + + // copy existing inline bytes over to the heap + // SAFETY: inline data is guaranteed to be valid utf8 for `old_len` bytes + unsafe { heap.as_mut_vec().extend_from_slice(&buf[..old_len]) }; + heap.push_str(s); + self.0 = SmolStrBuilderRepr::Heap(heap); + } + SmolStrBuilderRepr::Heap(heap) => heap.push_str(s), + } + } +} + +impl fmt::Write for SmolStrBuilder { + #[inline] + fn write_str(&mut self, s: &str) -> fmt::Result { + self.push_str(s); + Ok(()) + } +} + +impl From<SmolStrBuilder> for SmolStr { + fn from(value: SmolStrBuilder) -> Self { + value.finish() + } +} + +#[cfg(feature = "arbitrary")] +#[cfg_attr(docsrs, doc(cfg(feature = "arbitrary")))] +impl<'a> arbitrary::Arbitrary<'a> for SmolStr { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> Result<Self, arbitrary::Error> { + let s = <&str>::arbitrary(u)?; + Ok(SmolStr::new(s)) + } +} + +#[cfg(feature = "borsh")] +#[cfg_attr(docsrs, doc(cfg(feature = "borsh")))] +mod borsh; +#[cfg(feature = "serde")] +#[cfg_attr(docsrs, doc(cfg(feature = "serde")))] +mod serde; + +#[test] +fn from_buf_and_chars_size_hinted_heap() { + let str = from_buf_and_chars( + *b"abcdefghijklmnopqr00000", + 18, + "_0x1x2x3x4x5x6x7x8x9x10x11x12x13".chars(), + ); + + assert_eq!(str, "abcdefghijklmnopqr_0x1x2x3x4x5x6x7x8x9x10x11x12x13"); +}
diff --git a/lib/smol_str/src/serde.rs b/lib/smol_str/src/serde.rs new file mode 100644 index 0000000..66cbcd3 --- /dev/null +++ b/lib/smol_str/src/serde.rs
@@ -0,0 +1,94 @@ +use alloc::{string::String, vec::Vec}; +use core::fmt; + +use serde::de::{Deserializer, Error, Unexpected, Visitor}; +use serde_core as serde; + +use crate::SmolStr; + +// https://github.com/serde-rs/serde/blob/629802f2abfd1a54a6072992888fea7ca5bc209f/serde/src/private/de.rs#L56-L125 +fn smol_str<'de: 'a, 'a, D>(deserializer: D) -> Result<SmolStr, D::Error> +where + D: Deserializer<'de>, +{ + struct SmolStrVisitor; + + impl<'a> Visitor<'a> for SmolStrVisitor { + type Value = SmolStr; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string") + } + + fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> + where + E: Error, + { + Ok(SmolStr::from(v)) + } + + fn visit_borrowed_str<E>(self, v: &'a str) -> Result<Self::Value, E> + where + E: Error, + { + Ok(SmolStr::from(v)) + } + + fn visit_string<E>(self, v: String) -> Result<Self::Value, E> + where + E: Error, + { + Ok(SmolStr::from(v)) + } + + fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> + where + E: Error, + { + match core::str::from_utf8(v) { + Ok(s) => Ok(SmolStr::from(s)), + Err(_) => Err(Error::invalid_value(Unexpected::Bytes(v), &self)), + } + } + + fn visit_borrowed_bytes<E>(self, v: &'a [u8]) -> Result<Self::Value, E> + where + E: Error, + { + match core::str::from_utf8(v) { + Ok(s) => Ok(SmolStr::from(s)), + Err(_) => Err(Error::invalid_value(Unexpected::Bytes(v), &self)), + } + } + + fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E> + where + E: Error, + { + match String::from_utf8(v) { + Ok(s) => Ok(SmolStr::from(s)), + Err(e) => Err(Error::invalid_value(Unexpected::Bytes(&e.into_bytes()), &self)), + } + } + } + + deserializer.deserialize_str(SmolStrVisitor) +} + +impl serde::Serialize for SmolStr { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: serde::Serializer, + { + self.as_str().serialize(serializer) + } +} + +impl<'de> serde::Deserialize<'de> for SmolStr { + fn 
deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: serde::Deserializer<'de>, + { + smol_str(deserializer) + } +}
diff --git a/lib/smol_str/tests/test.rs b/lib/smol_str/tests/test.rs new file mode 100644 index 0000000..640e7df --- /dev/null +++ b/lib/smol_str/tests/test.rs
@@ -0,0 +1,446 @@ +#![allow(clippy::disallowed_types)] +use std::sync::Arc; + +#[cfg(not(miri))] +use proptest::{prop_assert, prop_assert_eq, proptest}; + +use smol_str::{SmolStr, SmolStrBuilder}; + +#[test] +#[cfg(target_pointer_width = "64")] +fn smol_str_is_smol() { + assert_eq!(::std::mem::size_of::<SmolStr>(), ::std::mem::size_of::<String>(),); +} + +#[test] +fn assert_traits() { + fn f<T: Send + Sync + ::std::fmt::Debug + Clone>() {} + f::<SmolStr>(); +} + +#[test] +fn conversions() { + let s: SmolStr = "Hello, World!".into(); + let s: String = s.into(); + assert_eq!(s, "Hello, World!"); + + let s: SmolStr = Arc::<str>::from("Hello, World!").into(); + let s: Arc<str> = s.into(); + assert_eq!(s.as_ref(), "Hello, World!"); +} + +#[test] +fn const_fn_ctor() { + const EMPTY: SmolStr = SmolStr::new_inline(""); + const A: SmolStr = SmolStr::new_inline("A"); + const HELLO: SmolStr = SmolStr::new_inline("HELLO"); + const LONG: SmolStr = SmolStr::new_inline("ABCDEFGHIZKLMNOPQRSTUVW"); + + assert_eq!(EMPTY, SmolStr::from("")); + assert_eq!(A, SmolStr::from("A")); + assert_eq!(HELLO, SmolStr::from("HELLO")); + assert_eq!(LONG, SmolStr::from("ABCDEFGHIZKLMNOPQRSTUVW")); +} + +#[cfg(not(miri))] +fn check_props(std_str: &str, smol: SmolStr) -> Result<(), proptest::test_runner::TestCaseError> { + prop_assert_eq!(smol.as_str(), std_str); + prop_assert_eq!(smol.len(), std_str.len()); + prop_assert_eq!(smol.is_empty(), std_str.is_empty()); + if smol.len() <= 23 { + prop_assert!(!smol.is_heap_allocated()); + } + Ok(()) +} + +#[cfg(not(miri))] +proptest! 
{ + #[test] + fn roundtrip(s: String) { + check_props(s.as_str(), SmolStr::new(s.clone()))?; + } + + #[test] + fn roundtrip_spaces(s in r"( )*") { + check_props(s.as_str(), SmolStr::new(s.clone()))?; + } + + #[test] + fn roundtrip_newlines(s in r"\n*") { + check_props(s.as_str(), SmolStr::new(s.clone()))?; + } + + #[test] + fn roundtrip_ws(s in r"( |\n)*") { + check_props(s.as_str(), SmolStr::new(s.clone()))?; + } + + #[test] + fn from_string_iter(slices in proptest::collection::vec(".*", 1..100)) { + let string: String = slices.iter().map(|x| x.as_str()).collect(); + let smol: SmolStr = slices.into_iter().collect(); + check_props(string.as_str(), smol)?; + } + + #[test] + fn from_str_iter(slices in proptest::collection::vec(".*", 1..100)) { + let string: String = slices.iter().map(|x| x.as_str()).collect(); + let smol: SmolStr = slices.iter().collect(); + check_props(string.as_str(), smol)?; + } +} + +#[cfg(feature = "serde")] +mod serde_tests { + use super::*; + use serde::{Deserialize, Serialize}; + use std::collections::HashMap; + + #[derive(Serialize, Deserialize)] + struct SmolStrStruct { + pub(crate) s: SmolStr, + pub(crate) vec: Vec<SmolStr>, + pub(crate) map: HashMap<SmolStr, SmolStr>, + } + + #[test] + fn test_serde() { + let s = SmolStr::new("Hello, World"); + let s = serde_json::to_string(&s).unwrap(); + assert_eq!(s, "\"Hello, World\""); + let s: SmolStr = serde_json::from_str(&s).unwrap(); + assert_eq!(s, "Hello, World"); + } + + #[test] + fn test_serde_reader() { + let s = SmolStr::new("Hello, World"); + let s = serde_json::to_string(&s).unwrap(); + assert_eq!(s, "\"Hello, World\""); + let s: SmolStr = serde_json::from_reader(std::io::Cursor::new(s)).unwrap(); + assert_eq!(s, "Hello, World"); + } + + #[test] + fn test_serde_struct() { + let mut map = HashMap::new(); + map.insert(SmolStr::new("a"), SmolStr::new("ohno")); + let struct_ = SmolStrStruct { + s: SmolStr::new("Hello, World"), + vec: vec![SmolStr::new("Hello, World"), SmolStr::new("Hello, 
World")], + map, + }; + let s = serde_json::to_string(&struct_).unwrap(); + let _new_struct: SmolStrStruct = serde_json::from_str(&s).unwrap(); + } + + #[test] + fn test_serde_struct_reader() { + let mut map = HashMap::new(); + map.insert(SmolStr::new("a"), SmolStr::new("ohno")); + let struct_ = SmolStrStruct { + s: SmolStr::new("Hello, World"), + vec: vec![SmolStr::new("Hello, World"), SmolStr::new("Hello, World")], + map, + }; + let s = serde_json::to_string(&struct_).unwrap(); + let _new_struct: SmolStrStruct = serde_json::from_reader(std::io::Cursor::new(s)).unwrap(); + } + + #[test] + fn test_serde_hashmap() { + let mut map = HashMap::new(); + map.insert(SmolStr::new("a"), SmolStr::new("ohno")); + let s = serde_json::to_string(&map).unwrap(); + let _s: HashMap<SmolStr, SmolStr> = serde_json::from_str(&s).unwrap(); + } + + #[test] + fn test_serde_hashmap_reader() { + let mut map = HashMap::new(); + map.insert(SmolStr::new("a"), SmolStr::new("ohno")); + let s = serde_json::to_string(&map).unwrap(); + let _s: HashMap<SmolStr, SmolStr> = + serde_json::from_reader(std::io::Cursor::new(s)).unwrap(); + } + + #[test] + fn test_serde_vec() { + let vec = vec![SmolStr::new(""), SmolStr::new("b")]; + let s = serde_json::to_string(&vec).unwrap(); + let _s: Vec<SmolStr> = serde_json::from_str(&s).unwrap(); + } + + #[test] + fn test_serde_vec_reader() { + let vec = vec![SmolStr::new(""), SmolStr::new("b")]; + let s = serde_json::to_string(&vec).unwrap(); + let _s: Vec<SmolStr> = serde_json::from_reader(std::io::Cursor::new(s)).unwrap(); + } +} + +#[test] +fn test_search_in_hashmap() { + let mut m = ::std::collections::HashMap::<SmolStr, i32>::new(); + m.insert("aaa".into(), 17); + assert_eq!(17, *m.get("aaa").unwrap()); +} + +#[test] +fn test_from_char_iterator() { + let examples = [ + // Simple keyword-like strings + ("if", false), + ("for", false), + ("impl", false), + // Strings containing two-byte characters + ("パーティーへ行かないか", true), + ("パーティーへ行か", true), + ("パーティーへ行_", 
false), + ("和製漢語", false), + ("部落格", false), + ("사회과학원 어학연구소", true), + // String containing diverse characters + ("表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀", true), + ]; + for (raw, is_heap) in &examples { + let s: SmolStr = raw.chars().collect(); + assert_eq!(s.as_str(), *raw); + assert_eq!(s.is_heap_allocated(), *is_heap); + } + // String which has too many characters to even consider inlining: Chars::size_hint uses + // (`len` + 3) / 4. With `len` = 89, this results in 23, so `from_iter` will immediately + // heap allocate + let raw = "a".repeat(23 * 4 + 1); + let s: SmolStr = raw.chars().collect(); + assert_eq!(s.as_str(), raw); + assert!(s.is_heap_allocated()); +} + +#[test] +fn test_bad_size_hint_char_iter() { + struct BadSizeHint<I>(I); + + impl<T, I: Iterator<Item = T>> Iterator for BadSizeHint<I> { + type Item = T; + + fn next(&mut self) -> Option<Self::Item> { + self.0.next() + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (1024, None) + } + } + + let data = "testing"; + let collected: SmolStr = BadSizeHint(data.chars()).collect(); + let new = SmolStr::new(data); + + assert!(!collected.is_heap_allocated()); + assert!(!new.is_heap_allocated()); + assert_eq!(new, collected); +} + +#[test] +fn test_to_smolstr() { + use smol_str::ToSmolStr; + + for i in 0..26 { + let a = &"abcdefghijklmnopqrstuvwxyz"[i..]; + + assert_eq!(a, a.to_smolstr()); + assert_eq!(a, smol_str::format_smolstr!("{}", a)); + } +} + +#[test] +fn test_builder_push_str() { + //empty + let builder = SmolStrBuilder::new(); + assert_eq!("", builder.finish()); + + // inline push + let mut builder = SmolStrBuilder::new(); + builder.push_str("a"); + builder.push_str("b"); + let s = builder.finish(); + assert!(!s.is_heap_allocated()); + assert_eq!("ab", s); + + // inline max push + let mut builder = SmolStrBuilder::new(); + builder.push_str(&"a".repeat(23)); + let s = builder.finish(); + assert!(!s.is_heap_allocated()); + assert_eq!("a".repeat(23), s); + + // heap push immediate + let mut builder = 
SmolStrBuilder::new(); + builder.push_str(&"a".repeat(24)); + let s = builder.finish(); + assert!(s.is_heap_allocated()); + assert_eq!("a".repeat(24), s); + + // heap push succession + let mut builder = SmolStrBuilder::new(); + builder.push_str(&"a".repeat(23)); + builder.push_str(&"a".repeat(23)); + let s = builder.finish(); + assert!(s.is_heap_allocated()); + assert_eq!("a".repeat(46), s); + + // heap push on multibyte char + let mut builder = SmolStrBuilder::new(); + builder.push_str("ohnonononononononono!"); + builder.push('🤯'); + let s = builder.finish(); + assert!(s.is_heap_allocated()); + assert_eq!("ohnonononononononono!🤯", s); +} + +#[test] +fn test_builder_push() { + //empty + let builder = SmolStrBuilder::new(); + assert_eq!("", builder.finish()); + + // inline push + let mut builder = SmolStrBuilder::new(); + builder.push('a'); + builder.push('b'); + let s = builder.finish(); + assert!(!s.is_heap_allocated()); + assert_eq!("ab", s); + + // inline max push + let mut builder = SmolStrBuilder::new(); + for _ in 0..23 { + builder.push('a'); + } + let s = builder.finish(); + assert!(!s.is_heap_allocated()); + assert_eq!("a".repeat(23), s); + + // heap push + let mut builder = SmolStrBuilder::new(); + for _ in 0..24 { + builder.push('a'); + } + let s = builder.finish(); + assert!(s.is_heap_allocated()); + assert_eq!("a".repeat(24), s); +} + +#[cfg(test)] +mod test_str_ext { + use smol_str::StrExt; + + #[test] + fn large() { + let lowercase = "aaaaaaAAAAAaaaaaaaaaaaaaaaaaaaaaAAAAaaaaaaaaaaaaaa".to_lowercase_smolstr(); + assert_eq!(lowercase, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"); + assert!(lowercase.is_heap_allocated()); + } + + #[test] + fn to_lowercase() { + let lowercase = "aßΔC".to_lowercase_smolstr(); + assert_eq!(lowercase, "aßδc"); + assert!(!lowercase.is_heap_allocated()); + } + + #[test] + fn to_uppercase() { + let uppercase = "aßΔC".to_uppercase_smolstr(); + assert_eq!(uppercase, "ASSΔC"); + assert!(!uppercase.is_heap_allocated()); + 
} + + #[test] + fn to_ascii_lowercase() { + let uppercase = "aßΔC".to_ascii_lowercase_smolstr(); + assert_eq!(uppercase, "aßΔc"); + assert!(!uppercase.is_heap_allocated()); + } + + #[test] + fn to_ascii_uppercase() { + let uppercase = "aßΔC".to_ascii_uppercase_smolstr(); + assert_eq!(uppercase, "AßΔC"); + assert!(!uppercase.is_heap_allocated()); + } + + #[test] + fn replace() { + let result = "foo_bar_baz".replace_smolstr("ba", "do"); + assert_eq!(result, "foo_dor_doz"); + assert!(!result.is_heap_allocated()); + } + + #[test] + fn replacen() { + let result = "foo_bar_baz".replacen_smolstr("ba", "do", 1); + assert_eq!(result, "foo_dor_baz"); + assert!(!result.is_heap_allocated()); + } + + #[test] + fn replacen_1_ascii() { + let result = "foo_bar_baz".replacen_smolstr("o", "u", 1); + assert_eq!(result, "fuo_bar_baz"); + assert!(!result.is_heap_allocated()); + } +} + +#[cfg(feature = "borsh")] +mod borsh_tests { + use borsh::BorshDeserialize; + use smol_str::{SmolStr, ToSmolStr}; + use std::io::Cursor; + + #[test] + fn borsh_serialize_stack() { + let smolstr_on_stack = "aßΔCaßδc".to_smolstr(); + let mut buffer = Vec::new(); + borsh::BorshSerialize::serialize(&smolstr_on_stack, &mut buffer).unwrap(); + let mut cursor = Cursor::new(buffer); + let decoded: SmolStr = borsh::BorshDeserialize::deserialize_reader(&mut cursor).unwrap(); + assert_eq!(smolstr_on_stack, decoded); + } + #[test] + fn borsh_serialize_heap() { + let smolstr_on_heap = "aßΔCaßδcaßΔCaßδcaßΔCaßδcaßΔCaßδcaßΔCaßδcaßΔCaßδcaßΔCaßδcaßΔCaßδcaßΔCaßδcaßΔCaßδcaßΔCaßδc".to_smolstr(); + let mut buffer = Vec::new(); + borsh::BorshSerialize::serialize(&smolstr_on_heap, &mut buffer).unwrap(); + let mut cursor = Cursor::new(buffer); + let decoded: SmolStr = borsh::BorshDeserialize::deserialize_reader(&mut cursor).unwrap(); + assert_eq!(smolstr_on_heap, decoded); + } + #[test] + fn borsh_non_utf8_stack() { + let invalid_utf8: Vec<u8> = vec![0xF0, 0x9F, 0x8F]; // Incomplete UTF-8 sequence + + let wrong_utf8 = 
SmolStr::from(unsafe { String::from_utf8_unchecked(invalid_utf8) }); + let mut buffer = Vec::new(); + borsh::BorshSerialize::serialize(&wrong_utf8, &mut buffer).unwrap(); + let mut cursor = Cursor::new(buffer); + let result = SmolStr::deserialize_reader(&mut cursor); + assert!(result.is_err()); + } + + #[test] + fn borsh_non_utf8_heap() { + let invalid_utf8: Vec<u8> = vec![ + 0xC1, 0x8A, 0x5F, 0xE2, 0x3A, 0x9E, 0x3B, 0xAA, 0x01, 0x08, 0x6F, 0x2F, 0xC0, 0x32, + 0xAB, 0xE1, 0x9A, 0x2F, 0x4A, 0x3F, 0x25, 0x0D, 0x8A, 0x2A, 0x19, 0x11, 0xF0, 0x7F, + 0x0E, 0x80, + ]; + let wrong_utf8 = SmolStr::from(unsafe { String::from_utf8_unchecked(invalid_utf8) }); + let mut buffer = Vec::new(); + borsh::BorshSerialize::serialize(&wrong_utf8, &mut buffer).unwrap(); + let mut cursor = Cursor::new(buffer); + let result = SmolStr::deserialize_reader(&mut cursor); + assert!(result.is_err()); + } +}
diff --git a/lib/smol_str/tests/tidy.rs b/lib/smol_str/tests/tidy.rs new file mode 100644 index 0000000..743fa5a --- /dev/null +++ b/lib/smol_str/tests/tidy.rs
@@ -0,0 +1,48 @@ +#![allow(clippy::disallowed_methods, clippy::print_stdout)] +#![cfg(not(miri))] +use std::{ + env, + path::{Path, PathBuf}, + process::{Command, Stdio}, +}; + +fn project_root() -> PathBuf { + PathBuf::from( + env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()), + ) +} + +fn run(cmd: &str, dir: impl AsRef<Path>) -> Result<(), ()> { + let mut args: Vec<_> = cmd.split_whitespace().collect(); + let bin = args.remove(0); + println!("> {}", cmd); + let output = Command::new(bin) + .args(args) + .current_dir(dir) + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::inherit()) + .output() + .map_err(drop)?; + if output.status.success() { + Ok(()) + } else { + let stdout = String::from_utf8(output.stdout).map_err(drop)?; + print!("{}", stdout); + Err(()) + } +} + +#[test] +fn check_code_formatting() { + let dir = project_root(); + if run("rustfmt +stable --version", &dir).is_err() { + panic!( + "failed to run rustfmt from toolchain 'stable'; \ + please run `rustup component add rustfmt --toolchain stable` to install it.", + ); + } + if run("cargo +stable fmt -- --check", &dir).is_err() { + panic!("code is not properly formatted; please format the code by running `cargo fmt`") + } +}
diff --git a/lib/text-size/CHANGELOG.md b/lib/text-size/CHANGELOG.md new file mode 100644 index 0000000..0167599 --- /dev/null +++ b/lib/text-size/CHANGELOG.md
@@ -0,0 +1,26 @@ +# Changelog + +## 1.1.0 + +* add `TextRange::ordering` method + +## 1.0.0 :tada: + +* the crate is renamed to `text-size` from `text_unit` + +Transition table: +- `TextUnit::of_char(c)` ⟹ `TextSize::of(c)` +- `TextUnit::of_str(s)` ⟹ `TextSize::of(s)` +- `TextUnit::from_usize(size)` ⟹ `TextSize::try_from(size).unwrap_or_else(|| panic!(_))` +- `unit.to_usize()` ⟹ `usize::from(size)` +- `TextRange::from_to(from, to)` ⟹ `TextRange::new(from, to)` +- `TextRange::offset_len(offset, size)` ⟹ `TextRange::from_len(offset, size)` +- `range.start()` ⟹ `range.start()` +- `range.end()` ⟹ `range.end()` +- `range.len()` ⟹ `range.len()` +- `range.is_empty()` ⟹ `range.is_empty()` +- `a.is_subrange(b)` ⟹ `b.contains_range(a)` +- `a.intersection(b)` ⟹ `a.intersect(b)` +- `a.extend_to(b)` ⟹ `a.cover(b)` +- `range.contains(offset)` ⟹ `range.contains(point)` +- `range.contains_inclusive(offset)` ⟹ `range.contains_inclusive(point)`
diff --git a/lib/text-size/Cargo.toml b/lib/text-size/Cargo.toml new file mode 100644 index 0000000..f889009 --- /dev/null +++ b/lib/text-size/Cargo.toml
@@ -0,0 +1,28 @@ +[package] +name = "text-size" +version = "1.1.1" +edition = "2024" + +authors = [ + "Aleksey Kladov <aleksey.kladov@gmail.com>", + "Christopher Durham (CAD97) <cad97@cad97.com>" +] +description = "Newtypes for text offsets" +license = "MIT OR Apache-2.0" +repository = "https://github.com/rust-analyzer/text-size" +documentation = "https://docs.rs/text-size" + +[dependencies] +serde = { version = "1.0", optional = true, default-features = false } + +[dev-dependencies] +serde_test = "1.0" +static_assertions = "1.1" + +[[test]] +name = "serde" +path = "tests/serde.rs" +required-features = ["serde"] + +[lints] +workspace = true
diff --git a/lib/text-size/LICENSE-APACHE b/lib/text-size/LICENSE-APACHE new file mode 100644 index 0000000..16fe87b --- /dev/null +++ b/lib/text-size/LICENSE-APACHE
@@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the 
following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.
diff --git a/lib/text-size/LICENSE-MIT b/lib/text-size/LICENSE-MIT new file mode 100644 index 0000000..31aa793 --- /dev/null +++ b/lib/text-size/LICENSE-MIT
@@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE.
diff --git a/lib/text-size/README.md b/lib/text-size/README.md new file mode 100644 index 0000000..365b602 --- /dev/null +++ b/lib/text-size/README.md
@@ -0,0 +1,27 @@ +# text-size + +[](https://travis-ci.org/matklad/text-size) +[](https://crates.io/crates/text-size) +[](https://docs.rs/text-size/) + + +A library that provides newtype wrappers for `u32` and `(u32, u32)` for use as text offsets. + +See the [docs](https://docs.rs/text-size/) for more. + +## License + +Licensed under either of + + * Apache License, Version 2.0 + ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) + * MIT license + ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + +at your option. + +## Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in the work by you, as defined in the Apache-2.0 license, shall be +dual licensed as above, without any additional terms or conditions.
diff --git a/lib/text-size/src/lib.rs b/lib/text-size/src/lib.rs new file mode 100644 index 0000000..92bd36b --- /dev/null +++ b/lib/text-size/src/lib.rs
@@ -0,0 +1,32 @@ +//! Newtypes for working with text sizes/ranges in a more type-safe manner. +//! +//! This library can help with two things: +//! * Reducing storage requirements for offsets and ranges, under the +//! assumption that 32 bits is enough. +//! * Providing standard vocabulary types for applications where text ranges +//! are pervasive. +//! +//! However, you should not use this library simply because you work with +//! strings. In the overwhelming majority of cases, using `usize` and +//! `std::ops::Range<usize>` is better. In particular, if you are publishing a +//! library, using only std types in the interface would make it more +//! interoperable. Similarly, if you are writing something like a lexer, which +//! produces, but does not *store* text ranges, then sticking to `usize` would +//! be better. +//! +//! Minimal Supported Rust Version: latest stable. + +#![forbid(unsafe_code)] +#![warn(missing_debug_implementations, missing_docs)] + +mod range; +mod size; +mod traits; + +#[cfg(feature = "serde")] +mod serde_impls; + +pub use crate::{range::TextRange, size::TextSize, traits::TextLen}; + +#[cfg(target_pointer_width = "16")] +compile_error!("text-size assumes usize >= u32 and does not work on 16-bit targets");
diff --git a/lib/text-size/src/range.rs b/lib/text-size/src/range.rs new file mode 100644 index 0000000..6067115 --- /dev/null +++ b/lib/text-size/src/range.rs
@@ -0,0 +1,446 @@ +use cmp::Ordering; + +use { + crate::TextSize, + std::{ + cmp, fmt, + ops::{Add, AddAssign, Bound, Index, IndexMut, Range, RangeBounds, Sub, SubAssign}, + }, +}; + +/// A range in text, represented as a pair of [`TextSize`][struct@TextSize]. +/// +/// It is a logic error for `start` to be greater than `end`. +#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)] +pub struct TextRange { + // Invariant: start <= end + start: TextSize, + end: TextSize, +} + +impl fmt::Debug for TextRange { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}..{}", self.start().raw, self.end().raw) + } +} + +impl TextRange { + /// Creates a new `TextRange` with the given `start` and `end` (`start..end`). + /// + /// # Panics + /// + /// Panics if `end < start`. + /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// let start = TextSize::from(5); + /// let end = TextSize::from(10); + /// let range = TextRange::new(start, end); + /// + /// assert_eq!(range.start(), start); + /// assert_eq!(range.end(), end); + /// assert_eq!(range.len(), end - start); + /// ``` + #[inline] + pub const fn new(start: TextSize, end: TextSize) -> TextRange { + assert!(start.raw <= end.raw); + TextRange { start, end } + } + + /// Create a new `TextRange` with the given `offset` and `len` (`offset..offset + len`). + /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// let text = "0123456789"; + /// + /// let offset = TextSize::from(2); + /// let length = TextSize::from(5); + /// let range = TextRange::at(offset, length); + /// + /// assert_eq!(range, TextRange::new(offset, offset + length)); + /// assert_eq!(&text[range], "23456") + /// ``` + #[inline] + pub const fn at(offset: TextSize, len: TextSize) -> TextRange { + TextRange::new(offset, TextSize::new(offset.raw + len.raw)) + } + + /// Create a zero-length range at the specified offset (`offset..offset`). 
+ /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// let point: TextSize; + /// # point = TextSize::from(3); + /// let range = TextRange::empty(point); + /// assert!(range.is_empty()); + /// assert_eq!(range, TextRange::new(point, point)); + /// ``` + #[inline] + pub const fn empty(offset: TextSize) -> TextRange { + TextRange { start: offset, end: offset } + } + + /// Create a range up to the given end (`..end`). + /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// let point: TextSize; + /// # point = TextSize::from(12); + /// let range = TextRange::up_to(point); + /// + /// assert_eq!(range.len(), point); + /// assert_eq!(range, TextRange::new(0.into(), point)); + /// assert_eq!(range, TextRange::at(0.into(), point)); + /// ``` + #[inline] + pub const fn up_to(end: TextSize) -> TextRange { + TextRange { start: TextSize::new(0), end } + } +} + +/// Identity methods. +impl TextRange { + /// The start point of this range. + #[inline] + pub const fn start(self) -> TextSize { + self.start + } + + /// The end point of this range. + #[inline] + pub const fn end(self) -> TextSize { + self.end + } + + /// The size of this range. + #[inline] + pub const fn len(self) -> TextSize { + // HACK for const fn: math on primitives only + TextSize { raw: self.end().raw - self.start().raw } + } + + /// Check if this range is empty. + #[inline] + pub const fn is_empty(self) -> bool { + // HACK for const fn: math on primitives only + self.start().raw == self.end().raw + } +} + +/// Manipulation methods. +impl TextRange { + /// Check if this range contains an offset. + /// + /// The end index is considered excluded. 
+ /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// let (start, end): (TextSize, TextSize); + /// # start = 10.into(); end = 20.into(); + /// let range = TextRange::new(start, end); + /// assert!(range.contains(start)); + /// assert!(!range.contains(end)); + /// ``` + #[inline] + pub fn contains(self, offset: TextSize) -> bool { + self.start() <= offset && offset < self.end() + } + + /// Check if this range contains an offset. + /// + /// The end index is considered included. + /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// let (start, end): (TextSize, TextSize); + /// # start = 10.into(); end = 20.into(); + /// let range = TextRange::new(start, end); + /// assert!(range.contains_inclusive(start)); + /// assert!(range.contains_inclusive(end)); + /// ``` + #[inline] + pub fn contains_inclusive(self, offset: TextSize) -> bool { + self.start() <= offset && offset <= self.end() + } + + /// Check if this range completely contains another range. + /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// let larger = TextRange::new(0.into(), 20.into()); + /// let smaller = TextRange::new(5.into(), 15.into()); + /// assert!(larger.contains_range(smaller)); + /// assert!(!smaller.contains_range(larger)); + /// + /// // a range always contains itself + /// assert!(larger.contains_range(larger)); + /// assert!(smaller.contains_range(smaller)); + /// ``` + #[inline] + pub fn contains_range(self, other: TextRange) -> bool { + self.start() <= other.start() && other.end() <= self.end() + } + + /// The range covered by both ranges, if it exists. + /// If the ranges touch but do not overlap, the output range is empty. 
+ /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// assert_eq!( + /// TextRange::intersect( + /// TextRange::new(0.into(), 10.into()), + /// TextRange::new(5.into(), 15.into()), + /// ), + /// Some(TextRange::new(5.into(), 10.into())), + /// ); + /// ``` + #[inline] + pub fn intersect(self, other: TextRange) -> Option<TextRange> { + let start = cmp::max(self.start(), other.start()); + let end = cmp::min(self.end(), other.end()); + if end < start { + return None; + } + Some(TextRange::new(start, end)) + } + + /// Extends the range to cover `other` as well. + /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// assert_eq!( + /// TextRange::cover( + /// TextRange::new(0.into(), 5.into()), + /// TextRange::new(15.into(), 20.into()), + /// ), + /// TextRange::new(0.into(), 20.into()), + /// ); + /// ``` + #[inline] + pub fn cover(self, other: TextRange) -> TextRange { + let start = cmp::min(self.start(), other.start()); + let end = cmp::max(self.end(), other.end()); + TextRange::new(start, end) + } + + /// Extends the range to cover `other` offsets as well. + /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// assert_eq!( + /// TextRange::empty(0.into()).cover_offset(20.into()), + /// TextRange::new(0.into(), 20.into()), + /// ) + /// ``` + #[inline] + pub fn cover_offset(self, offset: TextSize) -> TextRange { + self.cover(TextRange::empty(offset)) + } + + /// Add an offset to this range. + /// + /// Note that this is not appropriate for changing where a `TextRange` is + /// within some string; rather, it is for changing the reference anchor + /// that the `TextRange` is measured against. + /// + /// The unchecked version (`Add::add`) will _always_ panic on overflow, + /// in contrast to primitive integers, which check in debug mode only. 
+ #[inline] + pub fn checked_add(self, offset: TextSize) -> Option<TextRange> { + Some(TextRange { + start: self.start.checked_add(offset)?, + end: self.end.checked_add(offset)?, + }) + } + + /// Subtract an offset from this range. + /// + /// Note that this is not appropriate for changing where a `TextRange` is + /// within some string; rather, it is for changing the reference anchor + /// that the `TextRange` is measured against. + /// + /// The unchecked version (`Sub::sub`) will _always_ panic on overflow, + /// in contrast to primitive integers, which check in debug mode only. + #[inline] + pub fn checked_sub(self, offset: TextSize) -> Option<TextRange> { + Some(TextRange { + start: self.start.checked_sub(offset)?, + end: self.end.checked_sub(offset)?, + }) + } + + /// Relative order of the two ranges (overlapping ranges are considered + /// equal). + /// + /// + /// This is useful when, for example, binary searching an array of disjoint + /// ranges. + /// + /// # Examples + /// + /// ``` + /// # use text_size::*; + /// # use std::cmp::Ordering; + /// + /// let a = TextRange::new(0.into(), 3.into()); + /// let b = TextRange::new(4.into(), 5.into()); + /// assert_eq!(a.ordering(b), Ordering::Less); + /// + /// let a = TextRange::new(0.into(), 3.into()); + /// let b = TextRange::new(3.into(), 5.into()); + /// assert_eq!(a.ordering(b), Ordering::Less); + /// + /// let a = TextRange::new(0.into(), 3.into()); + /// let b = TextRange::new(2.into(), 5.into()); + /// assert_eq!(a.ordering(b), Ordering::Equal); + /// + /// let a = TextRange::new(0.into(), 3.into()); + /// let b = TextRange::new(2.into(), 2.into()); + /// assert_eq!(a.ordering(b), Ordering::Equal); + /// + /// let a = TextRange::new(2.into(), 3.into()); + /// let b = TextRange::new(2.into(), 2.into()); + /// assert_eq!(a.ordering(b), Ordering::Greater); + /// ``` + #[inline] + pub fn ordering(self, other: TextRange) -> Ordering { + if self.end() <= other.start() { + Ordering::Less + } else if 
other.end() <= self.start() { + Ordering::Greater + } else { + Ordering::Equal + } + } +} + +impl Index<TextRange> for str { + type Output = str; + #[inline] + fn index(&self, index: TextRange) -> &str { + &self[Range::<usize>::from(index)] + } +} + +impl Index<TextRange> for String { + type Output = str; + #[inline] + fn index(&self, index: TextRange) -> &str { + &self[Range::<usize>::from(index)] + } +} + +impl IndexMut<TextRange> for str { + #[inline] + fn index_mut(&mut self, index: TextRange) -> &mut str { + &mut self[Range::<usize>::from(index)] + } +} + +impl IndexMut<TextRange> for String { + #[inline] + fn index_mut(&mut self, index: TextRange) -> &mut str { + &mut self[Range::<usize>::from(index)] + } +} + +impl RangeBounds<TextSize> for TextRange { + fn start_bound(&self) -> Bound<&TextSize> { + Bound::Included(&self.start) + } + + fn end_bound(&self) -> Bound<&TextSize> { + Bound::Excluded(&self.end) + } +} + +impl<T> From<TextRange> for Range<T> +where + T: From<TextSize>, +{ + #[inline] + fn from(r: TextRange) -> Self { + r.start().into()..r.end().into() + } +} + +macro_rules! 
ops { + (impl $Op:ident for TextRange by fn $f:ident = $op:tt) => { + impl $Op<&TextSize> for TextRange { + type Output = TextRange; + #[inline] + fn $f(self, other: &TextSize) -> TextRange { + self $op *other + } + } + impl<T> $Op<T> for &TextRange + where + TextRange: $Op<T, Output=TextRange>, + { + type Output = TextRange; + #[inline] + fn $f(self, other: T) -> TextRange { + *self $op other + } + } + }; +} + +impl Add<TextSize> for TextRange { + type Output = TextRange; + #[inline] + fn add(self, offset: TextSize) -> TextRange { + self.checked_add(offset).expect("TextRange +offset overflowed") + } +} + +impl Sub<TextSize> for TextRange { + type Output = TextRange; + #[inline] + fn sub(self, offset: TextSize) -> TextRange { + self.checked_sub(offset).expect("TextRange -offset overflowed") + } +} + +ops!(impl Add for TextRange by fn add = +); +ops!(impl Sub for TextRange by fn sub = -); + +impl<A> AddAssign<A> for TextRange +where + TextRange: Add<A, Output = TextRange>, +{ + #[inline] + fn add_assign(&mut self, rhs: A) { + *self = *self + rhs + } +} + +impl<S> SubAssign<S> for TextRange +where + TextRange: Sub<S, Output = TextRange>, +{ + #[inline] + fn sub_assign(&mut self, rhs: S) { + *self = *self - rhs + } +}
diff --git a/lib/text-size/src/serde_impls.rs b/lib/text-size/src/serde_impls.rs new file mode 100644 index 0000000..4cd4161 --- /dev/null +++ b/lib/text-size/src/serde_impls.rs
@@ -0,0 +1,45 @@ +use { + crate::{TextRange, TextSize}, + serde::{Deserialize, Deserializer, Serialize, Serializer, de}, +}; + +impl Serialize for TextSize { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + self.raw.serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for TextSize { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + u32::deserialize(deserializer).map(TextSize::from) + } +} + +impl Serialize for TextRange { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + (self.start(), self.end()).serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for TextRange { + #[allow(clippy::nonminimal_bool)] + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + let (start, end) = Deserialize::deserialize(deserializer)?; + if !(start <= end) { + return Err(de::Error::custom(format!("invalid range: {:?}..{:?}", start, end))); + } + Ok(TextRange::new(start, end)) + } +}
diff --git a/lib/text-size/src/size.rs b/lib/text-size/src/size.rs new file mode 100644 index 0000000..05328b4 --- /dev/null +++ b/lib/text-size/src/size.rs
@@ -0,0 +1,172 @@ +use { + crate::TextLen, + std::{ + convert::TryFrom, + fmt, iter, + num::TryFromIntError, + ops::{Add, AddAssign, Sub, SubAssign}, + }, +}; + +/// A measure of text length. Also, equivalently, an index into text. +/// +/// This is a UTF-8 bytes offset stored as `u32`, but +/// most clients should treat it as an opaque measure. +/// +/// For cases that need to escape `TextSize` and return to working directly +/// with primitive integers, `TextSize` can be converted losslessly to/from +/// `u32` via [`From`] conversions as well as losslessly be converted [`Into`] +/// `usize`. The `usize -> TextSize` direction can be done via [`TryFrom`]. +/// +/// These escape hatches are primarily required for unit testing and when +/// converting from UTF-8 size to another coordinate space, such as UTF-16. +#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct TextSize { + pub(crate) raw: u32, +} + +impl fmt::Debug for TextSize { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.raw) + } +} + +impl TextSize { + /// Creates a new instance of `TextSize` from a raw `u32`. + #[inline] + pub const fn new(raw: u32) -> TextSize { + TextSize { raw } + } + + /// The text size of some primitive text-like object. + /// + /// Accepts `char`, `&str`, and `&String`. + /// + /// # Examples + /// + /// ```rust + /// # use text_size::*; + /// let char_size = TextSize::of('🦀'); + /// assert_eq!(char_size, TextSize::from(4)); + /// + /// let str_size = TextSize::of("rust-analyzer"); + /// assert_eq!(str_size, TextSize::from(13)); + /// ``` + #[inline] + pub fn of<T: TextLen>(text: T) -> TextSize { + text.text_len() + } +} + +/// Methods to act like a primitive integer type, where reasonably applicable. +// Last updated for parity with Rust 1.42.0. +impl TextSize { + /// Checked addition. Returns `None` if overflow occurred. 
+ #[inline] + pub const fn checked_add(self, rhs: TextSize) -> Option<TextSize> { + match self.raw.checked_add(rhs.raw) { + Some(raw) => Some(TextSize { raw }), + None => None, + } + } + + /// Checked subtraction. Returns `None` if overflow occurred. + #[inline] + pub const fn checked_sub(self, rhs: TextSize) -> Option<TextSize> { + match self.raw.checked_sub(rhs.raw) { + Some(raw) => Some(TextSize { raw }), + None => None, + } + } +} + +impl From<u32> for TextSize { + #[inline] + fn from(raw: u32) -> Self { + TextSize { raw } + } +} + +impl From<TextSize> for u32 { + #[inline] + fn from(value: TextSize) -> Self { + value.raw + } +} + +impl TryFrom<usize> for TextSize { + type Error = TryFromIntError; + #[inline] + fn try_from(value: usize) -> Result<Self, TryFromIntError> { + Ok(u32::try_from(value)?.into()) + } +} + +impl From<TextSize> for usize { + #[inline] + fn from(value: TextSize) -> Self { + value.raw as usize + } +} + +macro_rules! ops { + (impl $Op:ident for TextSize by fn $f:ident = $op:tt) => { + impl $Op<TextSize> for TextSize { + type Output = TextSize; + #[inline] + fn $f(self, other: TextSize) -> TextSize { + TextSize { raw: self.raw $op other.raw } + } + } + impl $Op<&TextSize> for TextSize { + type Output = TextSize; + #[inline] + fn $f(self, other: &TextSize) -> TextSize { + self $op *other + } + } + impl<T> $Op<T> for &TextSize + where + TextSize: $Op<T, Output=TextSize>, + { + type Output = TextSize; + #[inline] + fn $f(self, other: T) -> TextSize { + *self $op other + } + } + }; +} + +ops!(impl Add for TextSize by fn add = +); +ops!(impl Sub for TextSize by fn sub = -); + +impl<A> AddAssign<A> for TextSize +where + TextSize: Add<A, Output = TextSize>, +{ + #[inline] + fn add_assign(&mut self, rhs: A) { + *self = *self + rhs + } +} + +impl<S> SubAssign<S> for TextSize +where + TextSize: Sub<S, Output = TextSize>, +{ + #[inline] + fn sub_assign(&mut self, rhs: S) { + *self = *self - rhs + } +} + +impl<A> iter::Sum<A> for TextSize +where + 
TextSize: Add<A, Output = TextSize>, +{ + #[inline] + fn sum<I: Iterator<Item = A>>(iter: I) -> TextSize { + iter.fold(0.into(), Add::add) + } +}
diff --git a/lib/text-size/src/traits.rs b/lib/text-size/src/traits.rs new file mode 100644 index 0000000..d0bb6c1 --- /dev/null +++ b/lib/text-size/src/traits.rs
@@ -0,0 +1,36 @@ +use {crate::TextSize, std::convert::TryInto}; + +use priv_in_pub::Sealed; +mod priv_in_pub { + pub trait Sealed {} +} + +/// Primitives with a textual length that can be passed to [`TextSize::of`]. +pub trait TextLen: Copy + Sealed { + /// The textual length of this primitive. + fn text_len(self) -> TextSize; +} + +impl Sealed for &'_ str {} +impl TextLen for &'_ str { + #[inline] + fn text_len(self) -> TextSize { + self.len().try_into().unwrap() + } +} + +impl Sealed for &'_ String {} +impl TextLen for &'_ String { + #[inline] + fn text_len(self) -> TextSize { + self.as_str().text_len() + } +} + +impl Sealed for char {} +impl TextLen for char { + #[inline] + fn text_len(self) -> TextSize { + (self.len_utf8() as u32).into() + } +}
diff --git a/lib/text-size/tests/auto_traits.rs b/lib/text-size/tests/auto_traits.rs new file mode 100644 index 0000000..6e62369 --- /dev/null +++ b/lib/text-size/tests/auto_traits.rs
@@ -0,0 +1,18 @@ +use { + static_assertions::*, + std::{ + fmt::Debug, + hash::Hash, + marker::{Send, Sync}, + panic::{RefUnwindSafe, UnwindSafe}, + }, + text_size::*, +}; + +// auto traits +assert_impl_all!(TextSize: Send, Sync, Unpin, UnwindSafe, RefUnwindSafe); +assert_impl_all!(TextRange: Send, Sync, Unpin, UnwindSafe, RefUnwindSafe); + +// common traits +assert_impl_all!(TextSize: Copy, Debug, Default, Hash, Ord); +assert_impl_all!(TextRange: Copy, Debug, Default, Hash, Eq);
diff --git a/lib/text-size/tests/constructors.rs b/lib/text-size/tests/constructors.rs new file mode 100644 index 0000000..9ff4e19 --- /dev/null +++ b/lib/text-size/tests/constructors.rs
@@ -0,0 +1,24 @@ +use text_size::TextSize; + +#[derive(Copy, Clone)] +struct BadRope<'a>(&'a [&'a str]); + +impl BadRope<'_> { + fn text_len(self) -> TextSize { + self.0.iter().copied().map(TextSize::of).sum() + } +} + +#[test] +fn main() { + let x: char = 'c'; + let _ = TextSize::of(x); + + let x: &str = "hello"; + let _ = TextSize::of(x); + + let x: &String = &"hello".into(); + let _ = TextSize::of(x); + + let _ = BadRope(&[""]).text_len(); +}
diff --git a/lib/text-size/tests/indexing.rs b/lib/text-size/tests/indexing.rs new file mode 100644 index 0000000..93ba3c7 --- /dev/null +++ b/lib/text-size/tests/indexing.rs
@@ -0,0 +1,8 @@ +use text_size::*; + +#[test] +fn main() { + let range = TextRange::default(); + _ = &""[range]; + _ = &String::new()[range]; +}
diff --git a/lib/text-size/tests/main.rs b/lib/text-size/tests/main.rs new file mode 100644 index 0000000..5e6b86d --- /dev/null +++ b/lib/text-size/tests/main.rs
@@ -0,0 +1,76 @@ +use {std::ops, text_size::*}; + +fn size(x: u32) -> TextSize { + TextSize::from(x) +} + +fn range(x: ops::Range<u32>) -> TextRange { + TextRange::new(x.start.into(), x.end.into()) +} + +#[test] +fn sum() { + let xs: Vec<TextSize> = vec![size(0), size(1), size(2)]; + assert_eq!(xs.iter().sum::<TextSize>(), size(3)); + assert_eq!(xs.into_iter().sum::<TextSize>(), size(3)); +} + +#[test] +fn math() { + assert_eq!(size(10) + size(5), size(15)); + assert_eq!(size(10) - size(5), size(5)); +} + +#[test] +fn checked_math() { + assert_eq!(size(1).checked_add(size(1)), Some(size(2))); + assert_eq!(size(1).checked_sub(size(1)), Some(size(0))); + assert_eq!(size(1).checked_sub(size(2)), None); + assert_eq!(size(!0).checked_add(size(1)), None); +} + +#[test] +#[rustfmt::skip] +fn contains() { + assert!( range(2..4).contains_range(range(2..3))); + assert!( ! range(2..4).contains_range(range(1..3))); +} + +#[test] +fn intersect() { + assert_eq!(range(1..2).intersect(range(2..3)), Some(range(2..2))); + assert_eq!(range(1..5).intersect(range(2..3)), Some(range(2..3))); + assert_eq!(range(1..2).intersect(range(3..4)), None); +} + +#[test] +fn cover() { + assert_eq!(range(1..2).cover(range(2..3)), range(1..3)); + assert_eq!(range(1..5).cover(range(2..3)), range(1..5)); + assert_eq!(range(1..2).cover(range(4..5)), range(1..5)); +} + +#[test] +fn cover_offset() { + assert_eq!(range(1..3).cover_offset(size(0)), range(0..3)); + assert_eq!(range(1..3).cover_offset(size(1)), range(1..3)); + assert_eq!(range(1..3).cover_offset(size(2)), range(1..3)); + assert_eq!(range(1..3).cover_offset(size(3)), range(1..3)); + assert_eq!(range(1..3).cover_offset(size(4)), range(1..4)); +} + +#[test] +#[rustfmt::skip] +fn contains_point() { + assert!( ! range(1..3).contains(size(0))); + assert!( range(1..3).contains(size(1))); + assert!( range(1..3).contains(size(2))); + assert!( ! range(1..3).contains(size(3))); + assert!( ! range(1..3).contains(size(4))); + + assert!( ! 
range(1..3).contains_inclusive(size(0))); + assert!( range(1..3).contains_inclusive(size(1))); + assert!( range(1..3).contains_inclusive(size(2))); + assert!( range(1..3).contains_inclusive(size(3))); + assert!( ! range(1..3).contains_inclusive(size(4))); +}
diff --git a/lib/text-size/tests/serde.rs b/lib/text-size/tests/serde.rs new file mode 100644 index 0000000..cc4d538 --- /dev/null +++ b/lib/text-size/tests/serde.rs
@@ -0,0 +1,49 @@ +use {serde_test::*, std::ops, text_size::*}; + +fn size(x: u32) -> TextSize { + TextSize::from(x) +} + +fn range(x: ops::Range<u32>) -> TextRange { + TextRange::new(x.start.into(), x.end.into()) +} + +#[test] +fn size_serialization() { + assert_tokens(&size(00), &[Token::U32(00)]); + assert_tokens(&size(10), &[Token::U32(10)]); + assert_tokens(&size(20), &[Token::U32(20)]); + assert_tokens(&size(30), &[Token::U32(30)]); +} + +#[test] +fn range_serialization() { + assert_tokens( + &range(00..10), + &[Token::Tuple { len: 2 }, Token::U32(00), Token::U32(10), Token::TupleEnd], + ); + assert_tokens( + &range(10..20), + &[Token::Tuple { len: 2 }, Token::U32(10), Token::U32(20), Token::TupleEnd], + ); + assert_tokens( + &range(20..30), + &[Token::Tuple { len: 2 }, Token::U32(20), Token::U32(30), Token::TupleEnd], + ); + assert_tokens( + &range(30..40), + &[Token::Tuple { len: 2 }, Token::U32(30), Token::U32(40), Token::TupleEnd], + ); +} + +#[test] +fn invalid_range_deserialization() { + assert_tokens::<TextRange>( + &range(62..92), + &[Token::Tuple { len: 2 }, Token::U32(62), Token::U32(92), Token::TupleEnd], + ); + assert_de_tokens_error::<TextRange>( + &[Token::Tuple { len: 2 }, Token::U32(92), Token::U32(62), Token::TupleEnd], + "invalid range: 92..62", + ); +}
diff --git a/lib/ungrammar/Cargo.toml b/lib/ungrammar/Cargo.toml new file mode 100644 index 0000000..b8dcb4a --- /dev/null +++ b/lib/ungrammar/Cargo.toml
@@ -0,0 +1,13 @@ +[package] +name = "ungrammar" +description = "A DSL for describing concrete syntax trees" +version = "1.16.1" +license = "MIT OR Apache-2.0" +repository = "https://github.com/rust-analyzer/ungrammar" +edition = "2024" + +[dependencies] +# nope + +[lints] +workspace = true
diff --git a/lib/ungrammar/README.md b/lib/ungrammar/README.md new file mode 100644 index 0000000..a5e130f --- /dev/null +++ b/lib/ungrammar/README.md
@@ -0,0 +1,21 @@ +# ungrammar + +A DSL for specifying concrete syntax trees. + +See the [blog post][post] for an introduction. + +See [./rust.ungram](./rust.ungram) for an example. + +## Editor support + +- Vim + - [vim-ungrammar][] + - [ungrammar.vim][] +- VSCode + - [ungrammar-tools][] + +[post]: + https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html +[vim-ungrammar]: https://github.com/Iron-E/vim-ungrammar +[ungrammar.vim]: https://github.com/drtychai/ungrammar.vim +[ungrammar-tools]: https://github.com/azdavis/ungrammar-tools
diff --git a/lib/ungrammar/rust.ungram b/lib/ungrammar/rust.ungram new file mode 100644 index 0000000..7781e71 --- /dev/null +++ b/lib/ungrammar/rust.ungram
@@ -0,0 +1,666 @@ +// Note this grammar file does not reflect the current language as this file is no longer maintained. + +// Rust Un-Grammar. +// +// This grammar specifies the structure of Rust's concrete syntax tree. +// It does not specify parsing rules (ambiguities, precedence, etc are out of scope). +// Tokens are processed -- contextual keywords are recognised, compound operators glued. +// +// Legend: +// +// // -- comment +// Name = -- non-terminal definition +// 'ident' -- token (terminal) +// A B -- sequence +// A | B -- alternation +// A* -- zero or more repetition +// A? -- zero or one repetition +// (A) -- same as A +// label:A -- suggested name for field of AST node + +//*************************// +// Names, Paths and Macros // +//*************************// + +Name = + 'ident' | 'self' + +NameRef = + 'ident' | 'int_number' | 'self' | 'super' | 'crate' | 'Self' + +Lifetime = + 'lifetime_ident' + +Path = + (qualifier:Path '::')? segment:PathSegment + +PathSegment = + '::'? NameRef +| NameRef GenericArgList? +| NameRef ParamList RetType? +| '<' PathType ('as' PathType)? '>' + +GenericArgList = + '::'? '<' (GenericArg (',' GenericArg)* ','?)? '>' + +GenericArg = + TypeArg +| AssocTypeArg +| LifetimeArg +| ConstArg + +TypeArg = + Type + +AssocTypeArg = + NameRef GenericParamList? (':' TypeBoundList | '=' Type) + +LifetimeArg = + Lifetime + +ConstArg = + Expr + +MacroCall = + Attr* Path '!' TokenTree ';'? + +TokenTree = + '(' ')' +| '{' '}' +| '[' ']' + +MacroItems = + Item* + +MacroStmts = + statements:Stmt* + Expr? + +//*************************// +// Items // +//*************************// + +SourceFile = + 'shebang'? + Attr* + Item* + +Item = + Const +| Enum +| ExternBlock +| ExternCrate +| Fn +| Impl +| MacroCall +| MacroRules +| MacroDef +| Module +| Static +| Struct +| Trait +| TypeAlias +| Union +| Use + +MacroRules = + Attr* Visibility? + 'macro_rules' '!' Name + TokenTree + +MacroDef = + Attr* Visibility? + 'macro' Name args:TokenTree? 
+ body:TokenTree + +Module = + Attr* Visibility? + 'mod' Name + (ItemList | ';') + +ItemList = + '{' Attr* Item* '}' + +ExternCrate = + Attr* Visibility? + 'extern' 'crate' NameRef Rename? ';' + +Rename = + 'as' (Name | '_') + +Use = + Attr* Visibility? + 'use' UseTree ';' + +UseTree = + (Path? '::')? ('*' | UseTreeList) +| Path Rename? + +UseTreeList = + '{' (UseTree (',' UseTree)* ','?)? '}' + +Fn = + Attr* Visibility? + 'default'? 'const'? 'async'? 'unsafe'? Abi? + 'fn' Name GenericParamList? ParamList RetType? WhereClause? + (body:BlockExpr | ';') + +Abi = + 'extern' 'string'? + +ParamList = + '('( + SelfParam + | (SelfParam ',')? (Param (',' Param)* ','?)? + )')' +| '|' (Param (',' Param)* ','?)? '|' + +SelfParam = + Attr* ( + ('&' Lifetime?)? 'mut'? Name + | 'mut'? Name ':' Type + ) + +Param = + Attr* ( + Pat (':' Type)? + | Type + | '...' + ) + +RetType = + '->' Type + +TypeAlias = + Attr* Visibility? + 'default'? + 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause? + ('=' Type)? ';' + +Struct = + Attr* Visibility? + 'struct' Name GenericParamList? ( + WhereClause? (RecordFieldList | ';') + | TupleFieldList WhereClause? ';' + ) + +RecordFieldList = + '{' fields:(RecordField (',' RecordField)* ','?)? '}' + +RecordField = + Attr* Visibility? + Name ':' Type + +TupleFieldList = + '(' fields:(TupleField (',' TupleField)* ','?)? ')' + +TupleField = + Attr* Visibility? + Type + +FieldList = + RecordFieldList +| TupleFieldList + +Enum = + Attr* Visibility? + 'enum' Name GenericParamList? WhereClause? + VariantList + +VariantList = + '{' (Variant (',' Variant)* ','?)? '}' + +Variant = + Attr* Visibility? + Name FieldList? ('=' Expr)? + +Union = + Attr* Visibility? + 'union' Name GenericParamList? WhereClause? + RecordFieldList + +// A Data Type. +// +// Not used directly in the grammar, but handy to have anyway. +Adt = + Enum +| Struct +| Union + +Const = + Attr* Visibility? + 'default'? + 'const' (Name | '_') ':' Type + ('=' body:Expr)? 
';' + +Static = + Attr* Visibility? + 'static' 'mut'? Name ':' Type + ('=' body:Expr)? ';' + +Trait = + Attr* Visibility? + 'unsafe'? 'auto'? + 'trait' Name GenericParamList? (':' TypeBoundList?)? WhereClause? + AssocItemList + +AssocItemList = + '{' Attr* AssocItem* '}' + +AssocItem = + Const +| Fn +| MacroCall +| TypeAlias + +Impl = + Attr* Visibility? + 'default'? 'unsafe'? + 'impl' GenericParamList? ('const'? '!'? trait:Type 'for')? self_ty:Type WhereClause? + AssocItemList + +ExternBlock = + Attr* 'unsafe'? Abi ExternItemList + +ExternItemList = + '{' Attr* ExternItem* '}' + +ExternItem = + Fn +| MacroCall +| Static +| TypeAlias + +GenericParamList = + '<' (GenericParam (',' GenericParam)* ','?)? '>' + +GenericParam = + ConstParam +| LifetimeParam +| TypeParam + +TypeParam = + Attr* Name (':' TypeBoundList?)? + ('=' default_type:Type)? + +ConstParam = + Attr* 'const' Name ':' Type + ('=' default_val:Expr)? + +LifetimeParam = + Attr* Lifetime (':' TypeBoundList?)? + +WhereClause = + 'where' predicates:(WherePred (',' WherePred)* ','?) + +WherePred = + ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList? + +Visibility = + 'pub' ('(' 'in'? Path ')')? + +Attr = + '#' '!'? '[' Meta ']' + +Meta = + Path ('=' Expr | TokenTree)? + +//****************************// +// Statements and Expressions // +//****************************// + +Stmt = + ';' +| ExprStmt +| Item +| LetStmt + +LetStmt = + Attr* 'let' Pat (':' Type)? + '=' initializer:Expr + LetElse? + ';' + +LetElse = + 'else' BlockExpr + +ExprStmt = + Expr ';'? 
+ +Expr = + ArrayExpr +| AwaitExpr +| BinExpr +| BlockExpr +| BoxExpr +| BreakExpr +| CallExpr +| CastExpr +| ClosureExpr +| ContinueExpr +| FieldExpr +| ForExpr +| IfExpr +| IndexExpr +| Literal +| LoopExpr +| MacroCall +| MacroStmts +| MatchExpr +| MethodCallExpr +| ParenExpr +| PathExpr +| PrefixExpr +| RangeExpr +| RecordExpr +| RefExpr +| ReturnExpr +| TryExpr +| TupleExpr +| WhileExpr +| YieldExpr +| LetExpr +| UnderscoreExpr + +Literal = + Attr* value:( + 'int_number' | 'float_number' + | 'string' | 'raw_string' + | 'byte_string' | 'raw_byte_string' + | 'true' | 'false' + | 'char' | 'byte' + ) + +PathExpr = + Attr* Path + +StmtList = + '{' + Attr* + statements:Stmt* + tail_expr:Expr? + '}' + +RefExpr = + Attr* '&' ('raw' | 'mut' | 'const') Expr + +TryExpr = + Attr* Expr '?' + +BlockExpr = + Attr* Label? ('try' | 'unsafe' | 'async' | 'const') StmtList + +PrefixExpr = + Attr* op:('-' | '!' | '*') Expr + +BinExpr = + Attr* + lhs:Expr + op:( + '||' | '&&' + | '==' | '!=' | '<=' | '>=' | '<' | '>' + | '+' | '*' | '-' | '/' | '%' | '<<' | '>>' | '^' | '|' | '&' + | '=' | '+=' | '/=' | '*=' | '%=' | '>>=' | '<<=' | '-=' | '|=' | '&=' | '^=' + ) + rhs:Expr + +CastExpr = + Attr* Expr 'as' Type + +ParenExpr = + Attr* '(' Attr* Expr ')' + +ArrayExpr = + Attr* '[' Attr* ( + (Expr (',' Expr)* ','?)? + | Expr ';' Expr + ) ']' + +IndexExpr = + Attr* base:Expr '[' index:Expr ']' + +TupleExpr = + Attr* '(' Attr* fields:(Expr (',' Expr)* ','?)? ')' + +RecordExpr = + Path RecordExprFieldList + +RecordExprFieldList = + '{' + Attr* + fields:(RecordExprField (',' RecordExprField)* ','?)? + ('..' spread:Expr?)? + '}' + +RecordExprField = + Attr* (NameRef ':')? Expr + +CallExpr = + Attr* Expr ArgList + +ArgList = + '(' args:(Expr (',' Expr)* ','?)? ')' + +MethodCallExpr = + Attr* receiver:Expr '.' NameRef GenericArgList? ArgList + +FieldExpr = + Attr* Expr '.' NameRef + +ClosureExpr = + Attr* 'static'? 'async'? 'move'? ParamList RetType? 
+ body:Expr + +IfExpr = + Attr* 'if' condition:Expr then_branch:BlockExpr + ('else' else_branch:(IfExpr | BlockExpr))? + +LoopExpr = + Attr* Label? 'loop' + loop_body:BlockExpr + +ForExpr = + Attr* Label? 'for' Pat 'in' iterable:Expr + loop_body:BlockExpr + +WhileExpr = + Attr* Label? 'while' condition:Expr + loop_body:BlockExpr + +Label = + Lifetime ':' + +BreakExpr = + Attr* 'break' Lifetime? Expr? + +ContinueExpr = + Attr* 'continue' Lifetime? + +RangeExpr = + Attr* start:Expr? op:('..' | '..=') end:Expr? + +MatchExpr = + Attr* 'match' Expr MatchArmList + +MatchArmList = + '{' + Attr* + arms:MatchArm* + '}' + +MatchArm = + Attr* Pat guard:MatchGuard? '=>' Expr ','? + +MatchGuard = + 'if' condition:Expr + +ReturnExpr = + Attr* 'return' Expr? + +YieldExpr = + Attr* 'yield' Expr? + +LetExpr = + Attr* 'let' Pat '=' Expr + +UnderscoreExpr = + Attr* '_' + +AwaitExpr = + Attr* Expr '.' 'await' + +BoxExpr = + Attr* 'box' Expr + +//*************************// +// Types // +//*************************// + +Type = + ArrayType +| DynTraitType +| FnPtrType +| ForType +| ImplTraitType +| InferType +| MacroType +| NeverType +| ParenType +| PathType +| PtrType +| RefType +| SliceType +| TupleType + +ParenType = + '(' Type ')' + +NeverType = + '!' + +MacroType = + MacroCall + +PathType = + Path + +TupleType = + '(' fields:(Type (',' Type)* ','?)? ')' + +PtrType = + '*' ('const' | 'mut') Type + +RefType = + '&' Lifetime? 'mut'? Type + +ArrayType = + '[' Type ';' Expr ']' + +SliceType = + '[' Type ']' + +InferType = + '_' + +FnPtrType = + 'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType? + +ForType = + 'for' GenericParamList Type + +ImplTraitType = + 'impl' TypeBoundList + +DynTraitType = + 'dyn' TypeBoundList + +TypeBoundList = + bounds:(TypeBound ('+' TypeBound)* '+'?) + +TypeBound = + Lifetime +| ('?' | '~' 'const')? 
Type + +//************************// +// Patterns // +//************************// + +Pat = + IdentPat +| BoxPat +| RestPat +| LiteralPat +| MacroPat +| OrPat +| ParenPat +| PathPat +| WildcardPat +| RangePat +| RecordPat +| RefPat +| SlicePat +| TuplePat +| TupleStructPat +| ConstBlockPat + +LiteralPat = + Literal + +IdentPat = + Attr* 'ref'? 'mut'? Name ('@' Pat)? + +WildcardPat = + '_' + +RangePat = + // 1.. + start:Pat op:('..' | '..=') + // 1..2 + | start:Pat op:('..' | '..=') end:Pat + // ..2 + | op:('..' | '..=') end:Pat + +RefPat = + '&' 'mut'? Pat + +RecordPat = + Path RecordPatFieldList + +RecordPatFieldList = + '{' + fields:(RecordPatField (',' RecordPatField)* ','?)? + RestPat? + '}' + +RecordPatField = + Attr* (NameRef ':')? Pat + +TupleStructPat = + Path '(' fields:(Pat (',' Pat)* ','?)? ')' + +TuplePat = + '(' fields:(Pat (',' Pat)* ','?)? ')' + +ParenPat = + '(' Pat ')' + +SlicePat = + '[' (Pat (',' Pat)* ','?)? ']' + +PathPat = + Path + +OrPat = + (Pat ('|' Pat)* '|'?) + +BoxPat = + 'box' Pat + +RestPat = + Attr* '..' + +MacroPat = + MacroCall + +ConstBlockPat = + 'const' BlockExpr
diff --git a/lib/ungrammar/src/error.rs b/lib/ungrammar/src/error.rs new file mode 100644 index 0000000..144f9fc --- /dev/null +++ b/lib/ungrammar/src/error.rs
@@ -0,0 +1,47 @@ +//! Boilerplate error definitions. +use std::fmt; + +use crate::lexer::Location; + +/// A type alias for std's Result with the Error as our error type. +pub type Result<T, E = Error> = std::result::Result<T, E>; + +/// An error encountered when parsing a Grammar. +#[derive(Debug)] +pub struct Error { + pub(crate) message: String, + pub(crate) location: Option<Location>, +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(loc) = self.location { + // Report 1-based indices, to match text editors + write!(f, "{}:{}: ", loc.line + 1, loc.column + 1)? + } + write!(f, "{}", self.message) + } +} + +impl std::error::Error for Error {} + +impl Error { + pub(crate) fn with_location(self, location: Location) -> Error { + Error { location: Some(location), ..self } + } +} + +macro_rules! _format_err { + ($($tt:tt)*) => { + $crate::error::Error { + message: format!($($tt)*), + location: None, + } + }; +} +pub(crate) use _format_err as format_err; + +macro_rules! _bail { + ($($tt:tt)*) => { return Err($crate::error::format_err!($($tt)*)) }; +} +pub(crate) use _bail as bail;
diff --git a/lib/ungrammar/src/lexer.rs b/lib/ungrammar/src/lexer.rs new file mode 100644 index 0000000..23da09a --- /dev/null +++ b/lib/ungrammar/src/lexer.rs
@@ -0,0 +1,129 @@ +//! Simple hand-written ungrammar lexer +use crate::error::{Result, bail}; + +#[derive(Debug, Eq, PartialEq)] +pub(crate) enum TokenKind { + Node(String), + Token(String), + Eq, + Star, + Pipe, + QMark, + Colon, + LParen, + RParen, +} + +#[derive(Debug)] +pub(crate) struct Token { + pub(crate) kind: TokenKind, + pub(crate) loc: Location, +} + +#[derive(Copy, Clone, Default, Debug)] +pub(crate) struct Location { + pub(crate) line: usize, + pub(crate) column: usize, +} + +impl Location { + fn advance(&mut self, text: &str) { + match text.rfind('\n') { + Some(idx) => { + self.line += text.chars().filter(|&it| it == '\n').count(); + self.column = text[idx + 1..].chars().count(); + } + None => self.column += text.chars().count(), + } + } +} + +pub(crate) fn tokenize(mut input: &str) -> Result<Vec<Token>> { + let mut res = Vec::new(); + let mut loc = Location::default(); + while !input.is_empty() { + let old_input = input; + skip_ws(&mut input); + skip_comment(&mut input); + if old_input.len() == input.len() { + match advance(&mut input) { + Ok(kind) => { + res.push(Token { kind, loc }); + } + Err(err) => return Err(err.with_location(loc)), + } + } + let consumed = old_input.len() - input.len(); + loc.advance(&old_input[..consumed]); + } + + Ok(res) +} + +fn skip_ws(input: &mut &str) { + *input = input.trim_start_matches(is_whitespace) +} +fn skip_comment(input: &mut &str) { + if input.starts_with("//") { + let idx = input.find('\n').map_or(input.len(), |it| it + 1); + *input = &input[idx..] + } +} + +fn advance(input: &mut &str) -> Result<TokenKind> { + let mut chars = input.chars(); + let c = chars.next().unwrap(); + let res = match c { + '=' => TokenKind::Eq, + '*' => TokenKind::Star, + '?' 
=> TokenKind::QMark, + '(' => TokenKind::LParen, + ')' => TokenKind::RParen, + '|' => TokenKind::Pipe, + ':' => TokenKind::Colon, + '\'' => { + let mut buf = String::new(); + loop { + match chars.next() { + None => bail!("unclosed token literal"), + Some('\\') => match chars.next() { + Some(c) if is_escapable(c) => buf.push(c), + _ => bail!("invalid escape in token literal"), + }, + Some('\'') => break, + Some(c) => buf.push(c), + } + } + TokenKind::Token(buf) + } + c if is_ident_char(c) => { + let mut buf = String::new(); + buf.push(c); + loop { + match chars.clone().next() { + Some(c) if is_ident_char(c) => { + chars.next(); + buf.push(c); + } + _ => break, + } + } + TokenKind::Node(buf) + } + '\r' => bail!("unexpected `\\r`, only Unix-style line endings allowed"), + c => bail!("unexpected character: `{}`", c), + }; + + *input = chars.as_str(); + Ok(res) +} + +fn is_escapable(c: char) -> bool { + matches!(c, '\\' | '\'') +} +fn is_whitespace(c: char) -> bool { + matches!(c, ' ' | '\t' | '\n') +} +fn is_ident_char(c: char) -> bool { + matches!(c, 'a'..='z' | 'A'..='Z' | '_') +}
diff --git a/lib/ungrammar/src/lib.rs b/lib/ungrammar/src/lib.rs new file mode 100644 index 0000000..6adf8ef --- /dev/null +++ b/lib/ungrammar/src/lib.rs
@@ -0,0 +1,137 @@ +//! Ungrammar -- a DSL for specifying concrete syntax tree grammar. +//! +//! Producing a parser is an explicit non-goal -- it's ok for this grammar to be +//! ambiguous, non LL, non LR, etc. +//! +//! See this +//! [introductory post](https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html) +//! for details. + +#![deny(missing_debug_implementations)] +#![deny(missing_docs)] +#![deny(rust_2018_idioms)] + +mod error; +mod lexer; +mod parser; + +use std::{ops, str::FromStr}; + +pub use error::{Error, Result}; + +/// Returns a Rust grammar. +pub fn rust_grammar() -> Grammar { + let src = include_str!("../rust.ungram"); + src.parse().unwrap() +} + +/// A node, like `A = 'b' | 'c'`. +/// +/// Indexing into a [`Grammar`] with a [`Node`] returns a reference to a +/// [`NodeData`]. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Node(usize); + +/// A token, denoted with single quotes, like `'+'` or `'struct'`. +/// +/// Indexing into a [`Grammar`] with a [`Token`] returns a reference to a +/// [`TokenData`]. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Token(usize); + +/// An Ungrammar grammar. +#[derive(Default, Debug)] +pub struct Grammar { + nodes: Vec<NodeData>, + tokens: Vec<TokenData>, +} + +impl FromStr for Grammar { + type Err = Error; + fn from_str(s: &str) -> Result<Self> { + let tokens = lexer::tokenize(s)?; + parser::parse(tokens) + } +} + +impl Grammar { + /// Returns an iterator over all nodes in the grammar. + pub fn iter(&self) -> impl Iterator<Item = Node> + '_ { + (0..self.nodes.len()).map(Node) + } + + /// Returns an iterator over all tokens in the grammar. 
+ pub fn tokens(&self) -> impl Iterator<Item = Token> + '_ { + (0..self.tokens.len()).map(Token) + } +} + +impl ops::Index<Node> for Grammar { + type Output = NodeData; + fn index(&self, Node(index): Node) -> &NodeData { + &self.nodes[index] + } +} + +impl ops::Index<Token> for Grammar { + type Output = TokenData; + fn index(&self, Token(index): Token) -> &TokenData { + &self.tokens[index] + } +} + +/// Data about a node. +#[derive(Debug)] +pub struct NodeData { + /// The name of the node. + /// + /// In the rule `A = 'b' | 'c'`, this is `"A"`. + pub name: String, + /// The rule for this node. + /// + /// In the rule `A = 'b' | 'c'`, this represents `'b' | 'c'`. + pub rule: Rule, +} + +/// Data about a token. +#[derive(Debug)] +pub struct TokenData { + /// The name of the token. + pub name: String, +} + +/// A production rule. +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum Rule { + /// A labeled rule, like `a:B` (`"a"` is the label, `B` is the rule). + Labeled { + /// The label. + label: String, + /// The rule. + rule: Box<Rule>, + }, + /// A node, like `A`. + Node(Node), + /// A token, like `'struct'`. + Token(Token), + /// A sequence of rules, like `'while' '(' Expr ')' Stmt`. + Seq(Vec<Rule>), + /// An alternative between many rules, like `'+' | '-' | '*' | '/'`. + Alt(Vec<Rule>), + /// An optional rule, like `A?`. + Opt(Box<Rule>), + /// A repeated rule, like `A*`. + Rep(Box<Rule>), +} + +#[test] +fn smoke() { + let grammar = include_str!("../ungrammar.ungram"); + let grammar = grammar.parse::<Grammar>().unwrap(); + drop(grammar) +} + +#[test] +fn test_rust_grammar() { + let _ = rust_grammar(); +}
diff --git a/lib/ungrammar/src/parser.rs b/lib/ungrammar/src/parser.rs new file mode 100644 index 0000000..69c7702 --- /dev/null +++ b/lib/ungrammar/src/parser.rs
@@ -0,0 +1,207 @@ +//! Simple hand-written ungrammar parser. +#![allow(clippy::disallowed_types)] +use std::collections::HashMap; + +use crate::{ + Grammar, Node, NodeData, Rule, Token, TokenData, + error::{Result, format_err}, + lexer::{self, TokenKind}, +}; + +macro_rules! bail { + ($loc:expr, $($tt:tt)*) => {{ + let err = $crate::error::format_err!($($tt)*) + .with_location($loc); + return Err(err); + }}; +} + +pub(crate) fn parse(tokens: Vec<lexer::Token>) -> Result<Grammar> { + let mut p = Parser::new(tokens); + while !p.is_eof() { + node(&mut p)?; + } + p.finish() +} + +#[derive(Default)] +struct Parser { + grammar: Grammar, + tokens: Vec<lexer::Token>, + node_table: HashMap<String, Node>, + token_table: HashMap<String, Token>, +} + +const DUMMY_RULE: Rule = Rule::Node(Node(!0)); + +impl Parser { + fn new(mut tokens: Vec<lexer::Token>) -> Parser { + tokens.reverse(); + Parser { tokens, ..Parser::default() } + } + + fn peek(&self) -> Option<&lexer::Token> { + self.peek_n(0) + } + fn peek_n(&self, n: usize) -> Option<&lexer::Token> { + self.tokens.iter().nth_back(n) + } + fn bump(&mut self) -> Result<lexer::Token> { + self.tokens.pop().ok_or_else(|| format_err!("unexpected EOF")) + } + fn expect(&mut self, kind: TokenKind, what: &str) -> Result<()> { + let token = self.bump()?; + if token.kind != kind { + bail!(token.loc, "unexpected token, expected `{}`", what); + } + Ok(()) + } + fn is_eof(&self) -> bool { + self.tokens.is_empty() + } + fn finish(self) -> Result<Grammar> { + for node_data in &self.grammar.nodes { + if matches!(node_data.rule, DUMMY_RULE) { + crate::error::bail!("Undefined node: {}", node_data.name) + } + } + Ok(self.grammar) + } + fn intern_node(&mut self, name: String) -> Node { + let len = self.node_table.len(); + let grammar = &mut self.grammar; + *self.node_table.entry(name.clone()).or_insert_with(|| { + grammar.nodes.push(NodeData { name, rule: DUMMY_RULE }); + Node(len) + }) + } + fn intern_token(&mut self, name: String) -> Token { + 
let len = self.token_table.len(); + let grammar = &mut self.grammar; + *self.token_table.entry(name.clone()).or_insert_with(|| { + grammar.tokens.push(TokenData { name }); + Token(len) + }) + } +} + +fn node(p: &mut Parser) -> Result<()> { + let token = p.bump()?; + let node = match token.kind { + TokenKind::Node(it) => p.intern_node(it), + _ => bail!(token.loc, "expected ident"), + }; + p.expect(TokenKind::Eq, "=")?; + if !matches!(p.grammar[node].rule, DUMMY_RULE) { + bail!(token.loc, "duplicate rule: `{}`", p.grammar[node].name) + } + + let rule = rule(p)?; + p.grammar.nodes[node.0].rule = rule; + Ok(()) +} + +fn rule(p: &mut Parser) -> Result<Rule> { + if let Some(lexer::Token { kind: TokenKind::Pipe, loc }) = p.peek() { + bail!( + *loc, + "The first element in a sequence of productions or alternatives \ + must not have a leading pipe (`|`)" + ); + } + + let lhs = seq_rule(p)?; + let mut alt = vec![lhs]; + while let Some(token) = p.peek() { + if token.kind != TokenKind::Pipe { + break; + } + p.bump()?; + let rule = seq_rule(p)?; + alt.push(rule) + } + let res = if alt.len() == 1 { alt.pop().unwrap() } else { Rule::Alt(alt) }; + Ok(res) +} + +fn seq_rule(p: &mut Parser) -> Result<Rule> { + let lhs = atom_rule(p)?; + + let mut seq = vec![lhs]; + while let Some(rule) = opt_atom_rule(p)? { + seq.push(rule) + } + let res = if seq.len() == 1 { seq.pop().unwrap() } else { Rule::Seq(seq) }; + Ok(res) +} + +fn atom_rule(p: &mut Parser) -> Result<Rule> { + match opt_atom_rule(p)? 
{ + Some(it) => Ok(it), + None => { + let token = p.bump()?; + bail!(token.loc, "unexpected token") + } + } +} + +fn opt_atom_rule(p: &mut Parser) -> Result<Option<Rule>> { + let token = match p.peek() { + Some(it) => it, + None => return Ok(None), + }; + let mut res = match &token.kind { + TokenKind::Node(name) => { + if let Some(lookahead) = p.peek_n(1) { + match lookahead.kind { + TokenKind::Eq => return Ok(None), + TokenKind::Colon => { + let label = name.clone(); + p.bump()?; + p.bump()?; + let rule = atom_rule(p)?; + let res = Rule::Labeled { label, rule: Box::new(rule) }; + return Ok(Some(res)); + } + _ => (), + } + } + match p.peek_n(1) { + Some(token) if token.kind == TokenKind::Eq => return Ok(None), + _ => (), + } + let name = name.clone(); + p.bump()?; + let node = p.intern_node(name); + Rule::Node(node) + } + TokenKind::Token(name) => { + let name = name.clone(); + p.bump()?; + let token = p.intern_token(name); + Rule::Token(token) + } + TokenKind::LParen => { + p.bump()?; + let rule = rule(p)?; + p.expect(TokenKind::RParen, ")")?; + rule + } + _ => return Ok(None), + }; + + if let Some(token) = p.peek() { + match &token.kind { + TokenKind::QMark => { + p.bump()?; + res = Rule::Opt(Box::new(res)); + } + TokenKind::Star => { + p.bump()?; + res = Rule::Rep(Box::new(res)); + } + _ => (), + } + } + Ok(Some(res)) +}
diff --git a/lib/ungrammar/ungrammar.ungram b/lib/ungrammar/ungrammar.ungram new file mode 100644 index 0000000..856a6ce --- /dev/null +++ b/lib/ungrammar/ungrammar.ungram
@@ -0,0 +1,16 @@ +// The grammar of ungrammar, written in ungrammar itself +Grammar = + Node * + +Node = + name:'ident' '=' Rule + +Rule = + 'ident' +| 'token_ident' +| Rule * +| Rule ( '|' Rule) * +| Rule '?' +| Rule '*' +| '(' Rule ')' +| label:'ident' ':' Rule
diff --git a/lib/ungrammar/ungrammar2json/Cargo.toml b/lib/ungrammar/ungrammar2json/Cargo.toml new file mode 100644 index 0000000..0fa08bb --- /dev/null +++ b/lib/ungrammar/ungrammar2json/Cargo.toml
@@ -0,0 +1,12 @@ +[package] +name = "ungrammar2json" +description = "Convert ungrammar files to JSON" +version = "1.0.0" +license = "MIT OR Apache-2.0" +repository = "https://github.com/matklad/ungrammar" +authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"] +edition = "2024" + +[dependencies] +write-json = "0.1.1" +ungrammar = { path = "../", version = "1.16.1" }
diff --git a/lib/ungrammar/ungrammar2json/src/main.rs b/lib/ungrammar/ungrammar2json/src/main.rs new file mode 100644 index 0000000..3b8588c --- /dev/null +++ b/lib/ungrammar/ungrammar2json/src/main.rs
@@ -0,0 +1,78 @@ +#![allow(clippy::print_stderr, clippy::print_stdout)] +use std::{ + env, + io::{self, Read}, + process, +}; + +use ungrammar::{Grammar, Rule}; + +fn main() { + if let Err(err) = try_main() { + eprintln!("{}", err); + process::exit(101); + } +} + +fn try_main() -> io::Result<()> { + if env::args().count() != 1 { + eprintln!("Usage: ungrammar2json < grammar.ungram > grammar.json"); + return Ok(()); + } + let grammar = read_stdin()?; + let grammar = grammar + .parse::<Grammar>() + .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?; + + let mut buf = String::new(); + grammar_to_json(&grammar, write_json::object(&mut buf)); + println!("{}", buf); + Ok(()) +} + +fn read_stdin() -> io::Result<String> { + let mut buf = String::new(); + io::stdin().lock().read_to_string(&mut buf)?; + Ok(buf) +} + +fn grammar_to_json(grammar: &Grammar, mut obj: write_json::Object<'_>) { + for node in grammar.iter() { + let node = &grammar[node]; + rule_to_json(grammar, &node.rule, obj.object(&node.name)); + } +} + +fn rule_to_json(grammar: &Grammar, rule: &Rule, mut obj: write_json::Object<'_>) { + match rule { + Rule::Labeled { label, rule } => { + obj.string("label", label); + rule_to_json(grammar, rule, obj.object("rule")) + } + Rule::Node(node) => { + obj.string("node", &grammar[*node].name); + } + Rule::Token(token) => { + obj.string("token", &grammar[*token].name); + } + Rule::Seq(rules) | Rule::Alt(rules) => { + let tag = match rule { + Rule::Seq(_) => "seq", + Rule::Alt(_) => "alt", + _ => unreachable!(), + }; + let mut array = obj.array(tag); + for rule in rules { + rule_to_json(grammar, rule, array.object()); + } + } + Rule::Opt(arg) | Rule::Rep(arg) => { + let tag = match rule { + Rule::Opt(_) => "opt", + Rule::Rep(_) => "rep", + _ => unreachable!(), + }; + rule_to_json(grammar, arg, obj.object(tag)); + } + } +}
diff --git a/rust-version b/rust-version index 0e89b4a..f545ef4 100644 --- a/rust-version +++ b/rust-version
@@ -1 +1 @@ -c5dabe8cf798123087d094f06417f5a767ca73e8 +6159a44067ebce42b38f062cc7df267a1348e092
diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs index 9bd87a7..18f6c1f 100644 --- a/xtask/src/codegen/grammar.rs +++ b/xtask/src/codegen/grammar.rs
@@ -706,7 +706,23 @@ } }; - add_preamble(crate::flags::CodegenType::Grammar, reformat(ast.to_string())) + let result = add_preamble(crate::flags::CodegenType::Grammar, reformat(ast.to_string())); + + if let Some(start) = result.find("macro_rules ! T_") + && let Some(macro_end) = result[start..].find("\nimpl ::core::marker::Copy") + { + let macro_section = &result[start..start + macro_end]; + let formatted_macro = macro_section + .replace("T_ { [", "T_ {\n [") + .replace(" ; [", ";\n [") + .replace(" ; }", ";\n}") + .trim_end() + .to_owned() + + "\n"; + return result.replace(macro_section, &formatted_macro); + } + + result } fn to_upper_snake_case(s: &str) -> String {
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs index 40997eb..ebfc7d0 100644 --- a/xtask/src/tidy.rs +++ b/xtask/src/tidy.rs
@@ -127,21 +127,24 @@ } fn check_licenses(sh: &Shell) { - const EXPECTED: [&str; 20] = [ + const EXPECTED: &[&str] = &[ "(MIT OR Apache-2.0) AND Unicode-3.0", "0BSD OR MIT OR Apache-2.0", - "Apache-2.0", + "Apache-2.0 / MIT", "Apache-2.0 OR BSL-1.0", "Apache-2.0 OR MIT", - "Apache-2.0 WITH LLVM-exception", "Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT", + "Apache-2.0 WITH LLVM-exception", + "Apache-2.0", "Apache-2.0/MIT", + "BSD-2-Clause OR Apache-2.0 OR MIT", "CC0-1.0", "ISC", - "MIT", "MIT / Apache-2.0", + "MIT OR Apache-2.0 OR LGPL-2.1-or-later", "MIT OR Apache-2.0", "MIT OR Zlib OR Apache-2.0", + "MIT", "MIT/Apache-2.0", "MPL-2.0", "Unicode-3.0", @@ -159,18 +162,20 @@ .collect::<Vec<_>>(); licenses.sort_unstable(); licenses.dedup(); - if licenses != EXPECTED { + let mut expected = EXPECTED.to_vec(); + expected.sort_unstable(); + if licenses != expected { let mut diff = String::new(); diff.push_str("New Licenses:\n"); for &l in licenses.iter() { - if !EXPECTED.contains(&l) { + if !expected.contains(&l) { diff += &format!(" {l}\n") } } diff.push_str("\nMissing Licenses:\n"); - for l in EXPECTED { + for l in expected { if !licenses.contains(&l) { diff += &format!(" {l}\n") } @@ -178,7 +183,7 @@ panic!("different set of licenses!\n{diff}"); } - assert_eq!(licenses, EXPECTED); + assert_eq!(licenses, expected); } fn check_test_attrs(path: &Path, text: &str) {