Merge pull request #20702 from A4-Tacks/else-not-before-else
Fix `else` completion before the `else` keyword
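
A hedged sketch of the scenario the title refers to (illustrative only, not taken from the diff; `$0` marks the cursor, following rust-analyzer's test-fixture convention):

    fn f(cond: bool) {
        if cond {
            // ...
        } $0 else {
            // ...
        }
    }

Presumably the fix stops the `else` keyword from being suggested at `$0`, since an `else` block already follows and accepting the completion would produce `else else`.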
diff --git a/Cargo.lock b/Cargo.lock
index b70b89e..17dea1b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -280,35 +280,6 @@
]
[[package]]
-name = "chalk-recursive"
-version = "0.104.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "882959c242558cc686de7ff0aa59860295598d119e84a4b100215f44c3d606c4"
-dependencies = [
- "chalk-derive",
- "chalk-ir",
- "chalk-solve",
- "rustc-hash 1.1.0",
- "tracing",
-]
-
-[[package]]
-name = "chalk-solve"
-version = "0.104.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72860086494ccfa05bbd3779a74babb8ace27da9a0cbabffa1315223c7290927"
-dependencies = [
- "chalk-derive",
- "chalk-ir",
- "ena",
- "indexmap",
- "itertools 0.12.1",
- "petgraph",
- "rustc-hash 1.1.0",
- "tracing",
-]
-
-[[package]]
name = "clap"
version = "4.5.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -575,12 +546,6 @@
]
[[package]]
-name = "fixedbitset"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
-
-[[package]]
name = "flate2"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -706,7 +671,7 @@
"hir-ty",
"indexmap",
"intern",
- "itertools 0.14.0",
+ "itertools",
"ra-ap-rustc_type_ir",
"rustc-hash 2.1.1",
"smallvec",
@@ -739,7 +704,7 @@
"hir-expand",
"indexmap",
"intern",
- "itertools 0.14.0",
+ "itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"mbe",
"query-group-macro",
@@ -773,7 +738,7 @@
"either",
"expect-test",
"intern",
- "itertools 0.14.0",
+ "itertools",
"mbe",
"parser",
"query-group-macro",
@@ -799,8 +764,6 @@
"bitflags 2.9.1",
"chalk-derive",
"chalk-ir",
- "chalk-recursive",
- "chalk-solve",
"cov-mark",
"either",
"ena",
@@ -809,7 +772,7 @@
"hir-expand",
"indexmap",
"intern",
- "itertools 0.14.0",
+ "itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"oorandom",
"project-model",
@@ -949,7 +912,7 @@
"ide-db",
"ide-diagnostics",
"ide-ssr",
- "itertools 0.14.0",
+ "itertools",
"nohash-hasher",
"oorandom",
"profile",
@@ -977,7 +940,7 @@
"expect-test",
"hir",
"ide-db",
- "itertools 0.14.0",
+ "itertools",
"smallvec",
"stdx",
"syntax",
@@ -995,7 +958,7 @@
"expect-test",
"hir",
"ide-db",
- "itertools 0.14.0",
+ "itertools",
"smallvec",
"stdx",
"syntax",
@@ -1018,7 +981,7 @@
"fst",
"hir",
"indexmap",
- "itertools 0.14.0",
+ "itertools",
"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr",
"nohash-hasher",
@@ -1049,7 +1012,7 @@
"expect-test",
"hir",
"ide-db",
- "itertools 0.14.0",
+ "itertools",
"paths",
"serde_json",
"stdx",
@@ -1067,7 +1030,7 @@
"expect-test",
"hir",
"ide-db",
- "itertools 0.14.0",
+ "itertools",
"parser",
"syntax",
"test-fixture",
@@ -1148,15 +1111,6 @@
[[package]]
name = "itertools"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "itertools"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
@@ -1283,7 +1237,7 @@
"hir-expand",
"ide-db",
"intern",
- "itertools 0.14.0",
+ "itertools",
"proc-macro-api",
"project-model",
"span",
@@ -1641,16 +1595,6 @@
]
[[package]]
-name = "petgraph"
-version = "0.6.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
-dependencies = [
- "fixedbitset",
- "indexmap",
-]
-
-[[package]]
name = "pin-project-lite"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1784,7 +1728,7 @@
"cfg",
"expect-test",
"intern",
- "itertools 0.14.0",
+ "itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"paths",
"rustc-hash 2.1.1",
@@ -1863,9 +1807,9 @@
[[package]]
name = "ra-ap-rustc_abi"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8da95e732b424802b1f043ab4007c78a0fc515ab249587abbea4634bf5fdce9a"
+checksum = "597bb303548ddcca3a2eb05af254508aaf39cf334d4350bb5da51de1eb728859"
dependencies = [
"bitflags 2.9.1",
"ra-ap-rustc_hashes",
@@ -1875,24 +1819,24 @@
[[package]]
name = "ra-ap-rustc_ast_ir"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3838d9d7a3a5cdc511cfb6ad78740ce532f75a2366d3fc3b9853ea1b5c872779"
+checksum = "78982b4e4432ee4b938e47bb5c8f1a5a5a88c27c782f193aefcc12a3250bd2e2"
[[package]]
name = "ra-ap-rustc_hashes"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bdc8995d268d3bb4ece910f575ea5a063d6003e193ec155d15703b65882d53fb"
+checksum = "2f7f33a422f724cc1ab43972cdd76a556b17fc256f301d23be620adfc8351df7"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed0ccdf6e5627c6c3e54e571e52ce0bc8b94d5f0b94b7460269ca68a4706be69"
+checksum = "8a6006023c8be18c3ac225d69c1b42f55b3f597f3db03fb40764b4cf1454fd13"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@@ -1900,9 +1844,9 @@
[[package]]
name = "ra-ap-rustc_index_macros"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd28f42362b5c9fb9b8766c3189df02a402b13363600c6885e11027889f03ee6"
+checksum = "9217c29f7fcc30d07ed13a62262144f665410ef1460202599ae924f9ae47ad78"
dependencies = [
"proc-macro2",
"quote",
@@ -1911,9 +1855,9 @@
[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1c31a82f091b910a27ee53a86a9af28a2df10c3484e2f1bbfe70633aa84dee9"
+checksum = "573ad4f5da620e8ba1849d8862866abd7bc765c3d81cb2488c3ecbef33ce2c69"
dependencies = [
"memchr",
"unicode-properties",
@@ -1922,9 +1866,9 @@
[[package]]
name = "ra-ap-rustc_next_trait_solver"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f8cac6c2b5a8924209d4ca682cbc507252c58a664911e0ef463c112882ba6f72"
+checksum = "0d42b095b99e988aeb94622ae62ebda4b7de55d7d98846eec352b8a5a2b8a858"
dependencies = [
"derive-where",
"ra-ap-rustc_index",
@@ -1935,9 +1879,9 @@
[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a085a1cf902dcca8abbc537faaef154bbccbbb51850f779ce5484ae3782b5d8f"
+checksum = "a21b4e95cb45f840c172493c05f5b9471cf44adb2eccf95d76a0d76e88007870"
dependencies = [
"ra-ap-rustc_lexer",
"rustc-literal-escaper 0.0.5",
@@ -1945,9 +1889,9 @@
[[package]]
name = "ra-ap-rustc_pattern_analysis"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ba32e3985367bc34856b41c7604133649d4a367eb5d7bdf50623025731459d8"
+checksum = "b6aeacef1248066f7b67e7296ef135eeab6446d5d2a5c7f02b8d7b747b41e39b"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@@ -1958,9 +1902,9 @@
[[package]]
name = "ra-ap-rustc_type_ir"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c9911d72f75d85d21fe88374d7bcec94f2200feffb7234108a24cc3da7c3591"
+checksum = "52e35ee9e052406035016b8e6d54ca202bc39ccba1702780b33b2d5fb10d1da8"
dependencies = [
"arrayvec",
"bitflags 2.9.1",
@@ -1978,9 +1922,9 @@
[[package]]
name = "ra-ap-rustc_type_ir_macros"
-version = "0.128.0"
+version = "0.132.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22f539b87991683ce17cc52e62600fdf2b4a8af43952db30387edc1a576d3b43"
+checksum = "9b934c956b0c88df8176803416b69d85d2c392a69c8aa794a4c338f22c527d38"
dependencies = [
"proc-macro2",
"quote",
@@ -2062,7 +2006,7 @@
"ide-ssr",
"indexmap",
"intern",
- "itertools 0.14.0",
+ "itertools",
"load-cargo",
"lsp-server 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types",
@@ -2400,7 +2344,7 @@
"backtrace",
"crossbeam-channel",
"crossbeam-utils",
- "itertools 0.14.0",
+ "itertools",
"jod-thread",
"libc",
"miow",
@@ -2436,7 +2380,7 @@
dependencies = [
"either",
"expect-test",
- "itertools 0.14.0",
+ "itertools",
"parser",
"rayon",
"rowan",
@@ -3285,7 +3229,7 @@
"edition",
"either",
"flate2",
- "itertools 0.14.0",
+ "itertools",
"proc-macro2",
"quote",
"stdx",
diff --git a/Cargo.toml b/Cargo.toml
index c5ffad5..0401367 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -37,9 +37,7 @@
[patch.'crates-io']
# rowan = { path = "../rowan" }
-# chalk-solve = { path = "../chalk/chalk-solve" }
# chalk-ir = { path = "../chalk/chalk-ir" }
-# chalk-recursive = { path = "../chalk/chalk-recursive" }
# chalk-derive = { path = "../chalk/chalk-derive" }
# line-index = { path = "lib/line-index" }
# la-arena = { path = "lib/la-arena" }
@@ -89,14 +87,14 @@
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_lexer = { version = "0.128", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.128", default-features = false }
-ra-ap-rustc_index = { version = "0.128", default-features = false }
-ra-ap-rustc_abi = { version = "0.128", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.128", default-features = false }
-ra-ap-rustc_ast_ir = { version = "0.128", default-features = false }
-ra-ap-rustc_type_ir = { version = "0.128", default-features = false }
-ra-ap-rustc_next_trait_solver = { version = "0.128", default-features = false }
+ra-ap-rustc_lexer = { version = "0.132", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.132", default-features = false }
+ra-ap-rustc_index = { version = "0.132", default-features = false }
+ra-ap-rustc_abi = { version = "0.132", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.132", default-features = false }
+ra-ap-rustc_ast_ir = { version = "0.132", default-features = false }
+ra-ap-rustc_type_ir = { version = "0.132", default-features = false }
+ra-ap-rustc_next_trait_solver = { version = "0.132", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@@ -111,9 +109,7 @@
bitflags = "2.9.1"
cargo_metadata = "0.21.0"
camino = "1.1.10"
-chalk-solve = { version = "0.104.0", default-features = false }
chalk-ir = "0.104.0"
-chalk-recursive = { version = "0.104.0", default-features = false }
chalk-derive = "0.104.0"
crossbeam-channel = "0.5.15"
dissimilar = "1.0.10"
diff --git a/crates/hir-def/src/signatures.rs b/crates/hir-def/src/signatures.rs
index bf72faf..4763861 100644
--- a/crates/hir-def/src/signatures.rs
+++ b/crates/hir-def/src/signatures.rs
@@ -489,6 +489,7 @@
const HAS_TARGET_FEATURE = 1 << 9;
const DEPRECATED_SAFE_2024 = 1 << 10;
const EXPLICIT_SAFE = 1 << 11;
+ const RUSTC_INTRINSIC = 1 << 12;
}
}
@@ -522,6 +523,9 @@
if attrs.by_key(sym::target_feature).exists() {
flags.insert(FnFlags::HAS_TARGET_FEATURE);
}
+ if attrs.by_key(sym::rustc_intrinsic).exists() {
+ flags.insert(FnFlags::RUSTC_INTRINSIC);
+ }
let legacy_const_generics_indices = attrs.rustc_legacy_const_generics();
let source = loc.source(db);
@@ -617,6 +621,21 @@
pub fn has_target_feature(&self) -> bool {
self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
}
+
+ pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool {
+ let data = db.function_signature(id);
+ data.flags.contains(FnFlags::RUSTC_INTRINSIC)
+ // Keep this around for a bit until extern "rust-intrinsic" ABIs are no longer used
+ || match &data.abi {
+ Some(abi) => *abi == sym::rust_dash_intrinsic,
+ None => match id.lookup(db).container {
+ ItemContainerId::ExternBlockId(block) => {
+ block.abi(db) == Some(sym::rust_dash_intrinsic)
+ }
+ _ => false,
+ },
+ }
+ }
}
bitflags! {
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index 6418211..138d02e 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -24,9 +24,7 @@
tracing.workspace = true
rustc-hash.workspace = true
scoped-tls = "1.0.1"
-chalk-solve.workspace = true
chalk-ir.workspace = true
-chalk-recursive.workspace = true
chalk-derive.workspace = true
la-arena.workspace = true
triomphe.workspace = true
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index 82696a5..fd60ffc 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -3,27 +3,28 @@
//! reference to a type with the field `bar`. This is an approximation of the
//! logic in rustc (which lives in rustc_hir_analysis/check/autoderef.rs).
-use std::mem;
+use std::fmt;
-use chalk_ir::cast::Cast;
-use hir_def::lang_item::LangItem;
-use hir_expand::name::Name;
-use intern::sym;
+use hir_def::{TypeAliasId, lang_item::LangItem};
+use rustc_type_ir::inherent::{IntoKind, Ty as _};
+use tracing::debug;
use triomphe::Arc;
+use crate::next_solver::infer::InferOk;
use crate::{
- Canonical, Goal, Interner, ProjectionTyExt, TraitEnvironment, Ty, TyBuilder, TyKind,
- db::HirDatabase, infer::unify::InferenceTable, next_solver::mapping::ChalkToNextSolver,
+ TraitEnvironment,
+ db::HirDatabase,
+ infer::unify::InferenceTable,
+ next_solver::{
+ Ty, TyKind,
+ infer::traits::{ObligationCause, PredicateObligations},
+ mapping::{ChalkToNextSolver, NextSolverToChalk},
+ obligation_ctxt::ObligationCtxt,
+ },
};
const AUTODEREF_RECURSION_LIMIT: usize = 20;
-#[derive(Debug)]
-pub(crate) enum AutoderefKind {
- Builtin,
- Overloaded,
-}
-
/// Returns types that `ty` transitively dereferences to. This function is only meant to be used
/// outside `hir-ty`.
///
@@ -34,16 +35,17 @@
pub fn autoderef(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
- ty: Canonical<Ty>,
-) -> impl Iterator<Item = Ty> {
+ ty: crate::Canonical<crate::Ty>,
+) -> impl Iterator<Item = crate::Ty> {
let mut table = InferenceTable::new(db, env);
+ let interner = table.interner;
let ty = table.instantiate_canonical(ty);
- let mut autoderef = Autoderef::new_no_tracking(&mut table, ty, false, false);
+ let mut autoderef = Autoderef::new_no_tracking(&mut table, ty.to_nextsolver(interner));
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
// `ty` may contain unresolved inference variables. Since there's no chance they would be
// resolved, just replace with fallback type.
- let resolved = autoderef.table.resolve_completely(ty);
+ let resolved = autoderef.table.resolve_completely(ty.to_chalk(interner));
// If the deref chain contains a cycle (e.g. `A` derefs to `B` and `B` derefs to `A`), we
// would revisit some already visited types. Stop here to avoid duplication.
@@ -59,178 +61,267 @@
v.into_iter()
}
-trait TrackAutoderefSteps {
+pub(crate) trait TrackAutoderefSteps<'db>: Default + fmt::Debug {
fn len(&self) -> usize;
- fn push(&mut self, kind: AutoderefKind, ty: &Ty);
+ fn push(&mut self, ty: Ty<'db>, kind: AutoderefKind);
}
-impl TrackAutoderefSteps for usize {
+impl<'db> TrackAutoderefSteps<'db> for usize {
fn len(&self) -> usize {
*self
}
- fn push(&mut self, _: AutoderefKind, _: &Ty) {
+ fn push(&mut self, _: Ty<'db>, _: AutoderefKind) {
*self += 1;
}
}
-impl TrackAutoderefSteps for Vec<(AutoderefKind, Ty)> {
+impl<'db> TrackAutoderefSteps<'db> for Vec<(Ty<'db>, AutoderefKind)> {
fn len(&self) -> usize {
self.len()
}
- fn push(&mut self, kind: AutoderefKind, ty: &Ty) {
- self.push((kind, ty.clone()));
+ fn push(&mut self, ty: Ty<'db>, kind: AutoderefKind) {
+ self.push((ty, kind));
}
}
-#[derive(Debug)]
-pub(crate) struct Autoderef<'table, 'db, T = Vec<(AutoderefKind, Ty)>> {
- pub(crate) table: &'table mut InferenceTable<'db>,
- ty: Ty,
+#[derive(Copy, Clone, Debug)]
+pub(crate) enum AutoderefKind {
+ /// A true pointer type, such as `&T` and `*mut T`.
+ Builtin,
+ /// A type which must dispatch to a `Deref` implementation.
+ Overloaded,
+}
+
+struct AutoderefSnapshot<'db, Steps> {
at_start: bool,
- steps: T,
- explicit: bool,
+ reached_recursion_limit: bool,
+ steps: Steps,
+ cur_ty: Ty<'db>,
+ obligations: PredicateObligations<'db>,
+}
+
+#[derive(Clone, Copy)]
+struct AutoderefTraits {
+ trait_target: TypeAliasId,
+}
+
+/// Recursively dereference a type, considering both built-in
+/// dereferences (`*`) and the `Deref` trait.
+/// Although called `Autoderef` it can be configured to use the
+/// `Receiver` trait instead of the `Deref` trait.
+pub(crate) struct Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> {
+ // Meta infos:
+ pub(crate) table: &'a mut InferenceTable<'db>,
+ traits: Option<AutoderefTraits>,
+
+ // Current state:
+ state: AutoderefSnapshot<'db, Steps>,
+
+ // Configurations:
+ include_raw_pointers: bool,
use_receiver_trait: bool,
}
-impl<'table, 'db> Autoderef<'table, 'db> {
- pub(crate) fn new(
- table: &'table mut InferenceTable<'db>,
- ty: Ty,
- explicit: bool,
- use_receiver_trait: bool,
- ) -> Self {
- let ty = table.structurally_resolve_type(&ty);
- Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit, use_receiver_trait }
- }
+impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Iterator for Autoderef<'a, 'db, Steps> {
+ type Item = (Ty<'db>, usize);
- pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] {
- &self.steps
- }
-}
-
-impl<'table, 'db> Autoderef<'table, 'db, usize> {
- pub(crate) fn new_no_tracking(
- table: &'table mut InferenceTable<'db>,
- ty: Ty,
- explicit: bool,
- use_receiver_trait: bool,
- ) -> Self {
- let ty = table.structurally_resolve_type(&ty);
- Autoderef { table, ty, at_start: true, steps: 0, explicit, use_receiver_trait }
- }
-}
-
-#[allow(private_bounds)]
-impl<T: TrackAutoderefSteps> Autoderef<'_, '_, T> {
- pub(crate) fn step_count(&self) -> usize {
- self.steps.len()
- }
-
- pub(crate) fn final_ty(&self) -> Ty {
- self.ty.clone()
- }
-}
-
-impl<T: TrackAutoderefSteps> Iterator for Autoderef<'_, '_, T> {
- type Item = (Ty, usize);
-
- #[tracing::instrument(skip_all)]
fn next(&mut self) -> Option<Self::Item> {
- if mem::take(&mut self.at_start) {
- return Some((self.ty.clone(), 0));
+ debug!("autoderef: steps={:?}, cur_ty={:?}", self.state.steps, self.state.cur_ty);
+ if self.state.at_start {
+ self.state.at_start = false;
+ debug!("autoderef stage #0 is {:?}", self.state.cur_ty);
+ return Some((self.state.cur_ty, 0));
}
- if self.steps.len() > AUTODEREF_RECURSION_LIMIT {
+ // If we have reached the recursion limit, error gracefully.
+ if self.state.steps.len() >= AUTODEREF_RECURSION_LIMIT {
+ self.state.reached_recursion_limit = true;
return None;
}
- let (kind, new_ty) =
- autoderef_step(self.table, self.ty.clone(), self.explicit, self.use_receiver_trait)?;
-
- self.steps.push(kind, &self.ty);
- self.ty = new_ty;
-
- Some((self.ty.clone(), self.step_count()))
- }
-}
-
-pub(crate) fn autoderef_step(
- table: &mut InferenceTable<'_>,
- ty: Ty,
- explicit: bool,
- use_receiver_trait: bool,
-) -> Option<(AutoderefKind, Ty)> {
- if let Some(derefed) = builtin_deref(table.db, &ty, explicit) {
- Some((AutoderefKind::Builtin, table.structurally_resolve_type(derefed)))
- } else {
- Some((AutoderefKind::Overloaded, deref_by_trait(table, ty, use_receiver_trait)?))
- }
-}
-
-pub(crate) fn builtin_deref<'ty>(
- db: &dyn HirDatabase,
- ty: &'ty Ty,
- explicit: bool,
-) -> Option<&'ty Ty> {
- match ty.kind(Interner) {
- TyKind::Ref(.., ty) => Some(ty),
- TyKind::Raw(.., ty) if explicit => Some(ty),
- &TyKind::Adt(chalk_ir::AdtId(adt), ref substs) if crate::lang_items::is_box(db, adt) => {
- substs.at(Interner, 0).ty(Interner)
+ if self.state.cur_ty.is_ty_var() {
+ return None;
}
- _ => None,
- }
-}
-pub(crate) fn deref_by_trait(
- table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>,
- ty: Ty,
- use_receiver_trait: bool,
-) -> Option<Ty> {
- let _p = tracing::info_span!("deref_by_trait").entered();
- if table.structurally_resolve_type(&ty).inference_var(Interner).is_some() {
- // don't try to deref unknown variables
- return None;
- }
-
- let trait_id = || {
- // FIXME: Remove the `false` once `Receiver` needs to be stabilized, doing so will
- // effectively bump the MSRV of rust-analyzer to 1.84 due to 1.83 and below lacking the
- // blanked impl on `Deref`.
- #[expect(clippy::overly_complex_bool_expr)]
- if use_receiver_trait
- && false
- && let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate)
+ // Otherwise, deref if type is derefable:
+ // NOTE: in the case of self.use_receiver_trait = true, you might think it would
+ // be better to skip this clause and use the Overloaded case only, since &T
+ // and &mut T implement Receiver. But built-in derefs apply equally to Receiver
+ // and Deref, and this has benefits for const and the emitted MIR.
+ let (kind, new_ty) = if let Some(ty) =
+ self.state.cur_ty.builtin_deref(self.table.db, self.include_raw_pointers)
{
- return Some(receiver);
- }
- // Old rustc versions might not have `Receiver` trait.
- // Fallback to `Deref` if they don't
- LangItem::Deref.resolve_trait(db, table.trait_env.krate)
- };
- let trait_id = trait_id()?;
- let target =
- trait_id.trait_items(db).associated_type_by_name(&Name::new_symbol_root(sym::Target))?;
-
- let projection = {
- let b = TyBuilder::subst_for_def(db, trait_id, None);
- if b.remaining() != 1 {
- // the Target type + Deref trait should only have one generic parameter,
- // namely Deref's Self type
+ debug_assert_eq!(ty, self.table.infer_ctxt.resolve_vars_if_possible(ty));
+ // NOTE: we may still need to normalize the built-in deref in case
+ // we have some type like `&<Ty as Trait>::Assoc`, since users of
+ // autoderef expect this type to have been structurally normalized.
+ if let TyKind::Alias(..) = ty.kind() {
+ let (normalized_ty, obligations) = structurally_normalize_ty(self.table, ty)?;
+ self.state.obligations.extend(obligations);
+ (AutoderefKind::Builtin, normalized_ty)
+ } else {
+ (AutoderefKind::Builtin, ty)
+ }
+ } else if let Some(ty) = self.overloaded_deref_ty(self.state.cur_ty) {
+ // The overloaded deref check already normalizes the pointee type.
+ (AutoderefKind::Overloaded, ty)
+ } else {
return None;
- }
- let deref_subst = b.push(ty).build();
- TyBuilder::assoc_type_projection(db, target, Some(deref_subst)).build()
- };
+ };
- // Check that the type implements Deref at all
- let trait_ref = projection.trait_ref(db);
- let implements_goal: Goal = trait_ref.cast(Interner);
- if table.try_obligation(implements_goal.clone()).no_solution() {
- return None;
+ self.state.steps.push(self.state.cur_ty, kind);
+ debug!(
+ "autoderef stage #{:?} is {:?} from {:?}",
+ self.step_count(),
+ new_ty,
+ (self.state.cur_ty, kind)
+ );
+ self.state.cur_ty = new_ty;
+
+ Some((self.state.cur_ty, self.step_count()))
+ }
+}
+
+impl<'a, 'db> Autoderef<'a, 'db> {
+ pub(crate) fn new(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
+ Self::new_impl(table, base_ty)
+ }
+}
+
+impl<'a, 'db> Autoderef<'a, 'db, usize> {
+ pub(crate) fn new_no_tracking(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
+ Self::new_impl(table, base_ty)
+ }
+}
+
+impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
+ fn new_impl(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
+ Autoderef {
+ state: AutoderefSnapshot {
+ steps: Steps::default(),
+ cur_ty: table.infer_ctxt.resolve_vars_if_possible(base_ty),
+ obligations: PredicateObligations::new(),
+ at_start: true,
+ reached_recursion_limit: false,
+ },
+ table,
+ traits: None,
+ include_raw_pointers: false,
+ use_receiver_trait: false,
+ }
}
- table.register_obligation(implements_goal.to_nextsolver(table.interner));
+ fn autoderef_traits(&mut self) -> Option<AutoderefTraits> {
+ match &mut self.traits {
+ Some(it) => Some(*it),
+ None => {
+ let traits = if self.use_receiver_trait {
+ AutoderefTraits {
+ trait_target: LangItem::ReceiverTarget
+ .resolve_type_alias(self.table.db, self.table.trait_env.krate)
+ .or_else(|| {
+ LangItem::DerefTarget
+ .resolve_type_alias(self.table.db, self.table.trait_env.krate)
+ })?,
+ }
+ } else {
+ AutoderefTraits {
+ trait_target: LangItem::DerefTarget
+ .resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
+ }
+ };
+ Some(*self.traits.insert(traits))
+ }
+ }
+ }
- let result = table.normalize_projection_ty(projection);
- Some(table.structurally_resolve_type(&result))
+ fn overloaded_deref_ty(&mut self, ty: Ty<'db>) -> Option<Ty<'db>> {
+ debug!("overloaded_deref_ty({:?})", ty);
+ let interner = self.table.interner;
+
+ // <ty as Deref>, or whatever the equivalent trait is that we've been asked to walk.
+ let AutoderefTraits { trait_target } = self.autoderef_traits()?;
+
+ let (normalized_ty, obligations) = structurally_normalize_ty(
+ self.table,
+ Ty::new_projection(interner, trait_target.into(), [ty]),
+ )?;
+ debug!("overloaded_deref_ty({:?}) = ({:?}, {:?})", ty, normalized_ty, obligations);
+ self.state.obligations.extend(obligations);
+
+ Some(self.table.infer_ctxt.resolve_vars_if_possible(normalized_ty))
+ }
+
+ /// Returns the final type we ended up with, which may be an unresolved
+ /// inference variable.
+ pub(crate) fn final_ty(&self) -> Ty<'db> {
+ self.state.cur_ty
+ }
+
+ pub(crate) fn step_count(&self) -> usize {
+ self.state.steps.len()
+ }
+
+ pub(crate) fn take_obligations(&mut self) -> PredicateObligations<'db> {
+ std::mem::take(&mut self.state.obligations)
+ }
+
+ pub(crate) fn steps(&self) -> &Steps {
+ &self.state.steps
+ }
+
+ #[expect(dead_code)]
+ pub(crate) fn reached_recursion_limit(&self) -> bool {
+ self.state.reached_recursion_limit
+ }
+
+ /// Also dereference through raw pointer types,
+ /// e.g., assuming `ptr_to_Foo` is the type `*const Foo`:
+ /// autoderef(ptr_to_Foo) => [*const Foo]
+ /// autoderef(ptr_to_Foo).include_raw_pointers() => [*const Foo, Foo]
+ pub(crate) fn include_raw_pointers(mut self) -> Self {
+ self.include_raw_pointers = true;
+ self
+ }
+
+ /// Use `core::ops::Receiver` and `core::ops::Receiver::Target` as
+ /// the trait and associated type to iterate, instead of
+ /// `core::ops::Deref` and `core::ops::Deref::Target`
+ pub(crate) fn use_receiver_trait(mut self) -> Self {
+ self.use_receiver_trait = true;
+ self
+ }
+}
+
+fn structurally_normalize_ty<'db>(
+ table: &InferenceTable<'db>,
+ ty: Ty<'db>,
+) -> Option<(Ty<'db>, PredicateObligations<'db>)> {
+ let mut ocx = ObligationCtxt::new(&table.infer_ctxt);
+ let Ok(normalized_ty) =
+ ocx.structurally_normalize_ty(&ObligationCause::misc(), table.param_env, ty)
+ else {
+ // We shouldn't have errors here in the old solver, except for
+ // evaluate/fulfill mismatches, but that's not a reason for an ICE.
+ return None;
+ };
+ let errors = ocx.select_where_possible();
+ if !errors.is_empty() {
+ unreachable!();
+ }
+
+ Some((normalized_ty, ocx.into_pending_obligations()))
+}
+
+pub(crate) fn overloaded_deref_ty<'db>(
+ table: &InferenceTable<'db>,
+ ty: Ty<'db>,
+) -> Option<InferOk<'db, Ty<'db>>> {
+ let interner = table.interner;
+
+ let trait_target = LangItem::DerefTarget.resolve_type_alias(table.db, table.trait_env.krate)?;
+
+ let (normalized_ty, obligations) =
+ structurally_normalize_ty(table, Ty::new_projection(interner, trait_target.into(), [ty]))?;
+
+ Some(InferOk { value: normalized_ty, obligations })
}
diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs
index 8af8fb7..3755175 100644
--- a/crates/hir-ty/src/builder.rs
+++ b/crates/hir-ty/src/builder.rs
@@ -1,16 +1,12 @@
//! `TyBuilder`, a helper for building instances of `Ty` and related types.
-use std::iter;
-
use chalk_ir::{
AdtId, DebruijnIndex, Scalar,
cast::{Cast, CastTo, Caster},
fold::TypeFoldable,
interner::HasInterner,
};
-use hir_def::{
- DefWithBodyId, GenericDefId, GenericParamId, TraitId, TypeAliasId, builtin_type::BuiltinType,
-};
+use hir_def::{GenericDefId, GenericParamId, TraitId, TypeAliasId, builtin_type::BuiltinType};
use smallvec::SmallVec;
use crate::{
@@ -246,47 +242,6 @@
TyBuilder::new((), params, parent_subst)
}
- /// Creates a `TyBuilder` to build `Substitution` for a coroutine defined in `parent`.
- ///
- /// A coroutine's substitution consists of:
- /// - resume type of coroutine
- /// - yield type of coroutine ([`Coroutine::Yield`](std::ops::Coroutine::Yield))
- /// - return type of coroutine ([`Coroutine::Return`](std::ops::Coroutine::Return))
- /// - generic parameters in scope on `parent`
- ///
- /// in this order.
- ///
- /// This method prepopulates the builder with placeholder substitution of `parent`, so you
- /// should only push exactly 3 `GenericArg`s before building.
- pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> {
- let parent_subst =
- parent.as_generic_def_id(db).map(|p| generics(db, p).placeholder_subst(db));
- // These represent resume type, yield type, and return type of coroutine.
- let params = std::iter::repeat_n(ParamKind::Type, 3).collect();
- TyBuilder::new((), params, parent_subst)
- }
-
- pub fn subst_for_closure(
- db: &dyn HirDatabase,
- parent: DefWithBodyId,
- sig_ty: Ty,
- ) -> Substitution {
- let sig_ty = sig_ty.cast(Interner);
- let self_subst = iter::once(&sig_ty);
- let Some(parent) = parent.as_generic_def_id(db) else {
- return Substitution::from_iter(Interner, self_subst);
- };
- Substitution::from_iter(
- Interner,
- generics(db, parent)
- .placeholder_subst(db)
- .iter(Interner)
- .chain(self_subst)
- .cloned()
- .collect::<Vec<_>>(),
- )
- }
-
pub fn build(self) -> Substitution {
let ((), subst) = self.build_internal();
subst
diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs
index 9f0ea14..1faf9f6 100644
--- a/crates/hir-ty/src/chalk_ext.rs
+++ b/crates/hir-ty/src/chalk_ext.rs
@@ -210,7 +210,7 @@
match self.kind(Interner) {
TyKind::Function(fn_ptr) => Some(CallableSig::from_fn_ptr(fn_ptr)),
TyKind::FnDef(def, parameters) => Some(CallableSig::from_def(db, *def, parameters)),
- TyKind::Closure(.., substs) => ClosureSubst(substs).sig_ty().callable_sig(db),
+ TyKind::Closure(.., substs) => ClosureSubst(substs).sig_ty(db).callable_sig(db),
_ => None,
}
}
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 0aec2b9..448fc4a 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -283,6 +283,14 @@
def: TyDefId,
) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>;
+ /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
+ /// a `StructId` or `EnumVariantId` with a record constructor.
+ #[salsa::invoke(crate::lower_nextsolver::value_ty_query)]
+ fn value_ty_ns<'db>(
+ &'db self,
+ def: ValueTyDefId,
+ ) -> Option<crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>>;
+
#[salsa::invoke(crate::lower_nextsolver::type_for_type_alias_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower_nextsolver::type_for_type_alias_with_diagnostics_cycle_result)]
fn type_for_type_alias_with_diagnostics_ns<'db>(
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 64c4cde..3f04b72 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -14,6 +14,7 @@
};
use span::Edition;
+use crate::utils::TargetFeatureIsSafeInTarget;
use crate::{
InferenceResult, Interner, TargetFeatures, TyExt, TyKind,
db::HirDatabase,
@@ -147,7 +148,7 @@
edition: Edition,
/// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when
/// the target feature is not enabled. This flag encodes that.
- target_feature_is_safe: bool,
+ target_feature_is_safe: TargetFeatureIsSafeInTarget,
}
impl<'db> UnsafeVisitor<'db> {
@@ -167,7 +168,7 @@
let edition = krate.data(db).edition;
let target_feature_is_safe = match &krate.workspace_data(db).target {
Ok(target) => target_feature_is_safe_in_target(target),
- Err(_) => false,
+ Err(_) => TargetFeatureIsSafeInTarget::No,
};
Self {
db,
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 039fa70..519e4b5 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -38,14 +38,16 @@
};
use rustc_hash::FxHashSet;
use rustc_type_ir::{
- AliasTyKind, RegionKind,
- inherent::{AdtDef, IntoKind, SliceLike},
+ AliasTyKind, CoroutineArgsParts, RegionKind,
+ inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike},
};
use smallvec::SmallVec;
use span::Edition;
use stdx::never;
use triomphe::Arc;
+use crate::next_solver::infer::DbInternerInferExt;
+use crate::next_solver::infer::traits::ObligationCause;
use crate::{
AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, ConstScalar,
ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData,
@@ -789,11 +791,11 @@
) -> Result<(), HirDisplayError> {
let trait_env = TraitEnvironment::empty(f.krate());
let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block);
- let ty = crate::next_solver::project::solve_normalize::normalize(
- interner,
- trait_env.env.to_nextsolver(interner),
- ty,
- );
+ let infcx = interner.infer_ctxt().build(rustc_type_ir::TypingMode::PostAnalysis);
+ let ty = infcx
+ .at(&ObligationCause::new(), trait_env.env.to_nextsolver(interner))
+ .deeply_normalize(ty)
+ .unwrap_or(ty);
render_const_scalar_inner(f, b, memory_map, ty, trait_env)
}
@@ -895,7 +897,7 @@
}
f.write_str("]")
}
- TyKind::Dynamic(_, _, _) => {
+ TyKind::Dynamic(_, _) => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let ty_id = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
let Ok(t) = memory_map.vtable_ty(ty_id) else {
@@ -1062,7 +1064,7 @@
| TyKind::Bound(_, _)
| TyKind::Infer(_) => f.write_str("<placeholder-or-unknown-type>"),
// The below arms are unreachable, since we handled them in ref case.
- TyKind::Slice(_) | TyKind::Str | TyKind::Dynamic(_, _, _) => f.write_str("<unsized-value>"),
+ TyKind::Slice(_) | TyKind::Str | TyKind::Dynamic(_, _) => f.write_str("<unsized-value>"),
}
}
@@ -1211,7 +1213,7 @@
})
};
let (preds_to_print, has_impl_fn_pred) = match t.kind() {
- TyKind::Dynamic(bounds, region, _) => {
+ TyKind::Dynamic(bounds, region) => {
let render_lifetime = f.render_region(region);
(
bounds.len() + render_lifetime as usize,
@@ -1556,7 +1558,7 @@
}
_ => (),
}
- let sig = ClosureSubst(&substs).sig_ty().callable_sig(db);
+ let sig = ClosureSubst(&substs).sig_ty(db).callable_sig(db);
if let Some(sig) = sig {
let InternedClosure(def, _) = db.lookup_intern_closure(id);
let infer = db.infer(def);
@@ -1696,26 +1698,17 @@
DisplaySourceCodeError::Coroutine,
));
}
- let subst = convert_args_for_result(interner, subst.as_slice());
- let subst = subst.as_slice(Interner);
- let a: Option<SmallVec<[&Ty; 3]>> = subst
- .get(subst.len() - 3..)
- .and_then(|args| args.iter().map(|arg| arg.ty(Interner)).collect());
+ let CoroutineArgsParts { resume_ty, yield_ty, return_ty, .. } =
+ subst.split_coroutine_args();
+ write!(f, "|")?;
+ resume_ty.hir_fmt(f)?;
+ write!(f, "|")?;
- if let Some([resume_ty, yield_ty, ret_ty]) = a.as_deref() {
- write!(f, "|")?;
- resume_ty.hir_fmt(f)?;
- write!(f, "|")?;
+ write!(f, " yields ")?;
+ yield_ty.hir_fmt(f)?;
- write!(f, " yields ")?;
- yield_ty.hir_fmt(f)?;
-
- write!(f, " -> ")?;
- ret_ty.hir_fmt(f)?;
- } else {
- // This *should* be unreachable, but fallback just in case.
- write!(f, "{{coroutine}}")?;
- }
+ write!(f, " -> ")?;
+ return_ty.hir_fmt(f)?;
}
TyKind::CoroutineWitness(..) => write!(f, "{{coroutine witness}}")?,
TyKind::Pat(_, _) => write!(f, "{{pat}}")?,
diff --git a/crates/hir-ty/src/drop.rs b/crates/hir-ty/src/drop.rs
index a7e942d..f5c2f41 100644
--- a/crates/hir-ty/src/drop.rs
+++ b/crates/hir-ty/src/drop.rs
@@ -120,7 +120,7 @@
let env = db.trait_environment_for_body(owner);
captures
.iter()
- .map(|capture| db.has_drop_glue(capture.ty(subst), env.clone()))
+ .map(|capture| db.has_drop_glue(capture.ty(db, subst), env.clone()))
.max()
.unwrap_or(DropGlue::None)
}
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 3d91a25..0171197 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -13,6 +13,7 @@
//! to certain types. To record this, we use the union-find implementation from
//! the `ena` crate, which is extracted from rustc.
+mod autoderef;
pub(crate) mod cast;
pub(crate) mod closure;
mod coerce;
@@ -25,6 +26,7 @@
use std::{cell::OnceCell, convert::identity, iter, ops::Index};
+use base_db::Crate;
use chalk_ir::{
DebruijnIndex, Mutability, Safety, Scalar, TyKind, TypeFlags, Variance,
cast::Cast,
@@ -54,27 +56,30 @@
use stdx::{always, never};
use triomphe::Arc;
-use crate::next_solver::DbInterner;
-use crate::next_solver::mapping::NextSolverToChalk;
+use crate::db::InternedClosureId;
use crate::{
AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, ImplTraitId, ImplTraitIdx,
IncorrectGenericsLenKind, Interner, Lifetime, OpaqueTyId, ParamLoweringMode,
- PathLoweringDiagnostic, ProjectionTy, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
+ PathLoweringDiagnostic, ProjectionTy, Substitution, TargetFeatures, TraitEnvironment, Ty,
+ TyBuilder, TyExt,
db::HirDatabase,
fold_tys,
generics::Generics,
infer::{
- coerce::CoerceMany,
+ coerce::{CoerceMany, DynamicCoerceMany},
diagnostics::{Diagnostics, InferenceTyLoweringContext as TyLoweringContext},
expr::ExprIsRead,
unify::InferenceTable,
},
lower::{ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic},
mir::MirSpan,
- next_solver::{self, mapping::ChalkToNextSolver},
+ next_solver::{
+ self, DbInterner,
+ mapping::{ChalkToNextSolver, NextSolverToChalk},
+ },
static_lifetime, to_assoc_type_id,
traits::FnTrait,
- utils::UnevaluatedConstEvaluatorFolder,
+ utils::{TargetFeatureIsSafeInTarget, UnevaluatedConstEvaluatorFolder},
};
// This lint has a false positive here. See the link below for details.
@@ -86,7 +91,7 @@
pub use unify::{could_unify, could_unify_deeply};
use cast::{CastCheck, CastError};
-pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
+pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
@@ -159,7 +164,7 @@
let mut table = unify::InferenceTable::new(db, trait_env);
let ty_with_vars = table.normalize_associated_types_in(ty);
- table.resolve_obligations_as_possible();
+ table.select_obligations_where_possible();
table.propagate_diverging_flag();
table.resolve_completely(ty_with_vars)
}
@@ -183,18 +188,14 @@
}
}
+// FIXME: Remove this `InferOk` and switch all code to the other one, which uses `Obligation` instead of `Goal`.
#[derive(Debug)]
pub(crate) struct InferOk<'db, T> {
+ #[allow(dead_code)]
value: T,
goals: Vec<next_solver::Goal<'db, next_solver::Predicate<'db>>>,
}
-impl<'db, T> InferOk<'db, T> {
- fn map<U>(self, f: impl FnOnce(T) -> U) -> InferOk<'db, U> {
- InferOk { value: f(self.value), goals: self.goals }
- }
-}
-
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum InferenceTyDiagnosticSource {
/// Diagnostics that come from types in the body.
@@ -378,6 +379,26 @@
}
}
+/// At least for initial deployment, we want to limit two-phase borrows to
+/// only a few specific cases. Right now, those are mostly "things that desugar"
+/// into method calls:
+/// - using `x.some_method()` syntax, where some_method takes `&mut self`,
+/// - using `Foo::some_method(&mut x, ...)` syntax,
+/// - binary assignment operators (`+=`, `-=`, `*=`, etc.).
+///
+/// Anything else should be rejected until generalized two-phase borrow support
+/// is implemented. Right now, dataflow can't handle the general case where there
+/// is more than one use of a mutable borrow, and we don't want to accept too much
+/// new code via two-phase borrows, so we try to limit where we create two-phase
+/// capable mutable borrows.
+/// See #49434 for tracking.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub(crate) enum AllowTwoPhase {
+ // FIXME: We should use this when appropriate.
+ Yes,
+ No,
+}
+
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Adjust {
/// Go from ! to any type.
@@ -393,8 +414,6 @@
/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
/// The target type is `U` in both cases, with the region and mutability
/// being those shared by both the receiver and the returned reference.
-///
-/// Mutability is `None` when we are not sure.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct OverloadedDeref(pub Option<Mutability>);
@@ -656,6 +675,7 @@
/// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext
/// and resolve the path via its methods. This will ensure proper error reporting.
pub(crate) resolver: Resolver<'db>,
+ target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
generic_def: GenericDefId,
generics: OnceCell<Generics>,
table: unify::InferenceTable<'db>,
@@ -673,11 +693,11 @@
/// If `Some`, this stores coercion information for returned
/// expressions. If `None`, this is in a context where return is
/// inappropriate, such as a const expression.
- return_coercion: Option<CoerceMany>,
+ return_coercion: Option<DynamicCoerceMany<'db>>,
/// The resume type and the yield type, respectively, of the coroutine being inferred.
resume_yield_tys: Option<(Ty, Ty)>,
diverges: Diverges,
- breakables: Vec<BreakableContext>,
+ breakables: Vec<BreakableContext<'db>>,
/// Whether we are inside the pattern of a destructuring assignment.
inside_assignment: bool,
@@ -692,21 +712,21 @@
/// We do that because sometimes we truncate projections (when a closure captures
/// both `a.b` and `a.b.c`), and we want to provide accurate spans in this case.
current_capture_span_stack: Vec<MirSpan>,
- current_closure: Option<ClosureId>,
+ current_closure: Option<InternedClosureId>,
/// Stores the list of closure ids that need to be analyzed before this closure. See the
/// comment on `InferenceContext::sort_closures`
- closure_dependencies: FxHashMap<ClosureId, Vec<ClosureId>>,
- deferred_closures: FxHashMap<ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>>,
+ closure_dependencies: FxHashMap<InternedClosureId, Vec<InternedClosureId>>,
+ deferred_closures: FxHashMap<InternedClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>>,
diagnostics: Diagnostics,
}
#[derive(Clone, Debug)]
-struct BreakableContext {
+struct BreakableContext<'db> {
/// Whether this context contains at least one break expression.
may_break: bool,
/// The coercion target of the context.
- coerce: Option<CoerceMany>,
+ coerce: Option<DynamicCoerceMany<'db>>,
/// The optional label of the context.
label: Option<LabelId>,
kind: BreakableKind,
@@ -721,10 +741,10 @@
Border,
}
-fn find_breakable(
- ctxs: &mut [BreakableContext],
+fn find_breakable<'a, 'db>(
+ ctxs: &'a mut [BreakableContext<'db>],
label: Option<LabelId>,
-) -> Option<&mut BreakableContext> {
+) -> Option<&'a mut BreakableContext<'db>> {
let mut ctxs = ctxs
.iter_mut()
.rev()
@@ -735,10 +755,10 @@
}
}
-fn find_continuable(
- ctxs: &mut [BreakableContext],
+fn find_continuable<'a, 'db>(
+ ctxs: &'a mut [BreakableContext<'db>],
label: Option<LabelId>,
-) -> Option<&mut BreakableContext> {
+) -> Option<&'a mut BreakableContext<'db>> {
match label {
Some(_) => find_breakable(ctxs, label).filter(|it| matches!(it.kind, BreakableKind::Loop)),
None => find_breakable(ctxs, label),
@@ -759,6 +779,7 @@
) -> Self {
let trait_env = db.trait_environment_for_body(owner);
InferenceContext {
+ target_features: OnceCell::new(),
generics: OnceCell::new(),
result: InferenceResult::default(),
table: unify::InferenceTable::new(db, trait_env),
@@ -794,18 +815,56 @@
self.generics.get_or_init(|| crate::generics::generics(self.db, self.generic_def))
}
+ #[inline]
+ fn krate(&self) -> Crate {
+ self.resolver.krate()
+ }
+
+ fn target_features<'a>(
+ db: &dyn HirDatabase,
+ target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
+ owner: DefWithBodyId,
+ krate: Crate,
+ ) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) {
+ let (target_features, target_feature_is_safe) = target_features.get_or_init(|| {
+ let target_features = match owner {
+ DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())),
+ _ => TargetFeatures::default(),
+ };
+ let target_feature_is_safe = match &krate.workspace_data(db).target {
+ Ok(target) => crate::utils::target_feature_is_safe_in_target(target),
+ Err(_) => TargetFeatureIsSafeInTarget::No,
+ };
+ (target_features, target_feature_is_safe)
+ });
+ (target_features, *target_feature_is_safe)
+ }
+
+ #[inline]
+ pub(crate) fn set_tainted_by_errors(&mut self) {
+ self.result.has_errors = true;
+ }
+
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need
// `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
// used this function for another workaround, mention it here. If you really need this function and believe that
// there is no problem in it being `pub(crate)`, remove this comment.
- pub(crate) fn resolve_all(self) -> InferenceResult {
+ pub(crate) fn resolve_all(mut self) -> InferenceResult {
+ self.table.select_obligations_where_possible();
+ self.table.fallback_if_possible();
+
+ // Comment from rustc:
+ // Even though coercion casts provide type hints, we check casts after fallback for
+ // backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
+ let cast_checks = std::mem::take(&mut self.deferred_cast_checks);
+ for mut cast in cast_checks.into_iter() {
+ if let Err(diag) = cast.check(&mut self) {
+ self.diagnostics.push(diag);
+ }
+ }
+
let InferenceContext {
- mut table,
- mut result,
- mut deferred_cast_checks,
- tuple_field_accesses_rev,
- diagnostics,
- ..
+ mut table, mut result, tuple_field_accesses_rev, diagnostics, ..
} = self;
let mut diagnostics = diagnostics.finish();
// Destructure every single field so whenever new fields are added to `InferenceResult` we
@@ -831,31 +890,12 @@
closure_info: _,
mutated_bindings_in_closure: _,
tuple_field_access_types: _,
- coercion_casts,
+ coercion_casts: _,
diagnostics: _,
} = &mut result;
- table.resolve_obligations_as_possible();
- table.fallback_if_possible();
-
- // Comment from rustc:
- // Even though coercion casts provide type hints, we check casts after fallback for
- // backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
- let mut apply_adjustments = |expr, adj: Vec<_>| {
- expr_adjustments.insert(expr, adj.into_boxed_slice());
- };
- let mut set_coercion_cast = |expr| {
- coercion_casts.insert(expr);
- };
- for cast in deferred_cast_checks.iter_mut() {
- if let Err(diag) =
- cast.check(&mut table, &mut apply_adjustments, &mut set_coercion_cast)
- {
- diagnostics.push(diag);
- }
- }
// FIXME resolve obligations as well (use Guidance if necessary)
- table.resolve_obligations_as_possible();
+ table.select_obligations_where_possible();
// make sure diverging type variables are marked as such
table.propagate_diverging_flag();
@@ -1081,7 +1121,8 @@
};
self.return_ty = self.process_user_written_ty(return_ty);
- self.return_coercion = Some(CoerceMany::new(self.return_ty.clone()));
+ self.return_coercion =
+ Some(CoerceMany::new(self.return_ty.to_nextsolver(self.table.interner)));
// Functions might be defining usage sites of TAITs.
// To define an TAITs, that TAIT must appear in the function's signatures.
@@ -1117,8 +1158,12 @@
fold_tys(
t,
|ty, _| {
+ let ty = self.table.structurally_resolve_type(&ty);
let opaque_ty_id = match ty.kind(Interner) {
- TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id,
+ TyKind::OpaqueType(opaque_ty_id, _)
+ | TyKind::Alias(AliasTy::Opaque(crate::OpaqueTy { opaque_ty_id, .. })) => {
+ *opaque_ty_id
+ }
_ => return ty,
};
let (impl_traits, idx) =
@@ -1214,9 +1259,11 @@
ty: &chalk_ir::Ty<Interner>,
outer_binder: DebruijnIndex,
) -> std::ops::ControlFlow<Self::BreakTy> {
- let ty = self.table.resolve_ty_shallow(ty);
+ let ty = self.table.structurally_resolve_type(ty);
- if let TyKind::OpaqueType(id, _) = ty.kind(Interner)
+ if let TyKind::OpaqueType(id, _)
+ | TyKind::Alias(AliasTy::Opaque(crate::OpaqueTy { opaque_ty_id: id, .. })) =
+ ty.kind(Interner)
&& let ImplTraitId::TypeAliasImplTrait(alias_id, _) =
self.db.lookup_intern_impl_trait_id((*id).into())
{
@@ -1361,6 +1408,13 @@
}
}
+ fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[Ty]>) {
+ if adjustments.is_empty() {
+ return;
+ }
+ self.result.pat_adjustments.entry(pat).or_default().extend(adjustments);
+ }
+
fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: Substitution) {
self.result.method_resolutions.insert(expr, (func, subst));
}
@@ -1587,24 +1641,14 @@
self.table.process_remote_user_written_ty(ty)
}
- /// Recurses through the given type, normalizing associated types mentioned
- /// in it by replacing them by type variables and registering obligations to
- /// resolve later. This should be done once for every type we get from some
- /// type annotation (e.g. from a let type annotation, field type or function
- /// call). `make_ty` handles this already, but e.g. for field types we need
- /// to do it as well.
- fn normalize_associated_types_in<T, U>(&mut self, ty: T) -> T
- where
- T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + ChalkToNextSolver<'db, U>,
- U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable<DbInterner<'db>>,
- {
- self.table.normalize_associated_types_in(ty)
- }
-
fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
self.table.resolve_ty_shallow(ty)
}
+ fn shallow_resolve(&self, ty: crate::next_solver::Ty<'db>) -> crate::next_solver::Ty<'db> {
+ self.table.shallow_resolve(ty)
+ }
+
fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
}
diff --git a/crates/hir-ty/src/infer/autoderef.rs b/crates/hir-ty/src/infer/autoderef.rs
new file mode 100644
index 0000000..77b1ae6
--- /dev/null
+++ b/crates/hir-ty/src/infer/autoderef.rs
@@ -0,0 +1,54 @@
+//! Autoderef helpers for inference.
+
+use std::iter;
+
+use crate::{
+ Adjust, Adjustment, OverloadedDeref,
+ autoderef::{Autoderef, AutoderefKind},
+ infer::unify::InferenceTable,
+ next_solver::{
+ Ty,
+ infer::{InferOk, traits::PredicateObligations},
+ mapping::NextSolverToChalk,
+ },
+};
+
+impl<'db> InferenceTable<'db> {
+ pub(crate) fn autoderef(&mut self, base_ty: Ty<'db>) -> Autoderef<'_, 'db> {
+ Autoderef::new(self, base_ty)
+ }
+}
+
+impl<'db> Autoderef<'_, 'db> {
+ /// Returns the adjustment steps.
+ pub(crate) fn adjust_steps(mut self) -> Vec<Adjustment> {
+ let infer_ok = self.adjust_steps_as_infer_ok();
+ self.table.register_infer_ok(infer_ok)
+ }
+
+ pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment>> {
+ let steps = self.steps();
+ if steps.is_empty() {
+ return InferOk { obligations: PredicateObligations::new(), value: vec![] };
+ }
+
+ let targets = steps.iter().skip(1).map(|&(ty, _)| ty).chain(iter::once(self.final_ty()));
+ let steps: Vec<_> = steps
+ .iter()
+ .map(|&(_source, kind)| {
+ if let AutoderefKind::Overloaded = kind {
+ Some(OverloadedDeref(Some(chalk_ir::Mutability::Not)))
+ } else {
+ None
+ }
+ })
+ .zip(targets)
+ .map(|(autoderef, target)| Adjustment {
+ kind: Adjust::Deref(autoderef),
+ target: target.to_chalk(self.table.interner),
+ })
+ .collect();
+
+ InferOk { obligations: self.take_obligations(), value: steps }
+ }
+}
diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs
index bc3ee3c..4cd6144 100644
--- a/crates/hir-ty/src/infer/cast.rs
+++ b/crates/hir-ty/src/infer/cast.rs
@@ -4,12 +4,14 @@
use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags};
use stdx::never;
+use crate::infer::coerce::CoerceNever;
use crate::{
- Adjustment, Binders, DynTy, InferenceDiagnostic, Interner, PlaceholderIndex,
- QuantifiedWhereClauses, Ty, TyExt, TyKind, TypeFlags, WhereClause,
+ Binders, DynTy, InferenceDiagnostic, Interner, PlaceholderIndex, QuantifiedWhereClauses, Ty,
+ TyExt, TyKind, TypeFlags, WhereClause,
db::HirDatabase,
from_chalk_trait_id,
- infer::{coerce::CoerceNever, unify::InferenceTable},
+ infer::{AllowTwoPhase, InferenceContext},
+ next_solver::mapping::ChalkToNextSolver,
};
#[derive(Debug)]
@@ -93,23 +95,25 @@
Self { expr, source_expr, expr_ty, cast_ty }
}
- pub(super) fn check<F, G>(
+ pub(super) fn check(
&mut self,
- table: &mut InferenceTable<'_>,
- apply_adjustments: &mut F,
- set_coercion_cast: &mut G,
- ) -> Result<(), InferenceDiagnostic>
- where
- F: FnMut(ExprId, Vec<Adjustment>),
- G: FnMut(ExprId),
- {
- self.expr_ty = table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty.clone());
- self.cast_ty = table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty.clone());
+ ctx: &mut InferenceContext<'_>,
+ ) -> Result<(), InferenceDiagnostic> {
+ self.expr_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty.clone());
+ self.cast_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty.clone());
// This should always come first so that we apply the coercion, which impacts infer vars.
- if let Ok((adj, _)) = table.coerce(&self.expr_ty, &self.cast_ty, CoerceNever::Yes) {
- apply_adjustments(self.source_expr, adj);
- set_coercion_cast(self.source_expr);
+ if ctx
+ .coerce(
+ self.source_expr.into(),
+ self.expr_ty.to_nextsolver(ctx.table.interner),
+ self.cast_ty.to_nextsolver(ctx.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ )
+ .is_ok()
+ {
+ ctx.result.coercion_casts.insert(self.source_expr);
return Ok(());
}
@@ -118,7 +122,7 @@
}
if !self.cast_ty.data(Interner).flags.contains(TypeFlags::HAS_TY_INFER)
- && !table.is_sized(&self.cast_ty)
+ && !ctx.table.is_sized(&self.cast_ty)
{
return Err(InferenceDiagnostic::CastToUnsized {
expr: self.expr,
@@ -133,30 +137,31 @@
return Ok(());
}
- self.do_check(table, apply_adjustments)
+ self.do_check(ctx)
.map_err(|e| e.into_diagnostic(self.expr, self.expr_ty.clone(), self.cast_ty.clone()))
}
- fn do_check<F>(
- &self,
- table: &mut InferenceTable<'_>,
- apply_adjustments: &mut F,
- ) -> Result<(), CastError>
- where
- F: FnMut(ExprId, Vec<Adjustment>),
- {
+ fn do_check(&self, ctx: &mut InferenceContext<'_>) -> Result<(), CastError> {
let (t_from, t_cast) = match (
- CastTy::from_ty(table.db, &self.expr_ty),
- CastTy::from_ty(table.db, &self.cast_ty),
+ CastTy::from_ty(ctx.db, &self.expr_ty),
+ CastTy::from_ty(ctx.db, &self.cast_ty),
) {
(Some(t_from), Some(t_cast)) => (t_from, t_cast),
(None, Some(t_cast)) => match self.expr_ty.kind(Interner) {
TyKind::FnDef(..) => {
- let sig = self.expr_ty.callable_sig(table.db).expect("FnDef had no sig");
- let sig = table.eagerly_normalize_and_resolve_shallow_in(sig);
+ let sig = self.expr_ty.callable_sig(ctx.db).expect("FnDef had no sig");
+ let sig = ctx.table.eagerly_normalize_and_resolve_shallow_in(sig);
let fn_ptr = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
- if let Ok((adj, _)) = table.coerce(&self.expr_ty, &fn_ptr, CoerceNever::Yes) {
- apply_adjustments(self.source_expr, adj);
+ if ctx
+ .coerce(
+ self.source_expr.into(),
+ self.expr_ty.to_nextsolver(ctx.table.interner),
+ fn_ptr.to_nextsolver(ctx.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ )
+ .is_ok()
+ {
} else {
return Err(CastError::IllegalCast);
}
@@ -176,11 +181,11 @@
},
// array-ptr-cast
CastTy::Ptr(t, m) => {
- let t = table.eagerly_normalize_and_resolve_shallow_in(t);
- if !table.is_sized(&t) {
+ let t = ctx.table.eagerly_normalize_and_resolve_shallow_in(t);
+ if !ctx.table.is_sized(&t) {
return Err(CastError::IllegalCast);
}
- self.check_ref_cast(table, inner_ty, *mutbl, &t, m, apply_adjustments)
+ self.check_ref_cast(ctx, inner_ty, *mutbl, &t, m)
}
_ => Err(CastError::NonScalar),
};
@@ -202,12 +207,10 @@
}
(CastTy::Int(Int::Bool | Int::CEnum | Int::Char) | CastTy::Float, CastTy::Ptr(..))
| (CastTy::Ptr(..) | CastTy::FnPtr, CastTy::Float) => Err(CastError::IllegalCast),
- (CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => {
- self.check_ptr_ptr_cast(table, &src, &dst)
- }
- (CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(table, &src),
- (CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(table, &dst),
- (CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(table, &dst),
+ (CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => self.check_ptr_ptr_cast(ctx, &src, &dst),
+ (CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(ctx, &src),
+ (CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(ctx, &dst),
+ (CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(ctx, &dst),
(CastTy::Int(Int::CEnum), CastTy::Int(_)) => Ok(()),
(CastTy::Int(Int::Char | Int::Bool), CastTy::Int(_)) => Ok(()),
(CastTy::Int(_) | CastTy::Float, CastTy::Int(_) | CastTy::Float) => Ok(()),
@@ -215,26 +218,30 @@
}
}
- fn check_ref_cast<F>(
+ fn check_ref_cast(
&self,
- table: &mut InferenceTable<'_>,
+ ctx: &mut InferenceContext<'_>,
t_expr: &Ty,
m_expr: Mutability,
t_cast: &Ty,
m_cast: Mutability,
- apply_adjustments: &mut F,
- ) -> Result<(), CastError>
- where
- F: FnMut(ExprId, Vec<Adjustment>),
- {
+ ) -> Result<(), CastError> {
// Mutability order is opposite to rustc. `Mut < Not`
if m_expr <= m_cast
&& let TyKind::Array(ety, _) = t_expr.kind(Interner)
{
// Coerce to a raw pointer so that we generate RawPtr in MIR.
let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner);
- if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type, CoerceNever::Yes) {
- apply_adjustments(self.source_expr, adj);
+ if ctx
+ .coerce(
+ self.source_expr.into(),
+ self.expr_ty.to_nextsolver(ctx.table.interner),
+ array_ptr_type.to_nextsolver(ctx.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ )
+ .is_ok()
+ {
} else {
never!(
"could not cast from reference to array to pointer to array ({:?} to {:?})",
@@ -245,7 +252,16 @@
// This is a less strict condition than rustc's `demand_eqtype`,
// but false negative is better than false positive
- if table.coerce(ety, t_cast, CoerceNever::Yes).is_ok() {
+ if ctx
+ .coerce(
+ self.source_expr.into(),
+ ety.to_nextsolver(ctx.table.interner),
+ t_cast.to_nextsolver(ctx.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ )
+ .is_ok()
+ {
return Ok(());
}
}
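// Illustrative example of the array-ptr-cast handled above: `&[1u8, 2, 3] as *const u8`.
// The `&[u8; 3]` is first coerced to a raw pointer to the array, and the element
// type `u8` must then be compatible with the cast's pointee type.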
@@ -255,12 +271,12 @@
fn check_ptr_ptr_cast(
&self,
- table: &mut InferenceTable<'_>,
+ ctx: &mut InferenceContext<'_>,
src: &Ty,
dst: &Ty,
) -> Result<(), CastError> {
- let src_kind = pointer_kind(src, table).map_err(|_| CastError::Unknown)?;
- let dst_kind = pointer_kind(dst, table).map_err(|_| CastError::Unknown)?;
+ let src_kind = pointer_kind(src, ctx).map_err(|_| CastError::Unknown)?;
+ let dst_kind = pointer_kind(dst, ctx).map_err(|_| CastError::Unknown)?;
match (src_kind, dst_kind) {
(Some(PointerKind::Error), _) | (_, Some(PointerKind::Error)) => Ok(()),
@@ -285,9 +301,9 @@
return Ok(());
}
let src_principal =
- table.db.trait_signature(from_chalk_trait_id(src_principal));
+ ctx.db.trait_signature(from_chalk_trait_id(src_principal));
let dst_principal =
- table.db.trait_signature(from_chalk_trait_id(dst_principal));
+ ctx.db.trait_signature(from_chalk_trait_id(dst_principal));
if src_principal.flags.contains(TraitFlags::AUTO)
&& dst_principal.flags.contains(TraitFlags::AUTO)
{
@@ -306,10 +322,10 @@
fn check_ptr_addr_cast(
&self,
- table: &mut InferenceTable<'_>,
+ ctx: &mut InferenceContext<'_>,
expr_ty: &Ty,
) -> Result<(), CastError> {
- match pointer_kind(expr_ty, table).map_err(|_| CastError::Unknown)? {
+ match pointer_kind(expr_ty, ctx).map_err(|_| CastError::Unknown)? {
// None => Err(CastError::UnknownExprPtrKind),
None => Ok(()),
Some(PointerKind::Error) => Ok(()),
@@ -320,10 +336,10 @@
fn check_addr_ptr_cast(
&self,
- table: &mut InferenceTable<'_>,
+ ctx: &mut InferenceContext<'_>,
cast_ty: &Ty,
) -> Result<(), CastError> {
- match pointer_kind(cast_ty, table).map_err(|_| CastError::Unknown)? {
+ match pointer_kind(cast_ty, ctx).map_err(|_| CastError::Unknown)? {
// None => Err(CastError::UnknownCastPtrKind),
None => Ok(()),
Some(PointerKind::Error) => Ok(()),
@@ -336,10 +352,10 @@
fn check_fptr_ptr_cast(
&self,
- table: &mut InferenceTable<'_>,
+ ctx: &mut InferenceContext<'_>,
cast_ty: &Ty,
) -> Result<(), CastError> {
- match pointer_kind(cast_ty, table).map_err(|_| CastError::Unknown)? {
+ match pointer_kind(cast_ty, ctx).map_err(|_| CastError::Unknown)? {
// None => Err(CastError::UnknownCastPtrKind),
None => Ok(()),
Some(PointerKind::Error) => Ok(()),
@@ -362,10 +378,10 @@
Error,
}
-fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result<Option<PointerKind>, ()> {
- let ty = table.eagerly_normalize_and_resolve_shallow_in(ty.clone());
+fn pointer_kind(ty: &Ty, ctx: &mut InferenceContext<'_>) -> Result<Option<PointerKind>, ()> {
+ let ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(ty.clone());
- if table.is_sized(&ty) {
+ if ctx.table.is_sized(&ty) {
return Ok(Some(PointerKind::Thin));
}
@@ -378,11 +394,11 @@
return Err(());
};
- let struct_data = id.fields(table.db);
+ let struct_data = id.fields(ctx.db);
if let Some((last_field, _)) = struct_data.fields().iter().last() {
let last_field_ty =
- table.db.field_types(id.into())[last_field].clone().substitute(Interner, subst);
- pointer_kind(&last_field_ty, table)
+ ctx.db.field_types(id.into())[last_field].clone().substitute(Interner, subst);
+ pointer_kind(&last_field_ty, ctx)
} else {
Ok(Some(PointerKind::Thin))
}
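// Illustrative example (hypothetical struct): for `struct S { header: u8, tail: [u8] }`,
// the pointer kind of `S` is that of its last field `[u8]`, so `*const S` is not a thin
// pointer; a struct whose last field is `Sized` stays thin.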
@@ -390,7 +406,7 @@
TyKind::Tuple(_, subst) => {
match subst.iter(Interner).last().and_then(|arg| arg.ty(Interner)) {
None => Ok(Some(PointerKind::Thin)),
- Some(ty) => pointer_kind(ty, table),
+ Some(ty) => pointer_kind(ty, ctx),
}
}
TyKind::Foreign(_) => Ok(Some(PointerKind::Thin)),
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index fd7e5a6..1d5d8dd 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -1,143 +1,168 @@
//! Inference of closure parameter types based on the closure's expected type.
-use std::{cmp, convert::Infallible, mem, ops::ControlFlow};
+pub(crate) mod analysis;
-use chalk_ir::{
- BoundVar, DebruijnIndex, FnSubst, GenericArg, Mutability, TyKind,
- cast::Cast,
- fold::{FallibleTypeFolder, Shift, TypeFoldable},
- visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
-};
-use either::Either;
+use std::ops::ControlFlow;
+use std::{iter, mem};
+
use hir_def::{
- DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
- expr_store::path::Path,
- hir::{
- Array, AsmOperand, BinaryOp, BindingId, CaptureBy, ClosureKind, Expr, ExprId, ExprOrPatId,
- Pat, PatId, Statement, UnaryOp,
- },
- item_tree::FieldsShape,
+ TraitId,
+ hir::{ClosureKind, ExprId, PatId},
lang_item::LangItem,
- resolver::ValueNs,
type_ref::TypeRefId,
};
-use hir_def::{ItemContainerId, Lookup, TraitId};
-use hir_expand::name::Name;
-use intern::sym;
-use rustc_hash::{FxHashMap, FxHashSet};
-use smallvec::{SmallVec, smallvec};
-use stdx::{format_to, never};
-use syntax::utils::is_raw_identifier;
+use rustc_type_ir::{
+ ClosureArgs, ClosureArgsParts, CoroutineArgs, CoroutineArgsParts, Interner, TypeSuperVisitable,
+ TypeVisitable, TypeVisitableExt, TypeVisitor,
+ inherent::{BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Ty as _},
+};
+use tracing::debug;
+use crate::traits::FnTrait;
use crate::{
- Adjust, Adjustment, AliasEq, AliasTy, Binders, BindingMode, ClosureId, DynTy, DynTyExt, FnAbi,
- FnPointer, FnSig, Interner, OpaqueTy, ProjectionTy, ProjectionTyExt, Substitution, Ty,
- TyBuilder, TyExt, WhereClause,
- db::{HirDatabase, InternedClosure, InternedCoroutine},
- error_lifetime, from_assoc_type_id, from_chalk_trait_id, from_placeholder_idx,
- generics::Generics,
- infer::{BreakableKind, CoerceMany, Diverges, coerce::CoerceNever},
- make_binders,
- mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
- next_solver::mapping::ChalkToNextSolver,
- to_assoc_type_id,
- traits::FnTrait,
- utils::{self, elaborate_clause_supertraits},
+ FnAbi,
+ db::{InternedClosure, InternedCoroutine},
+ infer::{BreakableKind, Diverges, coerce::CoerceMany},
+ next_solver::{
+ AliasTy, Binder, ClauseKind, DbInterner, ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig,
+ PolyProjectionPredicate, Predicate, PredicateKind, SolverDefId, Ty, TyKind,
+ abi::Safety,
+ infer::{
+ BoundRegionConversionTime, DefineOpaqueTypes, InferOk, InferResult,
+ traits::{ObligationCause, PredicateObligations},
+ },
+ mapping::{ChalkToNextSolver, NextSolverToChalk},
+ util::explicit_item_bounds,
+ },
};
use super::{Expectation, InferenceContext};
#[derive(Debug)]
-pub(super) struct ClosureSignature {
- pub(super) ret_ty: Ty,
- pub(super) expected_sig: FnPointer,
+struct ClosureSignatures<'tcx> {
+ /// The signature users of the closure see.
+ bound_sig: PolyFnSig<'tcx>,
+ /// The signature within the function body.
+ /// This mostly differs in the sense that lifetimes are now early bound and any
+ /// opaque types from the signature expectation are overridden in case there are
+ /// explicit hidden types written by the user in the closure signature.
+ liberated_sig: FnSig<'tcx>,
}
impl<'db> InferenceContext<'db> {
pub(super) fn infer_closure(
&mut self,
- body: &ExprId,
+ body: ExprId,
args: &[PatId],
- ret_type: &Option<TypeRefId>,
+ ret_type: Option<TypeRefId>,
arg_types: &[Option<TypeRefId>],
closure_kind: ClosureKind,
tgt_expr: ExprId,
expected: &Expectation,
- ) -> Ty {
+ ) -> crate::Ty {
assert_eq!(args.len(), arg_types.len());
+ let interner = self.table.interner;
let (expected_sig, expected_kind) = match expected.to_option(&mut self.table) {
- Some(expected_ty) => self.deduce_closure_signature(&expected_ty, closure_kind),
+ Some(expected_ty) => {
+ self.deduce_closure_signature(expected_ty.to_nextsolver(interner), closure_kind)
+ }
None => (None, None),
};
- let ClosureSignature { expected_sig: mut bound_sig, ret_ty: body_ret_ty } =
- self.sig_of_closure(body, ret_type, arg_types, closure_kind, expected_sig);
- bound_sig.substitution.0 = self
- .normalize_associated_types_in::<_, crate::next_solver::GenericArgs<'db>>(
- bound_sig.substitution.0,
- );
- let bound_sig = bound_sig;
- let sig_ty = TyKind::Function(bound_sig.clone()).intern(Interner);
+ let ClosureSignatures { bound_sig, liberated_sig } =
+ self.sig_of_closure(arg_types, ret_type, expected_sig);
+ let body_ret_ty = bound_sig.output().skip_binder();
+ let sig_ty = Ty::new_fn_ptr(interner, bound_sig);
+ let parent_args = GenericArgs::identity_for_item(interner, self.generic_def.into());
let (id, ty, resume_yield_tys) = match closure_kind {
ClosureKind::Coroutine(_) => {
- let sig_tys = bound_sig.substitution.0.as_slice(Interner);
- // FIXME: report error when there are more than 1 parameter.
- let resume_ty = match sig_tys.first() {
- // When `sig_tys.len() == 1` the first type is the return type, not the
- // first parameter type.
- Some(ty) if sig_tys.len() > 1 => ty.assert_ty_ref(Interner).clone(),
- _ => self.result.standard_types.unit.clone(),
- };
- let yield_ty = self.table.new_type_var();
+ let yield_ty = self.table.next_ty_var();
+ let resume_ty = liberated_sig
+ .inputs()
+ .get(0)
+ .unwrap_or(self.result.standard_types.unit.to_nextsolver(interner));
- let subst = TyBuilder::subst_for_coroutine(self.db, self.owner)
- .push(resume_ty.clone())
- .push(yield_ty.clone())
- .push(body_ret_ty.clone())
- .build();
+ // FIXME: Infer the upvars later.
+ let parts = CoroutineArgsParts {
+ parent_args,
+ kind_ty: Ty::new_unit(interner),
+ resume_ty,
+ yield_ty,
+ return_ty: body_ret_ty,
+ tupled_upvars_ty: Ty::new_unit(interner),
+ };
let coroutine_id =
self.db.intern_coroutine(InternedCoroutine(self.owner, tgt_expr)).into();
- let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner);
+ let coroutine_ty = Ty::new_coroutine(
+ interner,
+ coroutine_id,
+ CoroutineArgs::new(interner, parts).args,
+ );
- (None, coroutine_ty, Some((resume_ty, yield_ty)))
- }
- ClosureKind::Closure | ClosureKind::Async => {
- let closure_id =
- self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into();
- let closure_ty = TyKind::Closure(
- closure_id,
- TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()),
+ (
+ None,
+ coroutine_ty,
+ Some((resume_ty.to_chalk(interner), yield_ty.to_chalk(interner))),
)
- .intern(Interner);
+ }
+ // FIXME(next-solver): `ClosureKind::Async` should really be a separate arm that creates a `CoroutineClosure`.
+ // But for now we treat it as a closure.
+ ClosureKind::Closure | ClosureKind::Async => {
+ let closure_id = self.db.intern_closure(InternedClosure(self.owner, tgt_expr));
+ match expected_kind {
+ Some(kind) => {
+ self.result.closure_info.insert(
+ closure_id.into(),
+ (
+ Vec::new(),
+ match kind {
+ rustc_type_ir::ClosureKind::Fn => FnTrait::Fn,
+ rustc_type_ir::ClosureKind::FnMut => FnTrait::FnMut,
+ rustc_type_ir::ClosureKind::FnOnce => FnTrait::FnOnce,
+ },
+ ),
+ );
+ }
+ None => {}
+ };
+ // FIXME: Infer the kind and the upvars later when needed.
+ let parts = ClosureArgsParts {
+ parent_args,
+ closure_kind_ty: Ty::from_closure_kind(
+ interner,
+ expected_kind.unwrap_or(rustc_type_ir::ClosureKind::Fn),
+ ),
+ closure_sig_as_fn_ptr_ty: sig_ty,
+ tupled_upvars_ty: Ty::new_unit(interner),
+ };
+ let closure_ty = Ty::new_closure(
+ interner,
+ closure_id.into(),
+ ClosureArgs::new(interner, parts).args,
+ );
self.deferred_closures.entry(closure_id).or_default();
self.add_current_closure_dependency(closure_id);
(Some(closure_id), closure_ty, None)
}
};
- // Eagerly try to relate the closure type with the expected
- // type, otherwise we often won't have enough information to
- // infer the body.
- self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected, expected_kind);
-
// Now go through the argument patterns
- for (arg_pat, arg_ty) in args.iter().zip(bound_sig.substitution.0.as_slice(Interner).iter())
- {
- self.infer_top_pat(*arg_pat, arg_ty.assert_ty_ref(Interner), None);
+ for (arg_pat, arg_ty) in args.iter().zip(bound_sig.skip_binder().inputs()) {
+ self.infer_top_pat(*arg_pat, &arg_ty.to_chalk(interner), None);
}
// FIXME: lift these out into a struct
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_closure = mem::replace(&mut self.current_closure, id);
- let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.clone());
+ let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.to_chalk(interner));
let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty));
let prev_resume_yield_tys = mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
- this.infer_return(*body);
+ this.infer_return(body);
});
self.diverges = prev_diverges;
@@ -146,176 +171,203 @@
self.current_closure = prev_closure;
self.resume_yield_tys = prev_resume_yield_tys;
- self.table.normalize_associated_types_in(ty)
+ ty.to_chalk(interner)
}
- // This function handles both closures and coroutines.
- pub(super) fn deduce_closure_type_from_expectations(
- &mut self,
- closure_expr: ExprId,
- closure_ty: &Ty,
- sig_ty: &Ty,
- expectation: &Expectation,
- expected_kind: Option<FnTrait>,
- ) {
- let expected_ty = match expectation.to_option(&mut self.table) {
- Some(ty) => ty,
- None => return,
- };
-
- match (closure_ty.kind(Interner), expected_kind) {
- (TyKind::Closure(closure_id, _), Some(closure_kind)) => {
- self.result
- .closure_info
- .entry(*closure_id)
- .or_insert_with(|| (Vec::new(), closure_kind));
- }
- _ => {}
- }
-
- // Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here.
- let _ = self.coerce(Some(closure_expr), closure_ty, &expected_ty, CoerceNever::Yes);
-
- // Coroutines are not Fn* so return early.
- if matches!(closure_ty.kind(Interner), TyKind::Coroutine(..)) {
- return;
- }
-
- // Deduction based on the expected `dyn Fn` is done separately.
- if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner)
- && let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty)
- {
- let expected_sig_ty = TyKind::Function(sig).intern(Interner);
-
- self.unify(sig_ty, &expected_sig_ty);
+ fn fn_trait_kind_from_def_id(&self, trait_id: TraitId) -> Option<rustc_type_ir::ClosureKind> {
+ let lang_item = self.db.lang_attr(trait_id.into())?;
+ match lang_item {
+ LangItem::Fn => Some(rustc_type_ir::ClosureKind::Fn),
+ LangItem::FnMut => Some(rustc_type_ir::ClosureKind::FnMut),
+ LangItem::FnOnce => Some(rustc_type_ir::ClosureKind::FnOnce),
+ _ => None,
}
}
- // Closure kind deductions are mostly from `rustc_hir_typeck/src/closure.rs`.
- // Might need to port closure sig deductions too.
- pub(super) fn deduce_closure_signature(
+ fn async_fn_trait_kind_from_def_id(
+ &self,
+ trait_id: TraitId,
+ ) -> Option<rustc_type_ir::ClosureKind> {
+ let lang_item = self.db.lang_attr(trait_id.into())?;
+ match lang_item {
+ LangItem::AsyncFn => Some(rustc_type_ir::ClosureKind::Fn),
+ LangItem::AsyncFnMut => Some(rustc_type_ir::ClosureKind::FnMut),
+ LangItem::AsyncFnOnce => Some(rustc_type_ir::ClosureKind::FnOnce),
+ _ => None,
+ }
+ }
+
+ /// Given the expected type, figures out what it can about this closure we
+ /// are about to type check:
+ fn deduce_closure_signature(
&mut self,
- expected_ty: &Ty,
+ expected_ty: Ty<'db>,
closure_kind: ClosureKind,
- ) -> (Option<FnSubst<Interner>>, Option<FnTrait>) {
- match expected_ty.kind(Interner) {
- TyKind::Alias(AliasTy::Opaque(OpaqueTy { .. })) | TyKind::OpaqueType(..) => {
- let clauses = expected_ty.impl_trait_bounds(self.db).into_iter().flatten().map(
- |b: chalk_ir::Binders<chalk_ir::WhereClause<Interner>>| {
- b.into_value_and_skipped_binders().0
- },
- );
- self.deduce_closure_kind_from_predicate_clauses(expected_ty, clauses, closure_kind)
- }
- TyKind::Dyn(dyn_ty) => {
- let sig =
- dyn_ty.bounds.skip_binders().as_slice(Interner).iter().find_map(|bound| {
- if let WhereClause::AliasEq(AliasEq {
- alias: AliasTy::Projection(projection_ty),
- ty: projected_ty,
- }) = bound.skip_binders()
- && let Some(sig) = self.deduce_sig_from_projection(
- closure_kind,
- projection_ty,
- projected_ty,
- )
- {
- return Some(sig);
- }
- None
- });
-
- let kind = dyn_ty.principal().and_then(|principal_trait_ref| {
- self.fn_trait_kind_from_trait_id(from_chalk_trait_id(
- principal_trait_ref.skip_binders().skip_binders().trait_id,
- ))
+ ) -> (Option<PolyFnSig<'db>>, Option<rustc_type_ir::ClosureKind>) {
+ match expected_ty.kind() {
+ TyKind::Alias(rustc_type_ir::Opaque, AliasTy { def_id, args, .. }) => self
+ .deduce_closure_signature_from_predicates(
+ expected_ty,
+ closure_kind,
+ explicit_item_bounds(self.table.interner, def_id)
+ .iter_instantiated(self.table.interner, args)
+ .map(|clause| clause.as_predicate()),
+ ),
+ TyKind::Dynamic(object_type, ..) => {
+ let sig = object_type.projection_bounds().into_iter().find_map(|pb| {
+ let pb =
+ pb.with_self_ty(self.table.interner, Ty::new_unit(self.table.interner));
+ self.deduce_sig_from_projection(closure_kind, pb)
});
-
+ let kind = object_type
+ .principal_def_id()
+ .and_then(|did| self.fn_trait_kind_from_def_id(did.0));
(sig, kind)
}
- TyKind::InferenceVar(ty, chalk_ir::TyVariableKind::General) => {
- let clauses = self.clauses_for_self_ty(*ty);
- self.deduce_closure_kind_from_predicate_clauses(
- expected_ty,
- clauses.into_iter(),
+ TyKind::Infer(rustc_type_ir::TyVar(vid)) => self
+ .deduce_closure_signature_from_predicates(
+ Ty::new_var(self.table.interner, self.table.infer_ctxt.root_var(vid)),
closure_kind,
- )
- }
- TyKind::Function(fn_ptr) => match closure_kind {
- ClosureKind::Closure => (Some(fn_ptr.substitution.clone()), Some(FnTrait::Fn)),
- ClosureKind::Async | ClosureKind::Coroutine(_) => (None, None),
+ self.table.obligations_for_self_ty(vid).into_iter().map(|obl| obl.predicate),
+ ),
+ TyKind::FnPtr(sig_tys, hdr) => match closure_kind {
+ ClosureKind::Closure => {
+ let expected_sig = sig_tys.with(hdr);
+ (Some(expected_sig), Some(rustc_type_ir::ClosureKind::Fn))
+ }
+ ClosureKind::Coroutine(_) | ClosureKind::Async => (None, None),
},
_ => (None, None),
}
}
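// For intuition (hypothetical example): with `fn takes(f: impl Fn(u32) -> bool)` and a call
// `takes(|x| x > 0)`, the closure's expected type is an inference variable whose bounds
// include `Fn(u32) -> bool`; the `Infer` arm above elaborates those bounds and deduces the
// signature `fn(u32) -> bool` plus the kind `ClosureKind::Fn`.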
- fn deduce_closure_kind_from_predicate_clauses(
+ fn deduce_closure_signature_from_predicates(
&mut self,
- expected_ty: &Ty,
- clauses: impl DoubleEndedIterator<Item = WhereClause>,
+ expected_ty: Ty<'db>,
closure_kind: ClosureKind,
- ) -> (Option<FnSubst<Interner>>, Option<FnTrait>) {
+ predicates: impl DoubleEndedIterator<Item = Predicate<'db>>,
+ ) -> (Option<PolyFnSig<'db>>, Option<rustc_type_ir::ClosureKind>) {
let mut expected_sig = None;
let mut expected_kind = None;
- for clause in elaborate_clause_supertraits(self.db, clauses.rev()) {
+ for pred in rustc_type_ir::elaborate::elaborate(
+ self.table.interner,
+ // Reverse the obligations here, since `elaborate_*` uses a stack,
+ // and we want to keep inference generally in the same order of
+ // the registered obligations.
+ predicates.rev(),
+ )
+ // We only care about self bounds
+ .filter_only_self()
+ {
+ debug!(?pred);
+ let bound_predicate = pred.kind();
+
+ // Given a Projection predicate, we can potentially infer
+ // the complete signature.
if expected_sig.is_none()
- && let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
- &clause
+ && let PredicateKind::Clause(ClauseKind::Projection(proj_predicate)) =
+ bound_predicate.skip_binder()
{
- let inferred_sig = self.deduce_sig_from_projection(closure_kind, projection, ty);
+ let inferred_sig = self.deduce_sig_from_projection(
+ closure_kind,
+ bound_predicate.rebind(proj_predicate),
+ );
+
// Make sure that we didn't infer a signature that mentions itself.
// This can happen when we elaborate certain supertrait bounds that
- // mention projections containing the `Self` type. See rust-lang/rust#105401.
- struct MentionsTy<'a> {
- expected_ty: &'a Ty,
+ // mention projections containing the `Self` type. See #105401.
+ struct MentionsTy<'db> {
+ expected_ty: Ty<'db>,
}
- impl TypeVisitor<Interner> for MentionsTy<'_> {
- type BreakTy = ();
+ impl<'db> TypeVisitor<DbInterner<'db>> for MentionsTy<'db> {
+ type Result = ControlFlow<()>;
- fn interner(&self) -> Interner {
- Interner
- }
-
- fn as_dyn(
- &mut self,
- ) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy>
- {
- self
- }
-
- fn visit_ty(&mut self, t: &Ty, db: chalk_ir::DebruijnIndex) -> ControlFlow<()> {
+ fn visit_ty(&mut self, t: Ty<'db>) -> Self::Result {
if t == self.expected_ty {
ControlFlow::Break(())
} else {
- t.super_visit_with(self, db)
+ t.super_visit_with(self)
}
}
}
- if inferred_sig
- .visit_with(&mut MentionsTy { expected_ty }, chalk_ir::DebruijnIndex::INNERMOST)
- .is_continue()
- {
+
+ // Don't infer a closure signature from a goal that names the closure type as this will
+ // (almost always) lead to occurs check errors later in type checking.
+ if let Some(inferred_sig) = inferred_sig {
+ // In the new solver it is difficult to explicitly normalize the inferred signature as we
+ // would have to manually handle universes and rewriting bound vars and placeholders back
+ // and forth.
+ //
+ // Instead we take advantage of the fact that relating an inference variable with an alias
+ // will only instantiate the variable if the alias is rigid (*not quite). Concretely we:
+ // - Create some new variable `?sig`
+ // - Equate `?sig` with the unnormalized signature, e.g. `fn(<Foo<?x> as Trait>::Assoc)`
+ // - Depending on whether `<Foo<?x> as Trait>::Assoc` is rigid, ambiguous or normalizable,
+ // `?sig` will wind up as the unnormalized alias, a fresh `?y`, or the concrete type, respectively.
+ //
+ // *: In cases where there are ambiguous aliases in the signature that make use of bound vars
+ // they will wind up present in `?sig` even though they are non-rigid.
+ //
+ // This is a bit weird and means we may wind up discarding the goal due to it naming `expected_ty`
+ // even though the normalized form may not name `expected_ty`. However, this matches the existing
+ // behaviour of the old solver and would be technically a breaking change to fix.
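// For example (illustrative), with a bound like `F: Fn(<Foo<?x> as Trait>::Assoc) -> u32`
// we equate `?sig = fn(<Foo<?x> as Trait>::Assoc) -> u32`; once `?x` resolves so that the
// alias normalizes to, say, `u32`, resolving `?sig` yields `fn(u32) -> u32`.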
+ let generalized_fnptr_sig = self.table.next_ty_var();
+ let inferred_fnptr_sig = Ty::new_fn_ptr(self.table.interner, inferred_sig);
+ // FIXME: Report diagnostics.
+ _ = self
+ .table
+ .infer_ctxt
+ .at(&ObligationCause::new(), self.table.param_env)
+ .eq(DefineOpaqueTypes::Yes, inferred_fnptr_sig, generalized_fnptr_sig)
+ .map(|infer_ok| self.table.register_infer_ok(infer_ok));
+
+ let resolved_sig =
+ self.table.infer_ctxt.resolve_vars_if_possible(generalized_fnptr_sig);
+
+ if resolved_sig.visit_with(&mut MentionsTy { expected_ty }).is_continue() {
+ expected_sig = Some(resolved_sig.fn_sig(self.table.interner));
+ }
+ } else if inferred_sig.visit_with(&mut MentionsTy { expected_ty }).is_continue() {
expected_sig = inferred_sig;
}
}
- let trait_id = match clause {
- WhereClause::AliasEq(AliasEq {
- alias: AliasTy::Projection(projection), ..
- }) => projection.trait_(self.db),
- WhereClause::Implemented(trait_ref) => from_chalk_trait_id(trait_ref.trait_id),
- _ => continue,
+ // Even if we can't infer the full signature, we may be able to
+ // infer the kind. This can occur when we elaborate a predicate
+ // like `F : Fn<A>`. Note that due to subtyping we could encounter
+ // many viable options, so pick the most restrictive.
+ let trait_def_id = match bound_predicate.skip_binder() {
+ PredicateKind::Clause(ClauseKind::Projection(data)) => {
+ Some(data.projection_term.trait_def_id(self.table.interner).0)
+ }
+ PredicateKind::Clause(ClauseKind::Trait(data)) => Some(data.def_id().0),
+ _ => None,
};
- if let Some(closure_kind) = self.fn_trait_kind_from_trait_id(trait_id) {
- // always use the closure kind that is more permissive.
- match (expected_kind, closure_kind) {
- (None, _) => expected_kind = Some(closure_kind),
- (Some(FnTrait::FnMut), FnTrait::Fn) => expected_kind = Some(FnTrait::Fn),
- (Some(FnTrait::FnOnce), FnTrait::Fn | FnTrait::FnMut) => {
- expected_kind = Some(closure_kind)
+
+ if let Some(trait_def_id) = trait_def_id {
+ let found_kind = match closure_kind {
+ ClosureKind::Closure => self.fn_trait_kind_from_def_id(trait_def_id),
+ ClosureKind::Async => self
+ .async_fn_trait_kind_from_def_id(trait_def_id)
+ .or_else(|| self.fn_trait_kind_from_def_id(trait_def_id)),
+ _ => None,
+ };
+
+ if let Some(found_kind) = found_kind {
+ // always use the closure kind that is more permissive.
+ match (expected_kind, found_kind) {
+ (None, _) => expected_kind = Some(found_kind),
+ (
+ Some(rustc_type_ir::ClosureKind::FnMut),
+ rustc_type_ir::ClosureKind::Fn,
+ ) => expected_kind = Some(rustc_type_ir::ClosureKind::Fn),
+ (
+ Some(rustc_type_ir::ClosureKind::FnOnce),
+ rustc_type_ir::ClosureKind::Fn | rustc_type_ir::ClosureKind::FnMut,
+ ) => expected_kind = Some(found_kind),
+ _ => {}
}
- _ => {}
}
}
}
@@ -323,1519 +375,440 @@
(expected_sig, expected_kind)
}
- fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option<FnPointer> {
- // Search for a predicate like `<$self as FnX<Args>>::Output == Ret`
-
- let fn_traits: SmallVec<[TraitId; 3]> =
- utils::fn_traits(self.db, self.owner.module(self.db).krate()).collect();
-
- let self_ty = self.result.standard_types.unknown.clone();
- let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]);
- for bound in bounds.iter(Interner) {
- // NOTE(skip_binders): the extracted types are rebound by the returned `FnPointer`
- if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
- bound.skip_binders()
- {
- let trait_ =
- match from_assoc_type_id(projection.associated_ty_id).lookup(self.db).container
- {
- ItemContainerId::TraitId(t) => t,
- _ => panic!("associated type not in trait"),
- };
- if !fn_traits.contains(&trait_) {
- return None;
- }
-
- // Skip `Self`, get the type argument.
- let arg = projection.substitution.as_slice(Interner).get(1)?;
- if let Some(subst) = arg.ty(Interner)?.as_tuple() {
- let generic_args = subst.as_slice(Interner);
- let mut sig_tys = Vec::with_capacity(generic_args.len() + 1);
- for arg in generic_args {
- sig_tys.push(arg.ty(Interner)?.clone());
- }
- sig_tys.push(ty.clone());
-
- cov_mark::hit!(dyn_fn_param_informs_call_site_closure_signature);
- return Some(FnPointer {
- num_binders: bound.len(Interner),
- sig: FnSig {
- abi: FnAbi::RustCall,
- safety: chalk_ir::Safety::Safe,
- variadic: false,
- },
- substitution: FnSubst(Substitution::from_iter(Interner, sig_tys)),
- });
- }
- }
- }
-
- None
- }
-
+ /// Given a projection like `<F as Fn(X)>::Output == Y`, we can deduce
+ /// everything we need to know about a closure or coroutine.
fn deduce_sig_from_projection(
&mut self,
closure_kind: ClosureKind,
- projection_ty: &ProjectionTy,
- projected_ty: &Ty,
- ) -> Option<FnSubst<Interner>> {
- let container =
- from_assoc_type_id(projection_ty.associated_ty_id).lookup(self.db).container;
- let trait_ = match container {
- hir_def::ItemContainerId::TraitId(trait_) => trait_,
- _ => return None,
- };
+ projection: PolyProjectionPredicate<'db>,
+ ) -> Option<PolyFnSig<'db>> {
+ let SolverDefId::TypeAliasId(def_id) = projection.item_def_id() else { unreachable!() };
+ let lang_item = self.db.lang_attr(def_id.into());
// For now, we only do signature deduction based off of the `Fn` and `AsyncFn` traits,
// for closures and async closures, respectively.
- let fn_trait_kind = self.fn_trait_kind_from_trait_id(trait_)?;
- if !matches!(closure_kind, ClosureKind::Closure | ClosureKind::Async) {
- return None;
- }
- if fn_trait_kind.is_async() {
- // If the expected trait is `AsyncFn(...) -> X`, we don't know what the return type is,
- // but we do know it must implement `Future<Output = X>`.
- self.extract_async_fn_sig_from_projection(projection_ty, projected_ty)
- } else {
- self.extract_sig_from_projection(projection_ty, projected_ty)
+ match closure_kind {
+ ClosureKind::Closure if lang_item == Some(LangItem::FnOnceOutput) => {
+ self.extract_sig_from_projection(projection)
+ }
+ ClosureKind::Async if lang_item == Some(LangItem::AsyncFnOnceOutput) => {
+ self.extract_sig_from_projection(projection)
+ }
+ // It's possible we've passed the closure to a (somewhat out-of-fashion)
+ // `F: FnOnce() -> Fut, Fut: Future<Output = T>` style bound. Let's still
+ // guide inference here, since it's beneficial for the user.
+ ClosureKind::Async if lang_item == Some(LangItem::FnOnceOutput) => {
+ self.extract_sig_from_projection_and_future_bound(projection)
+ }
+ _ => None,
}
}
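// Illustrative example: a bound `F: Fn(String, u32) -> i32` elaborates to the projection
// `<F as FnOnce<(String, u32)>>::Output == i32` (lang item `FnOnceOutput`), from which
// `extract_sig_from_projection` recovers the signature `fn(String, u32) -> i32`.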
+ /// Given an `FnOnce::Output` or `AsyncFn::Output` projection, extract the args
+ /// and return type to infer a [`ty::PolyFnSig`] for the closure.
fn extract_sig_from_projection(
&self,
- projection_ty: &ProjectionTy,
- projected_ty: &Ty,
- ) -> Option<FnSubst<Interner>> {
- let arg_param_ty = projection_ty.substitution.as_slice(Interner)[1].assert_ty_ref(Interner);
+ projection: PolyProjectionPredicate<'db>,
+ ) -> Option<PolyFnSig<'db>> {
+ let projection = self.table.infer_ctxt.resolve_vars_if_possible(projection);
- let TyKind::Tuple(_, input_tys) = arg_param_ty.kind(Interner) else {
+ let arg_param_ty = projection.skip_binder().projection_term.args.type_at(1);
+ debug!(?arg_param_ty);
+
+ let TyKind::Tuple(input_tys) = arg_param_ty.kind() else {
return None;
};
- let ret_param_ty = projected_ty;
+ // Since this is a return parameter type it is safe to unwrap.
+ let ret_param_ty = projection.skip_binder().term.expect_type();
+ debug!(?ret_param_ty);
- Some(FnSubst(Substitution::from_iter(
- Interner,
- input_tys.iter(Interner).map(|t| t.cast(Interner)).chain(Some(GenericArg::new(
- Interner,
- chalk_ir::GenericArgData::Ty(ret_param_ty.clone()),
- ))),
- )))
+ let sig = projection.rebind(self.table.interner.mk_fn_sig(
+ input_tys,
+ ret_param_ty,
+ false,
+ Safety::Safe,
+ FnAbi::Rust,
+ ));
+
+ Some(sig)
}
- fn extract_async_fn_sig_from_projection(
+ /// When an async closure is passed to a function that has a "two-part" `Fn`
+ /// and `Future` trait bound, like:
+ ///
+ /// ```rust
+ /// use std::future::Future;
+ ///
+ /// fn not_exactly_an_async_closure<F, Fut>(_f: F)
+ /// where
+ /// F: FnOnce(String, u32) -> Fut,
+ /// Fut: Future<Output = i32>,
+ /// {}
+ /// ```
+ ///
+ /// Then we want to be able to extract the signature to guide inference in the async
+ /// closure. We will have two projection predicates registered in this case. First,
+ /// we identify the `FnOnce<Args, Output = ?Fut>` bound, and if the output type is
+ /// an inference variable `?Fut`, we check if that is bounded by a `Future<Output = Ty>`
+ /// projection.
+ ///
+ /// This function is actually best-effort with the return type; if we don't find a
+ /// `Future` projection, we will still return the arguments we extracted from the `FnOnce`
+ /// projection, and the output will be an unconstrained type variable instead.
+ fn extract_sig_from_projection_and_future_bound(
&mut self,
- projection_ty: &ProjectionTy,
- projected_ty: &Ty,
- ) -> Option<FnSubst<Interner>> {
- let arg_param_ty = projection_ty.substitution.as_slice(Interner)[1].assert_ty_ref(Interner);
+ projection: PolyProjectionPredicate<'db>,
+ ) -> Option<PolyFnSig<'db>> {
+ let projection = self.table.infer_ctxt.resolve_vars_if_possible(projection);
- let TyKind::Tuple(_, input_tys) = arg_param_ty.kind(Interner) else {
+ let arg_param_ty = projection.skip_binder().projection_term.args.type_at(1);
+ debug!(?arg_param_ty);
+
+ let TyKind::Tuple(input_tys) = arg_param_ty.kind() else {
return None;
};
- let ret_param_future_output = projected_ty;
- let ret_param_future = self.table.new_type_var();
- let future_output =
- LangItem::FutureOutput.resolve_type_alias(self.db, self.resolver.krate())?;
- let future_projection = crate::AliasTy::Projection(crate::ProjectionTy {
- associated_ty_id: to_assoc_type_id(future_output),
- substitution: Substitution::from1(Interner, ret_param_future.clone()),
- });
- let goal: crate::Goal =
- crate::AliasEq { alias: future_projection, ty: ret_param_future_output.clone() }
- .cast(Interner);
- self.table.register_obligation(goal.to_nextsolver(self.table.interner));
-
- Some(FnSubst(Substitution::from_iter(
- Interner,
- input_tys.iter(Interner).map(|t| t.cast(Interner)).chain(Some(GenericArg::new(
- Interner,
- chalk_ir::GenericArgData::Ty(ret_param_future),
- ))),
- )))
- }
-
- fn fn_trait_kind_from_trait_id(&self, trait_id: hir_def::TraitId) -> Option<FnTrait> {
- FnTrait::from_lang_item(self.db.lang_attr(trait_id.into())?)
- }
-
- fn supplied_sig_of_closure(
- &mut self,
- body: &ExprId,
- ret_type: &Option<TypeRefId>,
- arg_types: &[Option<TypeRefId>],
- closure_kind: ClosureKind,
- ) -> ClosureSignature {
- let mut sig_tys = Vec::with_capacity(arg_types.len() + 1);
-
- // collect explicitly written argument types
- for arg_type in arg_types.iter() {
- let arg_ty = match arg_type {
- // FIXME: I think rustc actually lowers closure params with `LifetimeElisionKind::AnonymousCreateParameter`
- // (but the return type with infer).
- Some(type_ref) => self.make_body_ty(*type_ref),
- None => self.table.new_type_var(),
- };
- sig_tys.push(arg_ty);
- }
-
- // add return type
- let ret_ty = match ret_type {
- Some(type_ref) => self.make_body_ty(*type_ref),
- None => self.table.new_type_var(),
- };
- if let ClosureKind::Async = closure_kind {
- sig_tys.push(self.lower_async_block_type_impl_trait(ret_ty.clone(), *body));
- } else {
- sig_tys.push(ret_ty.clone());
- }
-
- let expected_sig = FnPointer {
- num_binders: 0,
- sig: FnSig { abi: FnAbi::RustCall, safety: chalk_ir::Safety::Safe, variadic: false },
- substitution: FnSubst(
- Substitution::from_iter(Interner, sig_tys.iter().cloned()).shifted_in(Interner),
- ),
+ // If the return type is a type variable, look for bounds on it.
+ // We could theoretically support other kinds of return types here,
+ // but none of them would be useful, since async closures return
+ // concrete anonymous future types, and their futures are not coerced
+ // into any other type within the body of the async closure.
+ let TyKind::Infer(rustc_type_ir::TyVar(return_vid)) =
+ projection.skip_binder().term.expect_type().kind()
+ else {
+ return None;
};
- ClosureSignature { ret_ty, expected_sig }
+ // FIXME: We may want to elaborate here, though I assume this will be exceedingly rare.
+ let mut return_ty = None;
+ for bound in self.table.obligations_for_self_ty(return_vid) {
+ if let PredicateKind::Clause(ClauseKind::Projection(ret_projection)) =
+ bound.predicate.kind().skip_binder()
+ && let ret_projection = bound.predicate.kind().rebind(ret_projection)
+ && let Some(ret_projection) = ret_projection.no_bound_vars()
+ && let SolverDefId::TypeAliasId(assoc_type) = ret_projection.def_id()
+ && self.db.lang_attr(assoc_type.into()) == Some(LangItem::FutureOutput)
+ {
+ return_ty = Some(ret_projection.term.expect_type());
+ break;
+ }
+ }
+
+ // SUBTLE: If we didn't find a `Future<Output = ...>` bound for the return
+ // vid, we still want to attempt to provide inference guidance for the async
+ // closure's arguments. Instantiate a new vid to plug into the output type.
+ //
+ // You may be wondering, what if it's higher-ranked? Well, given that we
+ // found a type variable for the `FnOnce::Output` projection above, we know
+ // that the output can't mention any of the vars.
+ //
+ // Also note that we use a fresh var here for the signature since the signature
+ // records the output of the *future*, and `return_vid` above is the type
+ // variable of the future, not its output.
+ //
+ // FIXME: We probably should store this signature inference output in a way
+ // that does not misuse a `FnSig` type, but that can be done separately.
+ let return_ty = return_ty.unwrap_or_else(|| self.table.next_ty_var());
+
+ let sig = projection.rebind(self.table.interner.mk_fn_sig(
+ input_tys,
+ return_ty,
+ false,
+ Safety::Safe,
+ FnAbi::Rust,
+ ));
+
+ Some(sig)
}
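// Illustrative example: for the `not_exactly_an_async_closure` bound in the doc comment above,
// an async closure argument registers `<F as FnOnce<(String, u32)>>::Output == ?Fut` together
// with `?Fut: Future<Output = i32>`; following the bounds on `?Fut` recovers `i32`, so the
// deduced signature is `fn(String, u32) -> i32`.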
- /// The return type is the signature of the closure, and the return type
- /// *as represented inside the body* (so, for async closures, the `Output` ty)
- pub(super) fn sig_of_closure(
+ fn sig_of_closure(
&mut self,
- body: &ExprId,
- ret_type: &Option<TypeRefId>,
- arg_types: &[Option<TypeRefId>],
- closure_kind: ClosureKind,
- expected_sig: Option<FnSubst<Interner>>,
- ) -> ClosureSignature {
+ decl_inputs: &[Option<TypeRefId>],
+ decl_output: Option<TypeRefId>,
+ expected_sig: Option<PolyFnSig<'db>>,
+ ) -> ClosureSignatures<'db> {
if let Some(e) = expected_sig {
- self.sig_of_closure_with_expectation(body, ret_type, arg_types, closure_kind, e)
+ self.sig_of_closure_with_expectation(decl_inputs, decl_output, e)
} else {
- self.sig_of_closure_no_expectation(body, ret_type, arg_types, closure_kind)
+ self.sig_of_closure_no_expectation(decl_inputs, decl_output)
}
}
+ /// If there is no expected signature, then we will convert the
+ /// types that the user gave into a signature.
fn sig_of_closure_no_expectation(
&mut self,
- body: &ExprId,
- ret_type: &Option<TypeRefId>,
- arg_types: &[Option<TypeRefId>],
- closure_kind: ClosureKind,
- ) -> ClosureSignature {
- self.supplied_sig_of_closure(body, ret_type, arg_types, closure_kind)
+ decl_inputs: &[Option<TypeRefId>],
+ decl_output: Option<TypeRefId>,
+ ) -> ClosureSignatures<'db> {
+ let bound_sig = self.supplied_sig_of_closure(decl_inputs, decl_output);
+
+ self.closure_sigs(bound_sig)
}
+ /// Invoked to compute the signature of a closure expression. This
+ /// combines any user-provided type annotations (e.g., `|x: u32|
+ /// -> u32 { .. }`) with the expected signature.
+ ///
+ /// The approach is as follows:
+ ///
+ /// - Let `S` be the (higher-ranked) signature that we derive from the user's annotations.
+ /// - Let `E` be the (higher-ranked) signature that we derive from the expectations, if any.
+ /// - If we have no expectation `E`, then the signature of the closure is `S`.
+ /// - Otherwise, the signature of the closure is `E`. Moreover:
+ /// - Skolemize the late-bound regions in `E`, yielding `E'`.
+ /// - Instantiate all the late-bound regions bound in the closure within `S`
+ /// with fresh (existential) variables, yielding `S'`
+ /// - Require that `E' = S'`
+ /// - We could use some kind of subtyping relationship here,
+ /// I imagine, but equality is easier and works fine for
+ /// our purposes.
+ ///
+ /// The key intuition here is that the user's types must be valid
+ /// from "the inside" of the closure, but the expectation
+ /// ultimately drives the overall signature.
+ ///
+ /// # Examples
+ ///
+ /// ```ignore (illustrative)
+ /// fn with_closure<F>(_: F)
+ /// where F: Fn(&u32) -> &u32 { .. }
+ ///
+ /// with_closure(|x: &u32| { ... })
+ /// ```
+ ///
+ /// Here:
+ /// - E would be `fn(&u32) -> &u32`.
+ /// - S would be `fn(&u32) -> ?T`
+ /// - E' is `&'!0 u32 -> &'!0 u32`
+ /// - S' is `&'?0 u32 -> ?T`
+ ///
+ /// S' can be unified with E' with `['?0 = '!0, ?T = &'!0 u32]`.
+ ///
+ /// # Arguments
+ ///
+ /// - `decl_inputs`: the parameter types the user wrote for the closure, if any
+ /// - `decl_output`: the return type the user wrote for the closure, if any
+ /// - `expected_sig`: the expected signature (if any). Note that
+ /// this is missing a binder: that is, there may be late-bound
+ /// regions with depth 1, which are then bound by the closure.
fn sig_of_closure_with_expectation(
&mut self,
- body: &ExprId,
- ret_type: &Option<TypeRefId>,
- arg_types: &[Option<TypeRefId>],
- closure_kind: ClosureKind,
- expected_sig: FnSubst<Interner>,
- ) -> ClosureSignature {
- let expected_sig = FnPointer {
- num_binders: 0,
- sig: FnSig { abi: FnAbi::RustCall, safety: chalk_ir::Safety::Safe, variadic: false },
- substitution: expected_sig,
- };
-
- // If the expected signature does not match the actual arg types,
- // then just return the expected signature
- if expected_sig.substitution.0.len(Interner) != arg_types.len() + 1 {
- let ret_ty = match ret_type {
- Some(type_ref) => self.make_body_ty(*type_ref),
- None => self.table.new_type_var(),
- };
- return ClosureSignature { expected_sig, ret_ty };
+ decl_inputs: &[Option<TypeRefId>],
+ decl_output: Option<TypeRefId>,
+ expected_sig: PolyFnSig<'db>,
+ ) -> ClosureSignatures<'db> {
+ // Watch out for some surprises and just ignore the
+ // expectation if things don't seem to match up with what we
+ // expect.
+ if expected_sig.c_variadic() {
+ return self.sig_of_closure_no_expectation(decl_inputs, decl_output);
+ } else if expected_sig.skip_binder().inputs_and_output.len() != decl_inputs.len() + 1 {
+ return self
+ .sig_of_closure_with_mismatched_number_of_arguments(decl_inputs, decl_output);
}
- self.merge_supplied_sig_with_expectation(
- body,
- ret_type,
- arg_types,
- closure_kind,
- expected_sig,
- )
+ // Create a `PolyFnSig`. Note the oddity that late bound
+ // regions appearing free in `expected_sig` are now bound up
+ // in this binder we are creating.
+ assert!(!expected_sig.skip_binder().has_vars_bound_above(rustc_type_ir::INNERMOST));
+ let bound_sig = expected_sig.map_bound(|sig| {
+ self.table.interner.mk_fn_sig(
+ sig.inputs(),
+ sig.output(),
+ sig.c_variadic,
+ Safety::Safe,
+ FnAbi::RustCall,
+ )
+ });
+
+ // The expectation deduced above (in `deduce_closure_signature`) may introduce
+ // late-bound lifetimes defined elsewhere, which we now
+ // anonymize away, so as not to confuse the user.
+ let bound_sig = self.table.interner.anonymize_bound_vars(bound_sig);
+
+ let closure_sigs = self.closure_sigs(bound_sig);
+
+ // Up till this point, we have ignored the annotations that the user
+ // gave. This function will check that they unify successfully.
+ // Along the way, it also writes out entries for types that the user
+ // wrote into our typeck results, which are then later used by the privacy
+ // check.
+ match self.merge_supplied_sig_with_expectation(decl_inputs, decl_output, closure_sigs) {
+ Ok(infer_ok) => self.table.register_infer_ok(infer_ok),
+ Err(_) => self.sig_of_closure_no_expectation(decl_inputs, decl_output),
+ }
}
+ fn sig_of_closure_with_mismatched_number_of_arguments(
+ &mut self,
+ decl_inputs: &[Option<TypeRefId>],
+ decl_output: Option<TypeRefId>,
+ ) -> ClosureSignatures<'db> {
+ let error_sig = self.error_sig_of_closure(decl_inputs, decl_output);
+
+ self.closure_sigs(error_sig)
+ }
+
+ /// Enforce the user's types against the expectation. See
+ /// `sig_of_closure_with_expectation` for details on the overall
+ /// strategy.
fn merge_supplied_sig_with_expectation(
&mut self,
- body: &ExprId,
- ret_type: &Option<TypeRefId>,
- arg_types: &[Option<TypeRefId>],
- closure_kind: ClosureKind,
- expected_sig: FnPointer,
- ) -> ClosureSignature {
- let supplied_sig = self.supplied_sig_of_closure(body, ret_type, arg_types, closure_kind);
+ decl_inputs: &[Option<TypeRefId>],
+ decl_output: Option<TypeRefId>,
+ mut expected_sigs: ClosureSignatures<'db>,
+ ) -> InferResult<'db, ClosureSignatures<'db>> {
+ // Get the signature S that the user gave.
+ //
+ // (See comment on `sig_of_closure_with_expectation` for the
+ // meaning of these letters.)
+ let supplied_sig = self.supplied_sig_of_closure(decl_inputs, decl_output);
- let snapshot = self.table.snapshot();
- if !self.table.unify::<_, crate::next_solver::GenericArgs<'_>>(
- &expected_sig.substitution.0,
- &supplied_sig.expected_sig.substitution.0,
- ) {
- self.table.rollback_to(snapshot);
- }
+ debug!(?supplied_sig);
- supplied_sig
- }
-}
-
-// The below functions handle capture and closure kind (Fn, FnMut, ..)
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub(crate) struct HirPlace {
- pub(crate) local: BindingId,
- pub(crate) projections: Vec<ProjectionElem<Infallible, Ty>>,
-}
-
-impl HirPlace {
- fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty {
- let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone());
- for p in &self.projections {
- ty = p.projected_ty(
- ty,
- ctx.db,
- |_, _, _| {
- unreachable!("Closure field only happens in MIR");
- },
- ctx.owner.module(ctx.db).krate(),
+ // FIXME(#45727): As discussed in [this comment][c1], naively
+ // forcing equality here actually results in suboptimal error
+ // messages in some cases. For now, if there would have been
+ // an obvious error, we fallback to declaring the type of the
+ // closure to be the one the user gave, which allows other
+ // error message code to trigger.
+ //
+ // However, I think [there is potential to do even better
+ // here][c2], since in *this* code we have the precise span of
+ // the type parameter in question in hand when we report the
+ // error.
+ //
+ // [c1]: https://github.com/rust-lang/rust/pull/45072#issuecomment-341089706
+ // [c2]: https://github.com/rust-lang/rust/pull/45072#issuecomment-341096796
+ self.table.commit_if_ok(|table| {
+ let mut all_obligations = PredicateObligations::new();
+ let supplied_sig = table.infer_ctxt.instantiate_binder_with_fresh_vars(
+ BoundRegionConversionTime::FnCall,
+ supplied_sig,
);
- }
- ty
- }
- fn capture_kind_of_truncated_place(
- &self,
- mut current_capture: CaptureKind,
- len: usize,
- ) -> CaptureKind {
- if let CaptureKind::ByRef(BorrowKind::Mut {
- kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow,
- }) = current_capture
- && self.projections[len..].contains(&ProjectionElem::Deref)
- {
- current_capture =
- CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture });
- }
- current_capture
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
-pub enum CaptureKind {
- ByRef(BorrowKind),
- ByValue,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct CapturedItem {
- pub(crate) place: HirPlace,
- pub(crate) kind: CaptureKind,
- /// The inner vec is the stacks; the outer vec is for each capture reference.
- ///
- /// Even though we always report only the last span (i.e. the most inclusive span),
- /// we need to keep them all, since when a closure occurs inside a closure, we
- /// copy all captures of the inner closure to the outer closure, and then we may
- /// truncate them, and we want the correct span to be reported.
- span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
- pub(crate) ty: Binders<Ty>,
-}
-
-impl CapturedItem {
- pub fn local(&self) -> BindingId {
- self.place.local
- }
-
- /// Returns whether this place has any field (aka. non-deref) projections.
- pub fn has_field_projections(&self) -> bool {
- self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref))
- }
-
- pub fn ty(&self, subst: &Substitution) -> Ty {
- self.ty.clone().substitute(Interner, utils::ClosureSubst(subst).parent_subst())
- }
-
- pub fn kind(&self) -> CaptureKind {
- self.kind
- }
-
- pub fn spans(&self) -> SmallVec<[MirSpan; 3]> {
- self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect()
- }
-
- /// Converts the place to a name that can be inserted into source code.
- pub fn place_to_name(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
- let body = db.body(owner);
- let mut result = body[self.place.local].name.as_str().to_owned();
- for proj in &self.place.projections {
- match proj {
- ProjectionElem::Deref => {}
- ProjectionElem::Field(Either::Left(f)) => {
- let variant_data = f.parent.fields(db);
- match variant_data.shape {
- FieldsShape::Record => {
- result.push('_');
- result.push_str(variant_data.fields()[f.local_id].name.as_str())
- }
- FieldsShape::Tuple => {
- let index =
- variant_data.fields().iter().position(|it| it.0 == f.local_id);
- if let Some(index) = index {
- format_to!(result, "_{index}");
- }
- }
- FieldsShape::Unit => {}
- }
- }
- ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index),
- &ProjectionElem::ClosureField(field) => format_to!(result, "_{field}"),
- ProjectionElem::Index(_)
- | ProjectionElem::ConstantIndex { .. }
- | ProjectionElem::Subslice { .. }
- | ProjectionElem::OpaqueCast(_) => {
- never!("Not happen in closure capture");
- continue;
- }
+ // The liberated version of this signature should be a subtype
+ // of the liberated form of the expectation.
+ for (supplied_ty, expected_ty) in
+ iter::zip(supplied_sig.inputs(), expected_sigs.liberated_sig.inputs())
+ {
+ // Check that E' = S'.
+ let cause = ObligationCause::new();
+ let InferOk { value: (), obligations } = table
+ .infer_ctxt
+ .at(&cause, table.param_env)
+ .eq(DefineOpaqueTypes::Yes, expected_ty, supplied_ty)?;
+ all_obligations.extend(obligations);
}
- }
- if is_raw_identifier(&result, owner.module(db).krate().data(db).edition) {
- result.insert_str(0, "r#");
- }
- result
- }
- pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
- let body = db.body(owner);
- let krate = owner.krate(db);
- let edition = krate.data(db).edition;
- let mut result = body[self.place.local].name.display(db, edition).to_string();
- for proj in &self.place.projections {
- match proj {
- // In source code autoderef kicks in.
- ProjectionElem::Deref => {}
- ProjectionElem::Field(Either::Left(f)) => {
- let variant_data = f.parent.fields(db);
- match variant_data.shape {
- FieldsShape::Record => format_to!(
- result,
- ".{}",
- variant_data.fields()[f.local_id].name.display(db, edition)
- ),
- FieldsShape::Tuple => format_to!(
- result,
- ".{}",
- variant_data
- .fields()
- .iter()
- .position(|it| it.0 == f.local_id)
- .unwrap_or_default()
- ),
- FieldsShape::Unit => {}
- }
- }
- ProjectionElem::Field(Either::Right(f)) => {
- let field = f.index;
- format_to!(result, ".{field}");
- }
- &ProjectionElem::ClosureField(field) => {
- format_to!(result, ".{field}");
- }
- ProjectionElem::Index(_)
- | ProjectionElem::ConstantIndex { .. }
- | ProjectionElem::Subslice { .. }
- | ProjectionElem::OpaqueCast(_) => {
- never!("Not happen in closure capture");
- continue;
- }
- }
- }
- let final_derefs_count = self
- .place
- .projections
- .iter()
- .rev()
- .take_while(|proj| matches!(proj, ProjectionElem::Deref))
- .count();
- result.insert_str(0, &"*".repeat(final_derefs_count));
- result
- }
+ let supplied_output_ty = supplied_sig.output();
+ let cause = ObligationCause::new();
+ let InferOk { value: (), obligations } =
+ table.infer_ctxt.at(&cause, table.param_env).eq(
+ DefineOpaqueTypes::Yes,
+ expected_sigs.liberated_sig.output(),
+ supplied_output_ty,
+ )?;
+ all_obligations.extend(obligations);
- pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
- let body = db.body(owner);
- let krate = owner.krate(db);
- let edition = krate.data(db).edition;
- let mut result = body[self.place.local].name.display(db, edition).to_string();
- let mut field_need_paren = false;
- for proj in &self.place.projections {
- match proj {
- ProjectionElem::Deref => {
- result = format!("*{result}");
- field_need_paren = true;
- }
- ProjectionElem::Field(Either::Left(f)) => {
- if field_need_paren {
- result = format!("({result})");
- }
- let variant_data = f.parent.fields(db);
- let field = match variant_data.shape {
- FieldsShape::Record => {
- variant_data.fields()[f.local_id].name.as_str().to_owned()
- }
- FieldsShape::Tuple => variant_data
- .fields()
- .iter()
- .position(|it| it.0 == f.local_id)
- .unwrap_or_default()
- .to_string(),
- FieldsShape::Unit => "[missing field]".to_owned(),
- };
- result = format!("{result}.{field}");
- field_need_paren = false;
- }
- ProjectionElem::Field(Either::Right(f)) => {
- let field = f.index;
- if field_need_paren {
- result = format!("({result})");
- }
- result = format!("{result}.{field}");
- field_need_paren = false;
- }
- &ProjectionElem::ClosureField(field) => {
- if field_need_paren {
- result = format!("({result})");
- }
- result = format!("{result}.{field}");
- field_need_paren = false;
- }
- ProjectionElem::Index(_)
- | ProjectionElem::ConstantIndex { .. }
- | ProjectionElem::Subslice { .. }
- | ProjectionElem::OpaqueCast(_) => {
- never!("Not happen in closure capture");
- continue;
- }
- }
- }
- result
- }
-}
+ let inputs = supplied_sig
+ .inputs()
+ .into_iter()
+ .map(|ty| table.infer_ctxt.resolve_vars_if_possible(ty));
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct CapturedItemWithoutTy {
- pub(crate) place: HirPlace,
- pub(crate) kind: CaptureKind,
- /// The inner vec is the stacks; the outer vec is for each capture reference.
- pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
-}
+ expected_sigs.liberated_sig = table.interner.mk_fn_sig(
+ inputs,
+ supplied_output_ty,
+ expected_sigs.liberated_sig.c_variadic,
+ Safety::Safe,
+ FnAbi::RustCall,
+ );
-impl CapturedItemWithoutTy {
- fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
- let ty = self.place.ty(ctx);
- let ty = match &self.kind {
- CaptureKind::ByValue => ty,
- CaptureKind::ByRef(bk) => {
- let m = match bk {
- BorrowKind::Mut { .. } => Mutability::Mut,
- _ => Mutability::Not,
- };
- TyKind::Ref(m, error_lifetime(), ty).intern(Interner)
- }
- };
- return CapturedItem {
- place: self.place,
- kind: self.kind,
- span_stacks: self.span_stacks,
- ty: replace_placeholder_with_binder(ctx, ty),
- };
-
- fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders<Ty> {
- struct Filler<'a> {
- db: &'a dyn HirDatabase,
- generics: &'a Generics,
- }
- impl FallibleTypeFolder<Interner> for Filler<'_> {
- type Error = ();
-
- fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
- self
- }
-
- fn interner(&self) -> Interner {
- Interner
- }
-
- fn try_fold_free_placeholder_const(
- &mut self,
- ty: chalk_ir::Ty<Interner>,
- idx: chalk_ir::PlaceholderIndex,
- outer_binder: DebruijnIndex,
- ) -> Result<chalk_ir::Const<Interner>, Self::Error> {
- let x = from_placeholder_idx(self.db, idx).0;
- let Some(idx) = self.generics.type_or_const_param_idx(x) else {
- return Err(());
- };
- Ok(BoundVar::new(outer_binder, idx).to_const(Interner, ty))
- }
-
- fn try_fold_free_placeholder_ty(
- &mut self,
- idx: chalk_ir::PlaceholderIndex,
- outer_binder: DebruijnIndex,
- ) -> std::result::Result<Ty, Self::Error> {
- let x = from_placeholder_idx(self.db, idx).0;
- let Some(idx) = self.generics.type_or_const_param_idx(x) else {
- return Err(());
- };
- Ok(BoundVar::new(outer_binder, idx).to_ty(Interner))
- }
- }
- let filler = &mut Filler { db: ctx.db, generics: ctx.generics() };
- let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
- make_binders(ctx.db, filler.generics, result)
- }
- }
-}
-
-impl InferenceContext<'_> {
- fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
- let r = self.place_of_expr_without_adjust(tgt_expr)?;
- let adjustments =
- self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
- apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
- }
-
- /// Pushes the span into `current_capture_span_stack`, *without clearing it first*.
- fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace> {
- if path.type_anchor().is_some() {
- return None;
- }
- let hygiene = self.body.expr_or_pat_path_hygiene(id);
- self.resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).and_then(|result| {
- match result {
- ValueNs::LocalBinding(binding) => {
- let mir_span = match id {
- ExprOrPatId::ExprId(id) => MirSpan::ExprId(id),
- ExprOrPatId::PatId(id) => MirSpan::PatId(id),
- };
- self.current_capture_span_stack.push(mir_span);
- Some(HirPlace { local: binding, projections: Vec::new() })
- }
- _ => None,
- }
+ Ok(InferOk { value: expected_sigs, obligations: all_obligations })
})
}
- /// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
- fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
- self.current_capture_span_stack.clear();
- match &self.body[tgt_expr] {
- Expr::Path(p) => {
- let resolver_guard =
- self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
- let result = self.path_place(p, tgt_expr.into());
- self.resolver.reset_to_guard(resolver_guard);
- return result;
- }
- Expr::Field { expr, name: _ } => {
- let mut place = self.place_of_expr(*expr)?;
- let field = self.result.field_resolution(tgt_expr)?;
- self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
- place.projections.push(ProjectionElem::Field(field));
- return Some(place);
- }
- Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
- if matches!(
- self.expr_ty_after_adjustments(*expr).kind(Interner),
- TyKind::Ref(..) | TyKind::Raw(..)
- ) {
- let mut place = self.place_of_expr(*expr)?;
- self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
- place.projections.push(ProjectionElem::Deref);
- return Some(place);
- }
- }
- _ => (),
- }
- None
- }
-
- fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
- self.current_captures.push(CapturedItemWithoutTy {
- place,
- kind,
- span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()],
- });
- }
-
- fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
- // The first span is the identifier, and it must always remain.
- truncate_to += 1;
- for span_stack in &mut capture.span_stacks {
- let mut remained = truncate_to;
- let mut actual_truncate_to = 0;
- for &span in &*span_stack {
- actual_truncate_to += 1;
- if !span.is_ref_span(self.body) {
- remained -= 1;
- if remained == 0 {
- break;
- }
- }
- }
- if actual_truncate_to < span_stack.len()
- && span_stack[actual_truncate_to].is_ref_span(self.body)
- {
- // Include the ref operator if there is one, we will fix it later (in `strip_captures_ref_span()`) if it's incorrect.
- actual_truncate_to += 1;
- }
- span_stack.truncate(actual_truncate_to);
- }
- }
-
- fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
- if let Some(place) = place {
- self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
- }
- self.walk_expr(expr);
- }
-
- fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
- if self.is_upvar(&place) {
- self.push_capture(place, kind);
- }
- }
-
- fn mutate_path_pat(&mut self, path: &Path, id: PatId) {
- if let Some(place) = self.path_place(path, id.into()) {
- self.add_capture(
- place,
- CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
- );
- self.current_capture_span_stack.pop(); // Remove the pattern span.
- }
- }
-
- fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
- if let Some(place) = place {
- self.add_capture(
- place,
- CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
- );
- }
- self.walk_expr(expr);
- }
-
- fn consume_expr(&mut self, expr: ExprId) {
- if let Some(place) = self.place_of_expr(expr) {
- self.consume_place(place);
- }
- self.walk_expr(expr);
- }
-
- fn consume_place(&mut self, place: HirPlace) {
- if self.is_upvar(&place) {
- let ty = place.ty(self);
- let kind = if self.is_ty_copy(ty) {
- CaptureKind::ByRef(BorrowKind::Shared)
- } else {
- CaptureKind::ByValue
- };
- self.push_capture(place, kind);
- }
- }
-
- fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
- if let Some((last, rest)) = adjustment.split_last() {
- match &last.kind {
- Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
- self.walk_expr_with_adjust(tgt_expr, rest)
- }
- Adjust::Deref(Some(m)) => match m.0 {
- Some(m) => {
- self.ref_capture_with_adjusts(m, tgt_expr, rest);
- }
- None => unreachable!(),
- },
- Adjust::Borrow(b) => {
- self.ref_capture_with_adjusts(b.mutability(), tgt_expr, rest);
- }
- }
- } else {
- self.walk_expr_without_adjust(tgt_expr);
- }
- }
-
- fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) {
- let capture_kind = match m {
- Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
- Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
- };
- if let Some(place) = self.place_of_expr_without_adjust(tgt_expr)
- && let Some(place) =
- apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest)
- {
- self.add_capture(place, capture_kind);
- }
- self.walk_expr_with_adjust(tgt_expr, rest);
- }
-
- fn walk_expr(&mut self, tgt_expr: ExprId) {
- if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) {
- // FIXME: this take is completely unneeded, and just is here to make borrow checker
- // happy. Remove it if you can.
- let x_taken = mem::take(it);
- self.walk_expr_with_adjust(tgt_expr, &x_taken);
- *self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken;
- } else {
- self.walk_expr_without_adjust(tgt_expr);
- }
- }
-
- fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) {
- match &self.body[tgt_expr] {
- Expr::OffsetOf(_) => (),
- Expr::InlineAsm(e) => e.operands.iter().for_each(|(_, op)| match op {
- AsmOperand::In { expr, .. }
- | AsmOperand::Out { expr: Some(expr), .. }
- | AsmOperand::InOut { expr, .. } => self.walk_expr_without_adjust(*expr),
- AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
- self.walk_expr_without_adjust(*in_expr);
- if let Some(out_expr) = out_expr {
- self.walk_expr_without_adjust(*out_expr);
- }
- }
- AsmOperand::Out { expr: None, .. }
- | AsmOperand::Const(_)
- | AsmOperand::Label(_)
- | AsmOperand::Sym(_) => (),
- }),
- Expr::If { condition, then_branch, else_branch } => {
- self.consume_expr(*condition);
- self.consume_expr(*then_branch);
- if let &Some(expr) = else_branch {
- self.consume_expr(expr);
- }
- }
- Expr::Async { statements, tail, .. }
- | Expr::Unsafe { statements, tail, .. }
- | Expr::Block { statements, tail, .. } => {
- for s in statements.iter() {
- match s {
- Statement::Let { pat, type_ref: _, initializer, else_branch } => {
- if let Some(else_branch) = else_branch {
- self.consume_expr(*else_branch);
- }
- if let Some(initializer) = initializer {
- if else_branch.is_some() {
- self.consume_expr(*initializer);
- } else {
- self.walk_expr(*initializer);
- }
- if let Some(place) = self.place_of_expr(*initializer) {
- self.consume_with_pat(place, *pat);
- }
- }
- }
- Statement::Expr { expr, has_semi: _ } => {
- self.consume_expr(*expr);
- }
- Statement::Item(_) => (),
- }
- }
- if let Some(tail) = tail {
- self.consume_expr(*tail);
- }
- }
- Expr::Call { callee, args } => {
- self.consume_expr(*callee);
- self.consume_exprs(args.iter().copied());
- }
- Expr::MethodCall { receiver, args, .. } => {
- self.consume_expr(*receiver);
- self.consume_exprs(args.iter().copied());
- }
- Expr::Match { expr, arms } => {
- for arm in arms.iter() {
- self.consume_expr(arm.expr);
- if let Some(guard) = arm.guard {
- self.consume_expr(guard);
- }
- }
- self.walk_expr(*expr);
- if let Some(discr_place) = self.place_of_expr(*expr)
- && self.is_upvar(&discr_place)
- {
- let mut capture_mode = None;
- for arm in arms.iter() {
- self.walk_pat(&mut capture_mode, arm.pat);
- }
- if let Some(c) = capture_mode {
- self.push_capture(discr_place, c);
- }
- }
- }
- Expr::Break { expr, label: _ }
- | Expr::Return { expr }
- | Expr::Yield { expr }
- | Expr::Yeet { expr } => {
- if let &Some(expr) = expr {
- self.consume_expr(expr);
- }
- }
- &Expr::Become { expr } => {
- self.consume_expr(expr);
- }
- Expr::RecordLit { fields, spread, .. } => {
- if let &Some(expr) = spread {
- self.consume_expr(expr);
- }
- self.consume_exprs(fields.iter().map(|it| it.expr));
- }
- Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
- Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
- if matches!(
- self.expr_ty_after_adjustments(*expr).kind(Interner),
- TyKind::Ref(..) | TyKind::Raw(..)
- ) {
- self.select_from_expr(*expr);
- } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
- let mutability = 'b: {
- if let Some(deref_trait) =
- self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
- && let Some(deref_fn) = deref_trait
- .trait_items(self.db)
- .method_by_name(&Name::new_symbol_root(sym::deref_mut))
- {
- break 'b deref_fn == f;
- }
- false
- };
- let place = self.place_of_expr(*expr);
- if mutability {
- self.mutate_expr(*expr, place);
- } else {
- self.ref_expr(*expr, place);
- }
- } else {
- self.select_from_expr(*expr);
- }
- }
- Expr::Let { pat, expr } => {
- self.walk_expr(*expr);
- if let Some(place) = self.place_of_expr(*expr) {
- self.consume_with_pat(place, *pat);
- }
- }
- Expr::UnaryOp { expr, op: _ }
- | Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
- | Expr::Await { expr }
- | Expr::Loop { body: expr, label: _ }
- | Expr::Box { expr }
- | Expr::Cast { expr, type_ref: _ } => {
- self.consume_expr(*expr);
- }
- Expr::Ref { expr, rawness: _, mutability } => {
- // We need to do this before we push the span so the order will be correct.
- let place = self.place_of_expr(*expr);
- self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
- match mutability {
- hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place),
- hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place),
- }
- }
- Expr::BinaryOp { lhs, rhs, op } => {
- let Some(op) = op else {
- return;
- };
- if matches!(op, BinaryOp::Assignment { .. }) {
- let place = self.place_of_expr(*lhs);
- self.mutate_expr(*lhs, place);
- self.consume_expr(*rhs);
- return;
- }
- self.consume_expr(*lhs);
- self.consume_expr(*rhs);
- }
- Expr::Range { lhs, rhs, range_type: _ } => {
- if let &Some(expr) = lhs {
- self.consume_expr(expr);
- }
- if let &Some(expr) = rhs {
- self.consume_expr(expr);
- }
- }
- Expr::Index { base, index } => {
- self.select_from_expr(*base);
- self.consume_expr(*index);
- }
- Expr::Closure { .. } => {
- let ty = self.expr_ty(tgt_expr);
- let TyKind::Closure(id, _) = ty.kind(Interner) else {
- never!("closure type is always closure");
- return;
- };
- let (captures, _) =
- self.result.closure_info.get(id).expect(
- "We sort closures, so we should always have data for inner closures",
- );
- let mut cc = mem::take(&mut self.current_captures);
- cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
- CapturedItemWithoutTy {
- place: it.place.clone(),
- kind: it.kind,
- span_stacks: it.span_stacks.clone(),
- }
- }));
- self.current_captures = cc;
- }
- Expr::Array(Array::ElementList { elements: exprs }) | Expr::Tuple { exprs } => {
- self.consume_exprs(exprs.iter().copied())
- }
- &Expr::Assignment { target, value } => {
- self.walk_expr(value);
- let resolver_guard =
- self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
- match self.place_of_expr(value) {
- Some(rhs_place) => {
- self.inside_assignment = true;
- self.consume_with_pat(rhs_place, target);
- self.inside_assignment = false;
- }
- None => self.body.walk_pats(target, &mut |pat| match &self.body[pat] {
- Pat::Path(path) => self.mutate_path_pat(path, pat),
- &Pat::Expr(expr) => {
- let place = self.place_of_expr(expr);
- self.mutate_expr(expr, place);
- }
- _ => {}
- }),
- }
- self.resolver.reset_to_guard(resolver_guard);
- }
-
- Expr::Missing
- | Expr::Continue { .. }
- | Expr::Path(_)
- | Expr::Literal(_)
- | Expr::Const(_)
- | Expr::Underscore => (),
- }
- }
-
- fn walk_pat(&mut self, result: &mut Option<CaptureKind>, pat: PatId) {
- let mut update_result = |ck: CaptureKind| match result {
- Some(r) => {
- *r = cmp::max(*r, ck);
- }
- None => *result = Some(ck),
- };
-
- self.walk_pat_inner(
- pat,
- &mut update_result,
- BorrowKind::Mut { kind: MutBorrowKind::Default },
- );
- }
-
- fn walk_pat_inner(
- &mut self,
- p: PatId,
- update_result: &mut impl FnMut(CaptureKind),
- mut for_mut: BorrowKind,
- ) {
- match &self.body[p] {
- Pat::Ref { .. }
- | Pat::Box { .. }
- | Pat::Missing
- | Pat::Wild
- | Pat::Tuple { .. }
- | Pat::Expr(_)
- | Pat::Or(_) => (),
- Pat::TupleStruct { .. } | Pat::Record { .. } => {
- if let Some(variant) = self.result.variant_resolution_for_pat(p) {
- let adt = variant.adt_id(self.db);
- let is_multivariant = match adt {
- hir_def::AdtId::EnumId(e) => e.enum_variants(self.db).variants.len() != 1,
- _ => false,
- };
- if is_multivariant {
- update_result(CaptureKind::ByRef(BorrowKind::Shared));
- }
- }
- }
- Pat::Slice { .. }
- | Pat::ConstBlock(_)
- | Pat::Path(_)
- | Pat::Lit(_)
- | Pat::Range { .. } => {
- update_result(CaptureKind::ByRef(BorrowKind::Shared));
- }
- Pat::Bind { id, .. } => match self.result.binding_modes[p] {
- crate::BindingMode::Move => {
- if self.is_ty_copy(self.result.type_of_binding[*id].clone()) {
- update_result(CaptureKind::ByRef(BorrowKind::Shared));
- } else {
- update_result(CaptureKind::ByValue);
- }
- }
- crate::BindingMode::Ref(r) => match r {
- Mutability::Mut => update_result(CaptureKind::ByRef(for_mut)),
- Mutability::Not => update_result(CaptureKind::ByRef(BorrowKind::Shared)),
- },
- },
- }
- if self.result.pat_adjustments.get(&p).is_some_and(|it| !it.is_empty()) {
- for_mut = BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture };
- }
- self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
- }
-
- fn expr_ty(&self, expr: ExprId) -> Ty {
- self.result[expr].clone()
- }
-
- fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
- let mut ty = None;
- if let Some(it) = self.result.expr_adjustments.get(&e)
- && let Some(it) = it.last()
- {
- ty = Some(it.target.clone());
- }
- ty.unwrap_or_else(|| self.expr_ty(e))
- }
-
- fn is_upvar(&self, place: &HirPlace) -> bool {
- if let Some(c) = self.current_closure {
- let InternedClosure(_, root) = self.db.lookup_intern_closure(c.into());
- return self.body.is_binding_upvar(place.local, root);
- }
- false
- }
-
- fn is_ty_copy(&mut self, ty: Ty) -> bool {
- if let TyKind::Closure(id, _) = ty.kind(Interner) {
- // FIXME: We handle closure as a special case, since chalk consider every closure as copy. We
- // should probably let chalk know which closures are copy, but I don't know how doing it
- // without creating query cycles.
- return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true);
- }
- self.table.resolve_completely(ty).is_copy(self.db, self.owner)
- }
-
- fn select_from_expr(&mut self, expr: ExprId) {
- self.walk_expr(expr);
- }
-
- fn restrict_precision_for_unsafe(&mut self) {
- // FIXME: Borrow checker problems without this.
- let mut current_captures = std::mem::take(&mut self.current_captures);
- for capture in &mut current_captures {
- let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
- if ty.as_raw_ptr().is_some() || ty.is_union() {
- capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
- self.truncate_capture_spans(capture, 0);
- capture.place.projections.truncate(0);
- continue;
- }
- for (i, p) in capture.place.projections.iter().enumerate() {
- ty = p.projected_ty(
- ty,
- self.db,
- |_, _, _| {
- unreachable!("Closure field only happens in MIR");
- },
- self.owner.module(self.db).krate(),
- );
- if ty.as_raw_ptr().is_some() || ty.is_union() {
- capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
- self.truncate_capture_spans(capture, i + 1);
- capture.place.projections.truncate(i + 1);
- break;
- }
- }
- }
- self.current_captures = current_captures;
- }
-
- fn adjust_for_move_closure(&mut self) {
- // FIXME: Borrow checker won't allow without this.
- let mut current_captures = std::mem::take(&mut self.current_captures);
- for capture in &mut current_captures {
- if let Some(first_deref) =
- capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref)
- {
- self.truncate_capture_spans(capture, first_deref);
- capture.place.projections.truncate(first_deref);
- }
- capture.kind = CaptureKind::ByValue;
- }
- self.current_captures = current_captures;
- }
-
- fn minimize_captures(&mut self) {
- self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
- let mut hash_map = FxHashMap::<HirPlace, usize>::default();
- let result = mem::take(&mut self.current_captures);
- for mut item in result {
- let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
- let mut it = item.place.projections.iter();
- let prev_index = loop {
- if let Some(k) = hash_map.get(&lookup_place) {
- break Some(*k);
- }
- match it.next() {
- Some(it) => {
- lookup_place.projections.push(it.clone());
- }
- None => break None,
- }
- };
- match prev_index {
- Some(p) => {
- let prev_projections_len = self.current_captures[p].place.projections.len();
- self.truncate_capture_spans(&mut item, prev_projections_len);
- self.current_captures[p].span_stacks.extend(item.span_stacks);
- let len = self.current_captures[p].place.projections.len();
- let kind_after_truncate =
- item.place.capture_kind_of_truncated_place(item.kind, len);
- self.current_captures[p].kind =
- cmp::max(kind_after_truncate, self.current_captures[p].kind);
- }
- None => {
- hash_map.insert(item.place.clone(), self.current_captures.len());
- self.current_captures.push(item);
- }
- }
- }
- }
-
- fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
- let adjustments_count =
- self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
- place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
- self.current_capture_span_stack
- .extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat)));
- 'reset_span_stack: {
- match &self.body[tgt_pat] {
- Pat::Missing | Pat::Wild => (),
- Pat::Tuple { args, ellipsis } => {
- let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
- let field_count = match self.result[tgt_pat].kind(Interner) {
- TyKind::Tuple(_, s) => s.len(Interner),
- _ => break 'reset_span_stack,
- };
- let fields = 0..field_count;
- let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
- for (&arg, i) in it {
- let mut p = place.clone();
- self.current_capture_span_stack.push(MirSpan::PatId(arg));
- p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
- tuple: TupleId(!0), // dummy this, as its unused anyways
- index: i as u32,
- })));
- self.consume_with_pat(p, arg);
- self.current_capture_span_stack.pop();
- }
- }
- Pat::Or(pats) => {
- for pat in pats.iter() {
- self.consume_with_pat(place.clone(), *pat);
- }
- }
- Pat::Record { args, .. } => {
- let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
- break 'reset_span_stack;
- };
- match variant {
- VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
- self.consume_place(place)
- }
- VariantId::StructId(s) => {
- let vd = s.fields(self.db);
- for field_pat in args.iter() {
- let arg = field_pat.pat;
- let Some(local_id) = vd.field(&field_pat.name) else {
- continue;
- };
- let mut p = place.clone();
- self.current_capture_span_stack.push(MirSpan::PatId(arg));
- p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
- parent: variant,
- local_id,
- })));
- self.consume_with_pat(p, arg);
- self.current_capture_span_stack.pop();
- }
- }
- }
- }
- Pat::Range { .. } | Pat::Slice { .. } | Pat::ConstBlock(_) | Pat::Lit(_) => {
- self.consume_place(place)
- }
- Pat::Path(path) => {
- if self.inside_assignment {
- self.mutate_path_pat(path, tgt_pat);
- }
- self.consume_place(place);
- }
- &Pat::Bind { id, subpat: _ } => {
- let mode = self.result.binding_modes[tgt_pat];
- let capture_kind = match mode {
- BindingMode::Move => {
- self.consume_place(place);
- break 'reset_span_stack;
- }
- BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
- BindingMode::Ref(Mutability::Mut) => {
- BorrowKind::Mut { kind: MutBorrowKind::Default }
- }
- };
- self.current_capture_span_stack.push(MirSpan::BindingId(id));
- self.add_capture(place, CaptureKind::ByRef(capture_kind));
- self.current_capture_span_stack.pop();
- }
- Pat::TupleStruct { path: _, args, ellipsis } => {
- let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
- break 'reset_span_stack;
- };
- match variant {
- VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
- self.consume_place(place)
- }
- VariantId::StructId(s) => {
- let vd = s.fields(self.db);
- let (al, ar) =
- args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
- let fields = vd.fields().iter();
- let it = al
- .iter()
- .zip(fields.clone())
- .chain(ar.iter().rev().zip(fields.rev()));
- for (&arg, (i, _)) in it {
- let mut p = place.clone();
- self.current_capture_span_stack.push(MirSpan::PatId(arg));
- p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
- parent: variant,
- local_id: i,
- })));
- self.consume_with_pat(p, arg);
- self.current_capture_span_stack.pop();
- }
- }
- }
- }
- Pat::Ref { pat, mutability: _ } => {
- self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat));
- place.projections.push(ProjectionElem::Deref);
- self.consume_with_pat(place, *pat);
- self.current_capture_span_stack.pop();
- }
- Pat::Box { .. } => (), // not supported
- &Pat::Expr(expr) => {
- self.consume_place(place);
- let pat_capture_span_stack = mem::take(&mut self.current_capture_span_stack);
- let old_inside_assignment = mem::replace(&mut self.inside_assignment, false);
- let lhs_place = self.place_of_expr(expr);
- self.mutate_expr(expr, lhs_place);
- self.inside_assignment = old_inside_assignment;
- self.current_capture_span_stack = pat_capture_span_stack;
- }
- }
- }
- self.current_capture_span_stack
- .truncate(self.current_capture_span_stack.len() - adjustments_count);
- }
-
- fn consume_exprs(&mut self, exprs: impl Iterator<Item = ExprId>) {
- for expr in exprs {
- self.consume_expr(expr);
- }
- }
-
- fn closure_kind(&self) -> FnTrait {
- let mut r = FnTrait::Fn;
- for it in &self.current_captures {
- r = cmp::min(
- r,
- match &it.kind {
- CaptureKind::ByRef(BorrowKind::Mut { .. }) => FnTrait::FnMut,
- CaptureKind::ByRef(BorrowKind::Shallow | BorrowKind::Shared) => FnTrait::Fn,
- CaptureKind::ByValue => FnTrait::FnOnce,
- },
- )
- }
- r
- }
-
- fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait {
- let InternedClosure(_, root) = self.db.lookup_intern_closure(closure.into());
- self.current_closure = Some(closure);
- let Expr::Closure { body, capture_by, .. } = &self.body[root] else {
- unreachable!("Closure expression id is always closure");
- };
- self.consume_expr(*body);
- for item in &self.current_captures {
- if matches!(
- item.kind,
- CaptureKind::ByRef(BorrowKind::Mut {
- kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow
- })
- ) && !item.place.projections.contains(&ProjectionElem::Deref)
- {
- // FIXME: remove the `mutated_bindings_in_closure` completely and add proper fake reads in
- // MIR. I didn't do that due duplicate diagnostics.
- self.result.mutated_bindings_in_closure.insert(item.place.local);
- }
- }
- self.restrict_precision_for_unsafe();
- // `closure_kind` should be done before adjust_for_move_closure
- // If there exists pre-deduced kind of a closure, use it instead of one determined by capture, as rustc does.
- // rustc also does diagnostics here if the latter is not a subtype of the former.
- let closure_kind = self
- .result
- .closure_info
- .get(&closure)
- .map_or_else(|| self.closure_kind(), |info| info.1);
- match capture_by {
- CaptureBy::Value => self.adjust_for_move_closure(),
- CaptureBy::Ref => (),
- }
- self.minimize_captures();
- self.strip_captures_ref_span();
- let result = mem::take(&mut self.current_captures);
- let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
- self.result.closure_info.insert(closure, (captures, closure_kind));
- closure_kind
- }
-
- fn strip_captures_ref_span(&mut self) {
- // FIXME: Borrow checker won't allow without this.
- let mut captures = std::mem::take(&mut self.current_captures);
- for capture in &mut captures {
- if matches!(capture.kind, CaptureKind::ByValue) {
- for span_stack in &mut capture.span_stacks {
- if span_stack[span_stack.len() - 1].is_ref_span(self.body) {
- span_stack.truncate(span_stack.len() - 1);
- }
- }
- }
- }
- self.current_captures = captures;
- }
-
- pub(crate) fn infer_closures(&mut self) {
- let deferred_closures = self.sort_closures();
- for (closure, exprs) in deferred_closures.into_iter().rev() {
- self.current_captures = vec![];
- let kind = self.analyze_closure(closure);
-
- for (derefed_callee, callee_ty, params, expr) in exprs {
- if let &Expr::Call { callee, .. } = &self.body[expr] {
- let mut adjustments =
- self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec();
- self.write_fn_trait_method_resolution(
- kind,
- &derefed_callee,
- &mut adjustments,
- &callee_ty,
- ¶ms,
- expr,
- );
- self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice());
- }
- }
- }
- }
-
- /// We want to analyze some closures before others, to have a correct analysis:
- /// * We should analyze nested closures before the parent, since the parent should capture some of
- /// the things that its children captures.
- /// * If a closure calls another closure, we need to analyze the callee, to find out how we should
- /// capture it (e.g. by move for FnOnce)
+ /// If there is no expected signature, then we will convert the
+ /// types that the user gave into a signature.
///
- /// These dependencies are collected in the main inference. We do a topological sort in this function. It
- /// will consume the `deferred_closures` field and return its content in a sorted vector.
- fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> {
- let mut deferred_closures = mem::take(&mut self.deferred_closures);
- let mut dependents_count: FxHashMap<ClosureId, usize> =
- deferred_closures.keys().map(|it| (*it, 0)).collect();
- for deps in self.closure_dependencies.values() {
- for dep in deps {
- *dependents_count.entry(*dep).or_default() += 1;
+ /// Also, record this closure signature for later.
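+    ///
+    /// As a rough surface-level illustration (plain Rust, not this crate's API):
+    /// when no expected signature is available from the surrounding context, only
+    /// the annotations the user wrote feed into the supplied signature, and any
+    /// omitted type becomes a fresh inference variable.
+    ///
+    /// ```
+    /// // Fully annotated: the supplied signature is `fn(u32) -> bool`.
+    /// let is_positive = |x: u32| -> bool { x > 0 };
+    /// // Return type omitted: it starts out as an inference variable and is later
+    /// // resolved to `u32` from the closure body.
+    /// let double = |x: u32| x * 2;
+    /// assert!(is_positive(double(1)));
+    /// ```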
+ fn supplied_sig_of_closure(
+ &mut self,
+ decl_inputs: &[Option<TypeRefId>],
+ decl_output: Option<TypeRefId>,
+ ) -> PolyFnSig<'db> {
+ let interner = self.table.interner;
+
+ let supplied_return = match decl_output {
+ Some(output) => {
+ let output = self.make_body_ty(output);
+ self.process_user_written_ty(output).to_nextsolver(interner)
}
+ None => self.table.next_ty_var(),
+ };
+ // First, convert the types that the user supplied (if any).
+ let supplied_arguments = decl_inputs.iter().map(|&input| match input {
+ Some(input) => {
+ let input = self.make_body_ty(input);
+ self.process_user_written_ty(input).to_nextsolver(interner)
+ }
+ None => self.table.next_ty_var(),
+ });
+
+ Binder::dummy(interner.mk_fn_sig(
+ supplied_arguments,
+ supplied_return,
+ false,
+ Safety::Safe,
+ FnAbi::RustCall,
+ ))
+ }
+
+    /// Converts the types that the user supplied (so that any errors in
+    /// them are still reported), but returns a signature in which all
+    /// parameters are of type `ty::Error`.
+ fn error_sig_of_closure(
+ &mut self,
+ decl_inputs: &[Option<TypeRefId>],
+ decl_output: Option<TypeRefId>,
+ ) -> PolyFnSig<'db> {
+ let interner = self.table.interner;
+ let err_ty = Ty::new_error(interner, ErrorGuaranteed);
+
+ if let Some(output) = decl_output {
+ self.make_body_ty(output);
}
- let mut queue: Vec<_> =
- deferred_closures.keys().copied().filter(|it| dependents_count[it] == 0).collect();
- let mut result = vec![];
- while let Some(it) = queue.pop() {
- if let Some(d) = deferred_closures.remove(&it) {
- result.push((it, d));
+ let supplied_arguments = decl_inputs.iter().map(|&input| match input {
+ Some(input) => {
+ self.make_body_ty(input);
+ err_ty
}
- for dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) {
- let cnt = dependents_count.get_mut(dep).unwrap();
- *cnt -= 1;
- if *cnt == 0 {
- queue.push(*dep);
- }
- }
- }
- assert!(deferred_closures.is_empty(), "we should have analyzed all closures");
+ None => err_ty,
+ });
+
+ let result = Binder::dummy(interner.mk_fn_sig(
+ supplied_arguments,
+ err_ty,
+ false,
+ Safety::Safe,
+ FnAbi::RustCall,
+ ));
+
+ debug!("supplied_sig_of_closure: result={:?}", result);
+
result
}
- pub(super) fn add_current_closure_dependency(&mut self, dep: ClosureId) {
- if let Some(c) = self.current_closure
- && !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep)
- {
- self.closure_dependencies.entry(c).or_default().push(dep);
- }
-
- fn dep_creates_cycle(
- closure_dependencies: &FxHashMap<ClosureId, Vec<ClosureId>>,
- visited: &mut FxHashSet<ClosureId>,
- from: ClosureId,
- to: ClosureId,
- ) -> bool {
- if !visited.insert(from) {
- return false;
- }
-
- if from == to {
- return true;
- }
-
- if let Some(deps) = closure_dependencies.get(&to) {
- for dep in deps {
- if dep_creates_cycle(closure_dependencies, visited, from, *dep) {
- return true;
- }
- }
- }
-
- false
- }
+ fn closure_sigs(&self, bound_sig: PolyFnSig<'db>) -> ClosureSignatures<'db> {
+ let liberated_sig = bound_sig.skip_binder();
+ // FIXME: When we lower HRTB we'll need to actually liberate regions here.
+ ClosureSignatures { bound_sig, liberated_sig }
}
}
-
-/// Call this only when the last span in the stack isn't a split.
-fn apply_adjusts_to_place(
- current_capture_span_stack: &mut Vec<MirSpan>,
- mut r: HirPlace,
- adjustments: &[Adjustment],
-) -> Option<HirPlace> {
- let span = *current_capture_span_stack.last().expect("empty capture span stack");
- for adj in adjustments {
- match &adj.kind {
- Adjust::Deref(None) => {
- current_capture_span_stack.push(span);
- r.projections.push(ProjectionElem::Deref);
- }
- _ => return None,
- }
- }
- Some(r)
-}
diff --git a/crates/hir-ty/src/infer/closure/analysis.rs b/crates/hir-ty/src/infer/closure/analysis.rs
new file mode 100644
index 0000000..fd14b9e
--- /dev/null
+++ b/crates/hir-ty/src/infer/closure/analysis.rs
@@ -0,0 +1,1298 @@
+//! Post-inference closure analysis: captures and closure kind.
+
+use std::{cmp, convert::Infallible, mem};
+
+use chalk_ir::{
+ BoundVar, DebruijnIndex, Mutability, TyKind,
+ fold::{FallibleTypeFolder, TypeFoldable},
+};
+use either::Either;
+use hir_def::{
+ DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
+ expr_store::path::Path,
+ hir::{
+ Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId,
+ Statement, UnaryOp,
+ },
+ item_tree::FieldsShape,
+ lang_item::LangItem,
+ resolver::ValueNs,
+};
+use hir_expand::name::Name;
+use intern::sym;
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{SmallVec, smallvec};
+use stdx::{format_to, never};
+use syntax::utils::is_raw_identifier;
+
+use crate::db::InternedClosureId;
+use crate::infer::InferenceContext;
+use crate::{
+ Adjust, Adjustment, Binders, BindingMode, ClosureId, Interner, Substitution, Ty, TyExt,
+ db::{HirDatabase, InternedClosure},
+ error_lifetime, from_placeholder_idx,
+ generics::Generics,
+ make_binders,
+ mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
+ traits::FnTrait,
+ utils,
+};
+
+// The below functions handle capture and closure kind (Fn, FnMut, ..)
+
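+/// A place as seen by capture analysis: a local binding plus a series of projections
+/// (field accesses and derefs) applied to it, e.g. `foo`, `foo.bar` or `*foo.bar`.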
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(crate) struct HirPlace {
+ pub(crate) local: BindingId,
+ pub(crate) projections: Vec<ProjectionElem<Infallible, Ty>>,
+}
+
+impl HirPlace {
+ fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty {
+ let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone());
+ for p in &self.projections {
+ ty = p.projected_ty(
+ ty,
+ ctx.db,
+ |_, _, _| {
+ unreachable!("Closure field only happens in MIR");
+ },
+ ctx.owner.module(ctx.db).krate(),
+ );
+ }
+ ty
+ }
+
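+    /// Computes the capture kind after this place has been truncated to `len`
+    /// projections: if a default mutable borrow used to go through a deref that is
+    /// now cut off, it is recorded as a `MutBorrowKind::ClosureCapture` borrow
+    /// instead.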
+ fn capture_kind_of_truncated_place(
+ &self,
+ mut current_capture: CaptureKind,
+ len: usize,
+ ) -> CaptureKind {
+ if let CaptureKind::ByRef(BorrowKind::Mut {
+ kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow,
+ }) = current_capture
+ && self.projections[len..].contains(&ProjectionElem::Deref)
+ {
+ current_capture =
+ CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture });
+ }
+ current_capture
+ }
+}
+
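+/// How a closure captures a given place.
+///
+/// A rough sketch of the mapping this analysis computes, in plain surface Rust
+/// (not this crate's API); the most demanding capture determines which of
+/// `Fn`/`FnMut`/`FnOnce` the closure ends up implementing:
+///
+/// ```
+/// let name = String::from("ra");
+/// let print_it = || println!("{name}"); // shared borrow  => `ByRef` (shared), closure is `Fn`
+/// print_it();
+///
+/// let mut count = 0;
+/// let mut bump = || count += 1;         // mutable borrow => `ByRef` (mutable), closure is `FnMut`
+/// bump();
+///
+/// let owned = String::from("ra");
+/// let consume = move || owned;          // captured by value => `ByValue`; moving it
+/// let _ = consume();                    // out of the body makes the closure `FnOnce`
+/// ```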
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub enum CaptureKind {
+ ByRef(BorrowKind),
+ ByValue,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct CapturedItem {
+ pub(crate) place: HirPlace,
+ pub(crate) kind: CaptureKind,
+    /// Each inner vec is a stack of spans; the outer vec has one entry per capture reference.
+ ///
+ /// Even though we always report only the last span (i.e. the most inclusive span),
+ /// we need to keep them all, since when a closure occurs inside a closure, we
+ /// copy all captures of the inner closure to the outer closure, and then we may
+ /// truncate them, and we want the correct span to be reported.
+ span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
+ pub(crate) ty: Binders<Ty>,
+}
+
+impl CapturedItem {
+ pub fn local(&self) -> BindingId {
+ self.place.local
+ }
+
+ /// Returns whether this place has any field (aka. non-deref) projections.
+ pub fn has_field_projections(&self) -> bool {
+ self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref))
+ }
+
+ pub fn ty(&self, db: &dyn HirDatabase, subst: &Substitution) -> Ty {
+ self.ty.clone().substitute(Interner, &utils::ClosureSubst(subst).parent_subst(db))
+ }
+
+ pub fn kind(&self) -> CaptureKind {
+ self.kind
+ }
+
+ pub fn spans(&self) -> SmallVec<[MirSpan; 3]> {
+ self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect()
+ }
+
+ /// Converts the place to a name that can be inserted into source code.
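+    ///
+    /// For example, given a record field `bar` and a tuple field `0`, a capture of
+    /// `foo.bar.0` becomes `foo_bar_0`; deref projections are dropped, and the result
+    /// is prefixed with `r#` if it happens to be a keyword.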
+ pub fn place_to_name(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
+ let body = db.body(owner);
+ let mut result = body[self.place.local].name.as_str().to_owned();
+ for proj in &self.place.projections {
+ match proj {
+ ProjectionElem::Deref => {}
+ ProjectionElem::Field(Either::Left(f)) => {
+ let variant_data = f.parent.fields(db);
+ match variant_data.shape {
+ FieldsShape::Record => {
+ result.push('_');
+ result.push_str(variant_data.fields()[f.local_id].name.as_str())
+ }
+ FieldsShape::Tuple => {
+ let index =
+ variant_data.fields().iter().position(|it| it.0 == f.local_id);
+ if let Some(index) = index {
+ format_to!(result, "_{index}");
+ }
+ }
+ FieldsShape::Unit => {}
+ }
+ }
+ ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index),
+ &ProjectionElem::ClosureField(field) => format_to!(result, "_{field}"),
+ ProjectionElem::Index(_)
+ | ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Subslice { .. }
+ | ProjectionElem::OpaqueCast(_) => {
+ never!("Not happen in closure capture");
+ continue;
+ }
+ }
+ }
+ if is_raw_identifier(&result, owner.module(db).krate().data(db).edition) {
+ result.insert_str(0, "r#");
+ }
+ result
+ }
+
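+    /// Renders the place roughly as it would be written in source code: intermediate
+    /// derefs are left to autoderef, and only derefs at the end of the projection
+    /// list are rendered, as leading `*`s.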
+ pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
+ let body = db.body(owner);
+ let krate = owner.krate(db);
+ let edition = krate.data(db).edition;
+ let mut result = body[self.place.local].name.display(db, edition).to_string();
+ for proj in &self.place.projections {
+ match proj {
+ // In source code autoderef kicks in.
+ ProjectionElem::Deref => {}
+ ProjectionElem::Field(Either::Left(f)) => {
+ let variant_data = f.parent.fields(db);
+ match variant_data.shape {
+ FieldsShape::Record => format_to!(
+ result,
+ ".{}",
+ variant_data.fields()[f.local_id].name.display(db, edition)
+ ),
+ FieldsShape::Tuple => format_to!(
+ result,
+ ".{}",
+ variant_data
+ .fields()
+ .iter()
+ .position(|it| it.0 == f.local_id)
+ .unwrap_or_default()
+ ),
+ FieldsShape::Unit => {}
+ }
+ }
+ ProjectionElem::Field(Either::Right(f)) => {
+ let field = f.index;
+ format_to!(result, ".{field}");
+ }
+ &ProjectionElem::ClosureField(field) => {
+ format_to!(result, ".{field}");
+ }
+ ProjectionElem::Index(_)
+ | ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Subslice { .. }
+ | ProjectionElem::OpaqueCast(_) => {
+ never!("Not happen in closure capture");
+ continue;
+ }
+ }
+ }
+ let final_derefs_count = self
+ .place
+ .projections
+ .iter()
+ .rev()
+ .take_while(|proj| matches!(proj, ProjectionElem::Deref))
+ .count();
+ result.insert_str(0, &"*".repeat(final_derefs_count));
+ result
+ }
+
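+    /// Renders the place with every deref made explicit, adding parentheses where
+    /// needed (e.g. `(*foo).bar`).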
+ pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
+ let body = db.body(owner);
+ let krate = owner.krate(db);
+ let edition = krate.data(db).edition;
+ let mut result = body[self.place.local].name.display(db, edition).to_string();
+ let mut field_need_paren = false;
+ for proj in &self.place.projections {
+ match proj {
+ ProjectionElem::Deref => {
+ result = format!("*{result}");
+ field_need_paren = true;
+ }
+ ProjectionElem::Field(Either::Left(f)) => {
+ if field_need_paren {
+ result = format!("({result})");
+ }
+ let variant_data = f.parent.fields(db);
+ let field = match variant_data.shape {
+ FieldsShape::Record => {
+ variant_data.fields()[f.local_id].name.as_str().to_owned()
+ }
+ FieldsShape::Tuple => variant_data
+ .fields()
+ .iter()
+ .position(|it| it.0 == f.local_id)
+ .unwrap_or_default()
+ .to_string(),
+ FieldsShape::Unit => "[missing field]".to_owned(),
+ };
+ result = format!("{result}.{field}");
+ field_need_paren = false;
+ }
+ ProjectionElem::Field(Either::Right(f)) => {
+ let field = f.index;
+ if field_need_paren {
+ result = format!("({result})");
+ }
+ result = format!("{result}.{field}");
+ field_need_paren = false;
+ }
+ &ProjectionElem::ClosureField(field) => {
+ if field_need_paren {
+ result = format!("({result})");
+ }
+ result = format!("{result}.{field}");
+ field_need_paren = false;
+ }
+ ProjectionElem::Index(_)
+ | ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Subslice { .. }
+ | ProjectionElem::OpaqueCast(_) => {
+ never!("Not happen in closure capture");
+ continue;
+ }
+ }
+ }
+ result
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) struct CapturedItemWithoutTy {
+ pub(crate) place: HirPlace,
+ pub(crate) kind: CaptureKind,
+    /// Each inner vec is a stack of spans; the outer vec has one entry per capture reference.
+ pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
+}
+
+impl CapturedItemWithoutTy {
+ fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
+ let ty = self.place.ty(ctx);
+ let ty = match &self.kind {
+ CaptureKind::ByValue => ty,
+ CaptureKind::ByRef(bk) => {
+ let m = match bk {
+ BorrowKind::Mut { .. } => Mutability::Mut,
+ _ => Mutability::Not,
+ };
+ TyKind::Ref(m, error_lifetime(), ty).intern(Interner)
+ }
+ };
+ return CapturedItem {
+ place: self.place,
+ kind: self.kind,
+ span_stacks: self.span_stacks,
+ ty: replace_placeholder_with_binder(ctx, ty),
+ };
+
+ fn replace_placeholder_with_binder(ctx: &mut InferenceContext<'_>, ty: Ty) -> Binders<Ty> {
+ struct Filler<'a> {
+ db: &'a dyn HirDatabase,
+ generics: &'a Generics,
+ }
+ impl FallibleTypeFolder<Interner> for Filler<'_> {
+ type Error = ();
+
+ fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn try_fold_free_placeholder_const(
+ &mut self,
+ ty: chalk_ir::Ty<Interner>,
+ idx: chalk_ir::PlaceholderIndex,
+ outer_binder: DebruijnIndex,
+ ) -> Result<chalk_ir::Const<Interner>, Self::Error> {
+ let x = from_placeholder_idx(self.db, idx).0;
+ let Some(idx) = self.generics.type_or_const_param_idx(x) else {
+ return Err(());
+ };
+ Ok(BoundVar::new(outer_binder, idx).to_const(Interner, ty))
+ }
+
+ fn try_fold_free_placeholder_ty(
+ &mut self,
+ idx: chalk_ir::PlaceholderIndex,
+ outer_binder: DebruijnIndex,
+ ) -> std::result::Result<Ty, Self::Error> {
+ let x = from_placeholder_idx(self.db, idx).0;
+ let Some(idx) = self.generics.type_or_const_param_idx(x) else {
+ return Err(());
+ };
+ Ok(BoundVar::new(outer_binder, idx).to_ty(Interner))
+ }
+ }
+ let filler = &mut Filler { db: ctx.db, generics: ctx.generics() };
+ let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
+ make_binders(ctx.db, filler.generics, result)
+ }
+ }
+}
+
+impl InferenceContext<'_> {
+ fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
+ let r = self.place_of_expr_without_adjust(tgt_expr)?;
+ let adjustments =
+ self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
+ apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
+ }
+
+ /// Pushes the span into `current_capture_span_stack`, *without clearing it first*.
+ fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace> {
+ if path.type_anchor().is_some() {
+ return None;
+ }
+ let hygiene = self.body.expr_or_pat_path_hygiene(id);
+ self.resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).and_then(|result| {
+ match result {
+ ValueNs::LocalBinding(binding) => {
+ let mir_span = match id {
+ ExprOrPatId::ExprId(id) => MirSpan::ExprId(id),
+ ExprOrPatId::PatId(id) => MirSpan::PatId(id),
+ };
+ self.current_capture_span_stack.push(mir_span);
+ Some(HirPlace { local: binding, projections: Vec::new() })
+ }
+ _ => None,
+ }
+ })
+ }
+
+ /// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
+ fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
+ self.current_capture_span_stack.clear();
+ match &self.body[tgt_expr] {
+ Expr::Path(p) => {
+ let resolver_guard =
+ self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
+ let result = self.path_place(p, tgt_expr.into());
+ self.resolver.reset_to_guard(resolver_guard);
+ return result;
+ }
+ Expr::Field { expr, name: _ } => {
+ let mut place = self.place_of_expr(*expr)?;
+ let field = self.result.field_resolution(tgt_expr)?;
+ self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
+ place.projections.push(ProjectionElem::Field(field));
+ return Some(place);
+ }
+ Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
+ if matches!(
+ self.expr_ty_after_adjustments(*expr).kind(Interner),
+ TyKind::Ref(..) | TyKind::Raw(..)
+ ) {
+ let mut place = self.place_of_expr(*expr)?;
+ self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
+ place.projections.push(ProjectionElem::Deref);
+ return Some(place);
+ }
+ }
+ _ => (),
+ }
+ None
+ }
+
+ fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
+ self.current_captures.push(CapturedItemWithoutTy {
+ place,
+ kind,
+ span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()],
+ });
+ }
+
+ fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
+ // The first span is the identifier, and it must always remain.
+ truncate_to += 1;
+ for span_stack in &mut capture.span_stacks {
+ let mut remained = truncate_to;
+ let mut actual_truncate_to = 0;
+ for &span in &*span_stack {
+ actual_truncate_to += 1;
+ if !span.is_ref_span(self.body) {
+ remained -= 1;
+ if remained == 0 {
+ break;
+ }
+ }
+ }
+ if actual_truncate_to < span_stack.len()
+ && span_stack[actual_truncate_to].is_ref_span(self.body)
+ {
+                // Include the ref operator if there is one; we will fix it later (in `strip_captures_ref_span()`) if it's incorrect.
+ actual_truncate_to += 1;
+ }
+ span_stack.truncate(actual_truncate_to);
+ }
+ }
+
+ fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
+ if let Some(place) = place {
+ self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
+ }
+ self.walk_expr(expr);
+ }
+
+ fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
+ if self.is_upvar(&place) {
+ self.push_capture(place, kind);
+ }
+ }
+
+ fn mutate_path_pat(&mut self, path: &Path, id: PatId) {
+ if let Some(place) = self.path_place(path, id.into()) {
+ self.add_capture(
+ place,
+ CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
+ );
+ self.current_capture_span_stack.pop(); // Remove the pattern span.
+ }
+ }
+
+ fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
+ if let Some(place) = place {
+ self.add_capture(
+ place,
+ CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
+ );
+ }
+ self.walk_expr(expr);
+ }
+
+ fn consume_expr(&mut self, expr: ExprId) {
+ if let Some(place) = self.place_of_expr(expr) {
+ self.consume_place(place);
+ }
+ self.walk_expr(expr);
+ }
+
+ fn consume_place(&mut self, place: HirPlace) {
+ if self.is_upvar(&place) {
+ let ty = place.ty(self);
+ let kind = if self.is_ty_copy(ty) {
+ CaptureKind::ByRef(BorrowKind::Shared)
+ } else {
+ CaptureKind::ByValue
+ };
+ self.push_capture(place, kind);
+ }
+ }
+
+ fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
+ if let Some((last, rest)) = adjustment.split_last() {
+ match &last.kind {
+ Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
+ self.walk_expr_with_adjust(tgt_expr, rest)
+ }
+ Adjust::Deref(Some(m)) => match m.0 {
+ Some(m) => {
+ self.ref_capture_with_adjusts(m, tgt_expr, rest);
+ }
+ None => unreachable!(),
+ },
+ Adjust::Borrow(b) => {
+ self.ref_capture_with_adjusts(b.mutability(), tgt_expr, rest);
+ }
+ }
+ } else {
+ self.walk_expr_without_adjust(tgt_expr);
+ }
+ }
+
+ fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) {
+ let capture_kind = match m {
+ Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
+ Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
+ };
+ if let Some(place) = self.place_of_expr_without_adjust(tgt_expr)
+ && let Some(place) =
+ apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest)
+ {
+ self.add_capture(place, capture_kind);
+ }
+ self.walk_expr_with_adjust(tgt_expr, rest);
+ }
+
+ fn walk_expr(&mut self, tgt_expr: ExprId) {
+ if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) {
+            // FIXME: this take is completely unneeded and is just here to make the borrow
+            // checker happy. Remove it if you can.
+ let x_taken = mem::take(it);
+ self.walk_expr_with_adjust(tgt_expr, &x_taken);
+ *self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken;
+ } else {
+ self.walk_expr_without_adjust(tgt_expr);
+ }
+ }
+
+ fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) {
+ match &self.body[tgt_expr] {
+ Expr::OffsetOf(_) => (),
+ Expr::InlineAsm(e) => e.operands.iter().for_each(|(_, op)| match op {
+ AsmOperand::In { expr, .. }
+ | AsmOperand::Out { expr: Some(expr), .. }
+ | AsmOperand::InOut { expr, .. } => self.walk_expr_without_adjust(*expr),
+ AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
+ self.walk_expr_without_adjust(*in_expr);
+ if let Some(out_expr) = out_expr {
+ self.walk_expr_without_adjust(*out_expr);
+ }
+ }
+ AsmOperand::Out { expr: None, .. }
+ | AsmOperand::Const(_)
+ | AsmOperand::Label(_)
+ | AsmOperand::Sym(_) => (),
+ }),
+ Expr::If { condition, then_branch, else_branch } => {
+ self.consume_expr(*condition);
+ self.consume_expr(*then_branch);
+ if let &Some(expr) = else_branch {
+ self.consume_expr(expr);
+ }
+ }
+ Expr::Async { statements, tail, .. }
+ | Expr::Unsafe { statements, tail, .. }
+ | Expr::Block { statements, tail, .. } => {
+ for s in statements.iter() {
+ match s {
+ Statement::Let { pat, type_ref: _, initializer, else_branch } => {
+ if let Some(else_branch) = else_branch {
+ self.consume_expr(*else_branch);
+ }
+ if let Some(initializer) = initializer {
+ if else_branch.is_some() {
+ self.consume_expr(*initializer);
+ } else {
+ self.walk_expr(*initializer);
+ }
+ if let Some(place) = self.place_of_expr(*initializer) {
+ self.consume_with_pat(place, *pat);
+ }
+ }
+ }
+ Statement::Expr { expr, has_semi: _ } => {
+ self.consume_expr(*expr);
+ }
+ Statement::Item(_) => (),
+ }
+ }
+ if let Some(tail) = tail {
+ self.consume_expr(*tail);
+ }
+ }
+ Expr::Call { callee, args } => {
+ self.consume_expr(*callee);
+ self.consume_exprs(args.iter().copied());
+ }
+ Expr::MethodCall { receiver, args, .. } => {
+ self.consume_expr(*receiver);
+ self.consume_exprs(args.iter().copied());
+ }
+ Expr::Match { expr, arms } => {
+ for arm in arms.iter() {
+ self.consume_expr(arm.expr);
+ if let Some(guard) = arm.guard {
+ self.consume_expr(guard);
+ }
+ }
+ self.walk_expr(*expr);
+ if let Some(discr_place) = self.place_of_expr(*expr)
+ && self.is_upvar(&discr_place)
+ {
+ let mut capture_mode = None;
+ for arm in arms.iter() {
+ self.walk_pat(&mut capture_mode, arm.pat);
+ }
+ if let Some(c) = capture_mode {
+ self.push_capture(discr_place, c);
+ }
+ }
+ }
+ Expr::Break { expr, label: _ }
+ | Expr::Return { expr }
+ | Expr::Yield { expr }
+ | Expr::Yeet { expr } => {
+ if let &Some(expr) = expr {
+ self.consume_expr(expr);
+ }
+ }
+ &Expr::Become { expr } => {
+ self.consume_expr(expr);
+ }
+ Expr::RecordLit { fields, spread, .. } => {
+ if let &Some(expr) = spread {
+ self.consume_expr(expr);
+ }
+ self.consume_exprs(fields.iter().map(|it| it.expr));
+ }
+ Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
+ Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
+ if matches!(
+ self.expr_ty_after_adjustments(*expr).kind(Interner),
+ TyKind::Ref(..) | TyKind::Raw(..)
+ ) {
+ self.select_from_expr(*expr);
+ } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
+ let mutability = 'b: {
+ if let Some(deref_trait) =
+ self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
+ && let Some(deref_fn) = deref_trait
+ .trait_items(self.db)
+ .method_by_name(&Name::new_symbol_root(sym::deref_mut))
+ {
+ break 'b deref_fn == f;
+ }
+ false
+ };
+ let place = self.place_of_expr(*expr);
+ if mutability {
+ self.mutate_expr(*expr, place);
+ } else {
+ self.ref_expr(*expr, place);
+ }
+ } else {
+ self.select_from_expr(*expr);
+ }
+ }
+ Expr::Let { pat, expr } => {
+ self.walk_expr(*expr);
+ if let Some(place) = self.place_of_expr(*expr) {
+ self.consume_with_pat(place, *pat);
+ }
+ }
+ Expr::UnaryOp { expr, op: _ }
+ | Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
+ | Expr::Await { expr }
+ | Expr::Loop { body: expr, label: _ }
+ | Expr::Box { expr }
+ | Expr::Cast { expr, type_ref: _ } => {
+ self.consume_expr(*expr);
+ }
+ Expr::Ref { expr, rawness: _, mutability } => {
+ // We need to do this before we push the span so the order will be correct.
+ let place = self.place_of_expr(*expr);
+ self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
+ match mutability {
+ hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place),
+ hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place),
+ }
+ }
+ Expr::BinaryOp { lhs, rhs, op } => {
+ let Some(op) = op else {
+ return;
+ };
+ if matches!(op, BinaryOp::Assignment { .. }) {
+ let place = self.place_of_expr(*lhs);
+ self.mutate_expr(*lhs, place);
+ self.consume_expr(*rhs);
+ return;
+ }
+ self.consume_expr(*lhs);
+ self.consume_expr(*rhs);
+ }
+ Expr::Range { lhs, rhs, range_type: _ } => {
+ if let &Some(expr) = lhs {
+ self.consume_expr(expr);
+ }
+ if let &Some(expr) = rhs {
+ self.consume_expr(expr);
+ }
+ }
+ Expr::Index { base, index } => {
+ self.select_from_expr(*base);
+ self.consume_expr(*index);
+ }
+ Expr::Closure { .. } => {
+ let ty = self.expr_ty(tgt_expr);
+ let TyKind::Closure(id, _) = ty.kind(Interner) else {
+ never!("closure type is always closure");
+ return;
+ };
+ let (captures, _) =
+ self.result.closure_info.get(id).expect(
+ "We sort closures, so we should always have data for inner closures",
+ );
+ let mut cc = mem::take(&mut self.current_captures);
+ cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
+ CapturedItemWithoutTy {
+ place: it.place.clone(),
+ kind: it.kind,
+ span_stacks: it.span_stacks.clone(),
+ }
+ }));
+ self.current_captures = cc;
+ }
+ Expr::Array(Array::ElementList { elements: exprs }) | Expr::Tuple { exprs } => {
+ self.consume_exprs(exprs.iter().copied())
+ }
+ &Expr::Assignment { target, value } => {
+ self.walk_expr(value);
+ let resolver_guard =
+ self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
+ match self.place_of_expr(value) {
+ Some(rhs_place) => {
+ self.inside_assignment = true;
+ self.consume_with_pat(rhs_place, target);
+ self.inside_assignment = false;
+ }
+ None => self.body.walk_pats(target, &mut |pat| match &self.body[pat] {
+ Pat::Path(path) => self.mutate_path_pat(path, pat),
+ &Pat::Expr(expr) => {
+ let place = self.place_of_expr(expr);
+ self.mutate_expr(expr, place);
+ }
+ _ => {}
+ }),
+ }
+ self.resolver.reset_to_guard(resolver_guard);
+ }
+
+ Expr::Missing
+ | Expr::Continue { .. }
+ | Expr::Path(_)
+ | Expr::Literal(_)
+ | Expr::Const(_)
+ | Expr::Underscore => (),
+ }
+ }
+
+ fn walk_pat(&mut self, result: &mut Option<CaptureKind>, pat: PatId) {
+ let mut update_result = |ck: CaptureKind| match result {
+ Some(r) => {
+ *r = cmp::max(*r, ck);
+ }
+ None => *result = Some(ck),
+ };
+
+ self.walk_pat_inner(
+ pat,
+ &mut update_result,
+ BorrowKind::Mut { kind: MutBorrowKind::Default },
+ );
+ }
+
+ fn walk_pat_inner(
+ &mut self,
+ p: PatId,
+ update_result: &mut impl FnMut(CaptureKind),
+ mut for_mut: BorrowKind,
+ ) {
+ match &self.body[p] {
+ Pat::Ref { .. }
+ | Pat::Box { .. }
+ | Pat::Missing
+ | Pat::Wild
+ | Pat::Tuple { .. }
+ | Pat::Expr(_)
+ | Pat::Or(_) => (),
+ Pat::TupleStruct { .. } | Pat::Record { .. } => {
+ if let Some(variant) = self.result.variant_resolution_for_pat(p) {
+ let adt = variant.adt_id(self.db);
+ let is_multivariant = match adt {
+ hir_def::AdtId::EnumId(e) => e.enum_variants(self.db).variants.len() != 1,
+ _ => false,
+ };
+ if is_multivariant {
+ update_result(CaptureKind::ByRef(BorrowKind::Shared));
+ }
+ }
+ }
+ Pat::Slice { .. }
+ | Pat::ConstBlock(_)
+ | Pat::Path(_)
+ | Pat::Lit(_)
+ | Pat::Range { .. } => {
+ update_result(CaptureKind::ByRef(BorrowKind::Shared));
+ }
+ Pat::Bind { id, .. } => match self.result.binding_modes[p] {
+ crate::BindingMode::Move => {
+ if self.is_ty_copy(self.result.type_of_binding[*id].clone()) {
+ update_result(CaptureKind::ByRef(BorrowKind::Shared));
+ } else {
+ update_result(CaptureKind::ByValue);
+ }
+ }
+ crate::BindingMode::Ref(r) => match r {
+ Mutability::Mut => update_result(CaptureKind::ByRef(for_mut)),
+ Mutability::Not => update_result(CaptureKind::ByRef(BorrowKind::Shared)),
+ },
+ },
+ }
+ if self.result.pat_adjustments.get(&p).is_some_and(|it| !it.is_empty()) {
+ for_mut = BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture };
+ }
+ self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
+ }
+
+ fn expr_ty(&self, expr: ExprId) -> Ty {
+ self.result[expr].clone()
+ }
+
+ fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
+ let mut ty = None;
+ if let Some(it) = self.result.expr_adjustments.get(&e)
+ && let Some(it) = it.last()
+ {
+ ty = Some(it.target.clone());
+ }
+ ty.unwrap_or_else(|| self.expr_ty(e))
+ }
+
+ fn is_upvar(&self, place: &HirPlace) -> bool {
+ if let Some(c) = self.current_closure {
+ let InternedClosure(_, root) = self.db.lookup_intern_closure(c);
+ return self.body.is_binding_upvar(place.local, root);
+ }
+ false
+ }
+
+ fn is_ty_copy(&mut self, ty: Ty) -> bool {
+ if let TyKind::Closure(id, _) = ty.kind(Interner) {
+            // FIXME: We handle closures as a special case, since chalk considers every closure to be
+            // copy. We should probably let chalk know which closures are copy, but I don't know how to
+            // do that without creating query cycles.
+ return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true);
+ }
+ self.table.resolve_completely(ty).is_copy(self.db, self.owner)
+ }
+
+ fn select_from_expr(&mut self, expr: ExprId) {
+ self.walk_expr(expr);
+ }
+
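+    /// Reduces capture precision around places that are `unsafe` to read through: as
+    /// soon as a capture's base type or one of its projections is a raw pointer or a
+    /// union, the capture is truncated at that point and its kind is set to a shared
+    /// borrow.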
+ fn restrict_precision_for_unsafe(&mut self) {
+ // FIXME: Borrow checker problems without this.
+ let mut current_captures = std::mem::take(&mut self.current_captures);
+ for capture in &mut current_captures {
+ let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
+ if ty.as_raw_ptr().is_some() || ty.is_union() {
+ capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+ self.truncate_capture_spans(capture, 0);
+ capture.place.projections.truncate(0);
+ continue;
+ }
+ for (i, p) in capture.place.projections.iter().enumerate() {
+ ty = p.projected_ty(
+ ty,
+ self.db,
+ |_, _, _| {
+ unreachable!("Closure field only happens in MIR");
+ },
+ self.owner.module(self.db).krate(),
+ );
+ if ty.as_raw_ptr().is_some() || ty.is_union() {
+ capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+ self.truncate_capture_spans(capture, i + 1);
+ capture.place.projections.truncate(i + 1);
+ break;
+ }
+ }
+ }
+ self.current_captures = current_captures;
+ }
+
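+    /// For `move` closures: every capture is switched to by-value and its projections
+    /// are truncated at the first deref, since a `move` closure cannot move out
+    /// through a reference.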
+ fn adjust_for_move_closure(&mut self) {
+ // FIXME: Borrow checker won't allow without this.
+ let mut current_captures = std::mem::take(&mut self.current_captures);
+ for capture in &mut current_captures {
+ if let Some(first_deref) =
+ capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref)
+ {
+ self.truncate_capture_spans(capture, first_deref);
+ capture.place.projections.truncate(first_deref);
+ }
+ capture.kind = CaptureKind::ByValue;
+ }
+ self.current_captures = current_captures;
+ }
+
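+ /// Deduplicates captures: if one captured place is a prefix of another (e.g. `a` and `a.b`),
+ /// only the shorter place is kept, and its capture kind is upgraded to the stronger of the two.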
+ fn minimize_captures(&mut self) {
+ self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
+ let mut hash_map = FxHashMap::<HirPlace, usize>::default();
+ let result = mem::take(&mut self.current_captures);
+ for mut item in result {
+ let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
+ let mut it = item.place.projections.iter();
+ let prev_index = loop {
+ if let Some(k) = hash_map.get(&lookup_place) {
+ break Some(*k);
+ }
+ match it.next() {
+ Some(it) => {
+ lookup_place.projections.push(it.clone());
+ }
+ None => break None,
+ }
+ };
+ match prev_index {
+ Some(p) => {
+ let prev_projections_len = self.current_captures[p].place.projections.len();
+ self.truncate_capture_spans(&mut item, prev_projections_len);
+ self.current_captures[p].span_stacks.extend(item.span_stacks);
+ let len = self.current_captures[p].place.projections.len();
+ let kind_after_truncate =
+ item.place.capture_kind_of_truncated_place(item.kind, len);
+ self.current_captures[p].kind =
+ cmp::max(kind_after_truncate, self.current_captures[p].kind);
+ }
+ None => {
+ hash_map.insert(item.place.clone(), self.current_captures.len());
+ self.current_captures.push(item);
+ }
+ }
+ }
+ }
+
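+ /// Records how destructuring `place` with the pattern `tgt_pat` uses it: bindings by value
+ /// consume the corresponding (sub)place, `ref`/`ref mut` bindings borrow it, and struct/tuple
+ /// patterns recurse into the matching field projections.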
+ fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
+ let adjustments_count =
+ self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
+ place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
+ self.current_capture_span_stack
+ .extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat)));
+ 'reset_span_stack: {
+ match &self.body[tgt_pat] {
+ Pat::Missing | Pat::Wild => (),
+ Pat::Tuple { args, ellipsis } => {
+ let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
+ let field_count = match self.result[tgt_pat].kind(Interner) {
+ TyKind::Tuple(_, s) => s.len(Interner),
+ _ => break 'reset_span_stack,
+ };
+ let fields = 0..field_count;
+ let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
+ for (&arg, i) in it {
+ let mut p = place.clone();
+ self.current_capture_span_stack.push(MirSpan::PatId(arg));
+ p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
+ tuple: TupleId(!0), // dummy id, as it's unused anyway
+ index: i as u32,
+ })));
+ self.consume_with_pat(p, arg);
+ self.current_capture_span_stack.pop();
+ }
+ }
+ Pat::Or(pats) => {
+ for pat in pats.iter() {
+ self.consume_with_pat(place.clone(), *pat);
+ }
+ }
+ Pat::Record { args, .. } => {
+ let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
+ break 'reset_span_stack;
+ };
+ match variant {
+ VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
+ self.consume_place(place)
+ }
+ VariantId::StructId(s) => {
+ let vd = s.fields(self.db);
+ for field_pat in args.iter() {
+ let arg = field_pat.pat;
+ let Some(local_id) = vd.field(&field_pat.name) else {
+ continue;
+ };
+ let mut p = place.clone();
+ self.current_capture_span_stack.push(MirSpan::PatId(arg));
+ p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
+ parent: variant,
+ local_id,
+ })));
+ self.consume_with_pat(p, arg);
+ self.current_capture_span_stack.pop();
+ }
+ }
+ }
+ }
+ Pat::Range { .. } | Pat::Slice { .. } | Pat::ConstBlock(_) | Pat::Lit(_) => {
+ self.consume_place(place)
+ }
+ Pat::Path(path) => {
+ if self.inside_assignment {
+ self.mutate_path_pat(path, tgt_pat);
+ }
+ self.consume_place(place);
+ }
+ &Pat::Bind { id, subpat: _ } => {
+ let mode = self.result.binding_modes[tgt_pat];
+ let capture_kind = match mode {
+ BindingMode::Move => {
+ self.consume_place(place);
+ break 'reset_span_stack;
+ }
+ BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
+ BindingMode::Ref(Mutability::Mut) => {
+ BorrowKind::Mut { kind: MutBorrowKind::Default }
+ }
+ };
+ self.current_capture_span_stack.push(MirSpan::BindingId(id));
+ self.add_capture(place, CaptureKind::ByRef(capture_kind));
+ self.current_capture_span_stack.pop();
+ }
+ Pat::TupleStruct { path: _, args, ellipsis } => {
+ let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
+ break 'reset_span_stack;
+ };
+ match variant {
+ VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
+ self.consume_place(place)
+ }
+ VariantId::StructId(s) => {
+ let vd = s.fields(self.db);
+ let (al, ar) =
+ args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
+ let fields = vd.fields().iter();
+ let it = al
+ .iter()
+ .zip(fields.clone())
+ .chain(ar.iter().rev().zip(fields.rev()));
+ for (&arg, (i, _)) in it {
+ let mut p = place.clone();
+ self.current_capture_span_stack.push(MirSpan::PatId(arg));
+ p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
+ parent: variant,
+ local_id: i,
+ })));
+ self.consume_with_pat(p, arg);
+ self.current_capture_span_stack.pop();
+ }
+ }
+ }
+ }
+ Pat::Ref { pat, mutability: _ } => {
+ self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat));
+ place.projections.push(ProjectionElem::Deref);
+ self.consume_with_pat(place, *pat);
+ self.current_capture_span_stack.pop();
+ }
+ Pat::Box { .. } => (), // not supported
+ &Pat::Expr(expr) => {
+ self.consume_place(place);
+ let pat_capture_span_stack = mem::take(&mut self.current_capture_span_stack);
+ let old_inside_assignment = mem::replace(&mut self.inside_assignment, false);
+ let lhs_place = self.place_of_expr(expr);
+ self.mutate_expr(expr, lhs_place);
+ self.inside_assignment = old_inside_assignment;
+ self.current_capture_span_stack = pat_capture_span_stack;
+ }
+ }
+ }
+ self.current_capture_span_stack
+ .truncate(self.current_capture_span_stack.len() - adjustments_count);
+ }
+
+ fn consume_exprs(&mut self, exprs: impl Iterator<Item = ExprId>) {
+ for expr in exprs {
+ self.consume_expr(expr);
+ }
+ }
+
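+ /// Determines which `Fn*` trait the closure can implement given the captures collected so far:
+ /// any by-value capture forces `FnOnce`, a mutable borrow forces at most `FnMut`, and shared
+ /// borrows alone still allow `Fn`.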
+ fn closure_kind(&self) -> FnTrait {
+ let mut r = FnTrait::Fn;
+ for it in &self.current_captures {
+ r = cmp::min(
+ r,
+ match &it.kind {
+ CaptureKind::ByRef(BorrowKind::Mut { .. }) => FnTrait::FnMut,
+ CaptureKind::ByRef(BorrowKind::Shallow | BorrowKind::Shared) => FnTrait::Fn,
+ CaptureKind::ByValue => FnTrait::FnOnce,
+ },
+ )
+ }
+ r
+ }
+
+ fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait {
+ let InternedClosure(_, root) = self.db.lookup_intern_closure(closure.into());
+ self.current_closure = Some(closure.into());
+ let Expr::Closure { body, capture_by, .. } = &self.body[root] else {
+ unreachable!("Closure expression id is always closure");
+ };
+ self.consume_expr(*body);
+ for item in &self.current_captures {
+ if matches!(
+ item.kind,
+ CaptureKind::ByRef(BorrowKind::Mut {
+ kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow
+ })
+ ) && !item.place.projections.contains(&ProjectionElem::Deref)
+ {
+ // FIXME: remove the `mutated_bindings_in_closure` completely and add proper fake reads in
+ // MIR. I didn't do that due to duplicate diagnostics.
+ self.result.mutated_bindings_in_closure.insert(item.place.local);
+ }
+ }
+ self.restrict_precision_for_unsafe();
+ // `closure_kind` should be computed before `adjust_for_move_closure`.
+ // If there is a pre-deduced kind for the closure, use it instead of the one determined by the captures, as rustc does.
+ // rustc also emits diagnostics here if the latter is not a subtype of the former.
+ let closure_kind = self
+ .result
+ .closure_info
+ .get(&closure)
+ .map_or_else(|| self.closure_kind(), |info| info.1);
+ match capture_by {
+ CaptureBy::Value => self.adjust_for_move_closure(),
+ CaptureBy::Ref => (),
+ }
+ self.minimize_captures();
+ self.strip_captures_ref_span();
+ let result = mem::take(&mut self.current_captures);
+ let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
+ self.result.closure_info.insert(closure, (captures, closure_kind));
+ closure_kind
+ }
+
+ fn strip_captures_ref_span(&mut self) {
+ // FIXME: The borrow checker won't allow this without taking the captures out first.
+ let mut captures = std::mem::take(&mut self.current_captures);
+ for capture in &mut captures {
+ if matches!(capture.kind, CaptureKind::ByValue) {
+ for span_stack in &mut capture.span_stacks {
+ if span_stack[span_stack.len() - 1].is_ref_span(self.body) {
+ span_stack.truncate(span_stack.len() - 1);
+ }
+ }
+ }
+ }
+ self.current_captures = captures;
+ }
+
+ pub(crate) fn infer_closures(&mut self) {
+ let deferred_closures = self.sort_closures();
+ for (closure, exprs) in deferred_closures.into_iter().rev() {
+ self.current_captures = vec![];
+ let kind = self.analyze_closure(closure);
+
+ for (derefed_callee, callee_ty, params, expr) in exprs {
+ if let &Expr::Call { callee, .. } = &self.body[expr] {
+ let mut adjustments =
+ self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec();
+ self.write_fn_trait_method_resolution(
+ kind,
+ &derefed_callee,
+ &mut adjustments,
+ &callee_ty,
+ ¶ms,
+ expr,
+ );
+ self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice());
+ }
+ }
+ }
+ }
+
+ /// We want to analyze some closures before others, to get a correct analysis:
+ /// * We should analyze nested closures before the parent, since the parent should capture some of
+ /// the things that its children capture.
+ /// * If a closure calls another closure, we need to analyze the callee first, to find out how we
+ /// should capture it (e.g. by move for `FnOnce`).
+ ///
+ /// These dependencies are collected during the main inference. We do a topological sort in this
+ /// function; it consumes the `deferred_closures` field and returns its content in a sorted vector.
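+ ///
+ /// Illustrative sketch (hypothetical snippet):
+ ///
+ /// ```ignore (illustrative)
+ /// let outer = || {
+ ///     let inner = || x;
+ ///     inner()
+ /// };
+ /// ```
+ ///
+ /// Here `inner` is analyzed before `outer`: `outer` needs `inner`'s captures to know that it must
+ /// capture `x` itself, and `inner`'s `Fn*` kind to know how the call `inner()` uses it.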
+ fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> {
+ let mut deferred_closures = mem::take(&mut self.deferred_closures);
+ let mut dependents_count: FxHashMap<ClosureId, usize> =
+ deferred_closures.keys().map(|it| ((*it).into(), 0)).collect();
+ for deps in self.closure_dependencies.values() {
+ for dep in deps {
+ *dependents_count.entry((*dep).into()).or_default() += 1;
+ }
+ }
+ let mut queue: Vec<_> = deferred_closures
+ .keys()
+ .copied()
+ .filter(|&it| dependents_count[&it.into()] == 0)
+ .collect();
+ let mut result = vec![];
+ while let Some(it) = queue.pop() {
+ if let Some(d) = deferred_closures.remove(&it) {
+ result.push((it.into(), d));
+ }
+ for &dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) {
+ let cnt = dependents_count.get_mut(&dep.into()).unwrap();
+ *cnt -= 1;
+ if *cnt == 0 {
+ queue.push(dep);
+ }
+ }
+ }
+ assert!(deferred_closures.is_empty(), "we should have analyzed all closures");
+ result
+ }
+
+ pub(crate) fn add_current_closure_dependency(&mut self, dep: InternedClosureId) {
+ if let Some(c) = self.current_closure
+ && !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep)
+ {
+ self.closure_dependencies.entry(c).or_default().push(dep);
+ }
+
+ fn dep_creates_cycle(
+ closure_dependencies: &FxHashMap<InternedClosureId, Vec<InternedClosureId>>,
+ visited: &mut FxHashSet<InternedClosureId>,
+ from: InternedClosureId,
+ to: InternedClosureId,
+ ) -> bool {
+ if !visited.insert(from) {
+ return false;
+ }
+
+ if from == to {
+ return true;
+ }
+
+ if let Some(deps) = closure_dependencies.get(&to) {
+ for dep in deps {
+ if dep_creates_cycle(closure_dependencies, visited, from, *dep) {
+ return true;
+ }
+ }
+ }
+
+ false
+ }
+ }
+}
+
+/// Call this only when the last span in the stack isn't a split.
+fn apply_adjusts_to_place(
+ current_capture_span_stack: &mut Vec<MirSpan>,
+ mut r: HirPlace,
+ adjustments: &[Adjustment],
+) -> Option<HirPlace> {
+ let span = *current_capture_span_stack.last().expect("empty capture span stack");
+ for adj in adjustments {
+ match &adj.kind {
+ Adjust::Deref(None) => {
+ current_capture_span_stack.push(span);
+ r.projections.push(ProjectionElem::Deref);
+ }
+ _ => return None,
+ }
+ }
+ Some(r)
+}
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs
index df51666..7930d8b 100644
--- a/crates/hir-ty/src/infer/coerce.rs
+++ b/crates/hir-ty/src/infer/coerce.rs
@@ -1,452 +1,388 @@
-//! Coercion logic. Coercions are certain type conversions that can implicitly
-//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
-//! like going from `&Vec<T>` to `&[T]`.
+//! # Type Coercion
//!
-//! See <https://doc.rust-lang.org/nomicon/coercions.html> and
-//! `rustc_hir_analysis/check/coercion.rs`.
+//! Under certain circumstances we will coerce from one type to another,
+//! for example by auto-borrowing. This occurs in situations where the
+//! compiler has a firm 'expected type' that was supplied from the user,
+//! and where the actual type is similar to that expected type in purpose
+//! but not in representation (so actual subtyping is inappropriate).
+//!
+//! ## Reborrowing
+//!
+//! Note that if we are expecting a reference, we will *reborrow*
+//! even if the argument provided was already a reference. This is
+//! useful for freezing mut things (that is, when the expected type is &T
+//! but you have &mut T) and also for avoiding the linearity
+//! of mut things (when the expected is &mut T and you have &mut T). See
+//! the various `tests/ui/coerce/*.rs` tests for
+//! examples of where this is useful.
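+//!
+//! For instance (an illustrative sketch; `takes_shared` is a hypothetical function), a `&mut i32`
+//! is frozen into an `&i32` by an implicit reborrow at the call site:
+//!
+//! ```ignore (illustrative)
+//! fn takes_shared(_: &i32) {}
+//!
+//! let mut x = 5;
+//! let r = &mut x;
+//! // `r: &mut i32` is implicitly reborrowed as `&*r: &i32`.
+//! takes_shared(r);
+//! ```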
+//!
+//! ## Subtle note
+//!
+//! When inferring the generic arguments of functions, the argument
+//! order is relevant, which can lead to the following edge case:
+//!
+//! ```ignore (illustrative)
+//! fn foo<T>(a: T, b: T) {
+//! // ...
+//! }
+//!
+//! foo(&7i32, &mut 7i32);
+//! // This compiles, as we first infer `T` to be `&i32`,
+//! // and then coerce `&mut 7i32` to `&7i32`.
+//!
+//! foo(&mut 7i32, &7i32);
+//! // This does not compile, as we first infer `T` to be `&mut i32`
+//! // and are then unable to coerce `&7i32` to `&mut i32`.
+//! ```
-use std::iter;
-
-use chalk_ir::{BoundVar, Mutability, TyKind, TyVariableKind, cast::Cast};
-use hir_def::{hir::ExprId, lang_item::LangItem};
-use rustc_type_ir::solve::Certainty;
-use stdx::always;
+use chalk_ir::cast::Cast;
+use hir_def::{
+ CallableDefId,
+ hir::{ExprId, ExprOrPatId},
+ lang_item::LangItem,
+ signatures::FunctionSignature,
+};
+use intern::sym;
+use rustc_ast_ir::Mutability;
+use rustc_type_ir::{
+ TypeAndMut,
+ error::TypeError,
+ inherent::{IntoKind, Safety, Ty as _},
+};
+use smallvec::{SmallVec, smallvec};
+use tracing::{debug, instrument};
use triomphe::Arc;
use crate::{
- Canonical, FnAbi, FnPointer, FnSig, Goal, Interner, Lifetime, Substitution, TraitEnvironment,
- Ty, TyBuilder, TyExt,
- autoderef::{Autoderef, AutoderefKind},
- db::HirDatabase,
- infer::{
- Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
- TypeError, TypeMismatch,
+ Adjust, Adjustment, AutoBorrow, Interner, PointerCast, TargetFeatures, TraitEnvironment,
+ autoderef::Autoderef,
+ db::{HirDatabase, InternedClosureId},
+ infer::{AllowTwoPhase, InferenceContext, TypeMismatch, unify::InferenceTable},
+ next_solver::{
+ Binder, CallableIdWrapper, ClauseKind, CoercePredicate, DbInterner, ErrorGuaranteed,
+ GenericArgs, PolyFnSig, PredicateKind, Region, SolverDefId, TraitRef, Ty, TyKind,
+ infer::{
+ DefineOpaqueTypes, InferCtxt, InferOk, InferResult,
+ relate::RelateResult,
+ select::{ImplSource, SelectionError},
+ traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations},
+ },
+ mapping::{ChalkToNextSolver, NextSolverToChalk},
+ obligation_ctxt::ObligationCtxt,
},
- next_solver,
- utils::ClosureSubst,
+ utils::TargetFeatureIsSafeInTarget,
};
-use super::unify::InferenceTable;
-
-pub(crate) type CoerceResult<'db> = Result<InferOk<'db, (Vec<Adjustment>, Ty)>, TypeError>;
-
-/// Do not require any adjustments, i.e. coerce `x -> x`.
-fn identity(_: Ty) -> Vec<Adjustment> {
- vec![]
+struct Coerce<'a, 'b, 'db> {
+ table: &'a mut InferenceTable<'db>,
+ has_errors: &'a mut bool,
+ target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures, TargetFeatureIsSafeInTarget),
+ use_lub: bool,
+ /// Determines whether or not `allow_two_phase_borrow` is set on any
+ /// autoref adjustments we create while coercing. We don't want to
+ /// allow deref coercions to create two-phase borrows, at least initially,
+ /// but we do need two-phase borrows for function argument reborrows.
+ /// See rust#47489 and rust#48598.
+ /// See the docs on the `AllowTwoPhase` type for a more detailed discussion.
+ allow_two_phase: AllowTwoPhase,
+ /// Whether we allow `NeverToAny` coercions. This is unsound if we're
+ /// coercing a place expression without it counting as a read in the MIR.
+ /// This is a side-effect of HIR not really having a great distinction
+ /// between places and values.
+ coerce_never: bool,
+ cause: ObligationCause,
}
-fn simple(kind: Adjust) -> impl FnOnce(Ty) -> Vec<Adjustment> {
- move |target| vec![Adjustment { kind, target }]
+type CoerceResult<'db> = InferResult<'db, (Vec<Adjustment>, Ty<'db>)>;
+
+/// Coercing a mutable reference to an immutable one works, while
+/// coercing `&T` to `&mut T` should be forbidden.
+fn coerce_mutbls<'db>(from_mutbl: Mutability, to_mutbl: Mutability) -> RelateResult<'db, ()> {
+ if from_mutbl >= to_mutbl { Ok(()) } else { Err(TypeError::Mutability) }
}
/// This always returns `Ok(...)`.
fn success<'db>(
adj: Vec<Adjustment>,
- target: Ty,
- goals: Vec<next_solver::Goal<'db, next_solver::Predicate<'db>>>,
+ target: Ty<'db>,
+ obligations: PredicateObligations<'db>,
) -> CoerceResult<'db> {
- Ok(InferOk { goals, value: (adj, target) })
+ Ok(InferOk { value: (adj, target), obligations })
}
-pub(super) enum CoercionCause {
- // FIXME: Make better use of this. Right now things like return and break without a value
- // use it to point to themselves, causing us to report a mismatch on those expressions even
- // though technically they themselves are `!`
- Expr(ExprId),
-}
-
-#[derive(Clone, Debug)]
-pub(super) struct CoerceMany {
- expected_ty: Ty,
- final_ty: Option<Ty>,
- expressions: Vec<ExprId>,
-}
-
-impl CoerceMany {
- pub(super) fn new(expected: Ty) -> Self {
- CoerceMany { expected_ty: expected, final_ty: None, expressions: vec![] }
+impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
+ #[inline]
+ fn set_tainted_by_errors(&mut self) {
+ *self.has_errors = true;
}
- /// Returns the "expected type" with which this coercion was
- /// constructed. This represents the "downward propagated" type
- /// that was given to us at the start of typing whatever construct
- /// we are typing (e.g., the match expression).
- ///
- /// Typically, this is used as the expected type when
- /// type-checking each of the alternative expressions whose types
- /// we are trying to merge.
- pub(super) fn expected_ty(&self) -> Ty {
- self.expected_ty.clone()
+ #[inline]
+ fn interner(&self) -> DbInterner<'db> {
+ self.table.interner
}
- /// Returns the current "merged type", representing our best-guess
- /// at the LUB of the expressions we've seen so far (if any). This
- /// isn't *final* until you call `self.complete()`, which will return
- /// the merged type.
- pub(super) fn merged_ty(&self) -> Ty {
- self.final_ty.clone().unwrap_or_else(|| self.expected_ty.clone())
+ #[inline]
+ fn infer_ctxt(&self) -> &InferCtxt<'db> {
+ &self.table.infer_ctxt
}
- pub(super) fn complete(self, ctx: &mut InferenceContext<'_>) -> Ty {
- if let Some(final_ty) = self.final_ty {
- final_ty
- } else {
- ctx.result.standard_types.never.clone()
- }
- }
-
- pub(super) fn coerce_forced_unit(
+ pub(crate) fn commit_if_ok<T, E>(
&mut self,
- ctx: &mut InferenceContext<'_>,
- cause: CoercionCause,
- ) {
- self.coerce(ctx, None, &ctx.result.standard_types.unit.clone(), cause)
+ f: impl FnOnce(&mut Self) -> Result<T, E>,
+ ) -> Result<T, E> {
+ let snapshot = self.table.snapshot();
+ let result = f(self);
+ match result {
+ Ok(_) => {}
+ Err(_) => {
+ self.table.rollback_to(snapshot);
+ }
+ }
+ result
}
- /// Merge two types from different branches, with possible coercion.
- ///
- /// Mostly this means trying to coerce one to the other, but
- /// - if we have two function types for different functions or closures, we need to
- /// coerce both to function pointers;
- /// - if we were concerned with lifetime subtyping, we'd need to look for a
- /// least upper bound.
- pub(super) fn coerce<'db>(
- &mut self,
- ctx: &mut InferenceContext<'db>,
- expr: Option<ExprId>,
- expr_ty: &Ty,
- cause: CoercionCause,
- ) {
- let expr_ty = ctx.resolve_ty_shallow(expr_ty);
- self.expected_ty = ctx.resolve_ty_shallow(&self.expected_ty);
+ fn unify_raw(&mut self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> {
+ debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub);
+ self.commit_if_ok(|this| {
+ let at = this.infer_ctxt().at(&this.cause, this.table.param_env);
- // Special case: two function types. Try to coerce both to
- // pointers to have a chance at getting a match. See
- // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
- let sig = match (self.merged_ty().kind(Interner), expr_ty.kind(Interner)) {
- (TyKind::FnDef(x, _), TyKind::FnDef(y, _))
- if x == y && ctx.table.unify(&self.merged_ty(), &expr_ty) =>
- {
- None
- }
- (TyKind::Closure(x, _), TyKind::Closure(y, _)) if x == y => None,
- (TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
- // FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
- // we should be coercing the closure to a fn pointer of the safety of the FnDef
- cov_mark::hit!(coerce_fn_reification);
- let sig =
- self.merged_ty().callable_sig(ctx.db).expect("FnDef without callable sig");
- Some(sig)
- }
- _ => None,
- };
- if let Some(sig) = sig {
- let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
- let result1 = ctx.table.coerce_inner(self.merged_ty(), &target_ty, CoerceNever::Yes);
- let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty, CoerceNever::Yes);
- if let (Ok(result1), Ok(result2)) = (result1, result2) {
- ctx.table.register_infer_ok(InferOk { value: (), goals: result1.goals });
- for &e in &self.expressions {
- ctx.write_expr_adj(e, result1.value.0.clone().into_boxed_slice());
- }
- ctx.table.register_infer_ok(InferOk { value: (), goals: result2.goals });
- if let Some(expr) = expr {
- ctx.write_expr_adj(expr, result2.value.0.into_boxed_slice());
- self.expressions.push(expr);
- }
- return self.final_ty = Some(target_ty);
- }
- }
-
- // It might not seem like it, but order is important here: If the expected
- // type is a type variable and the new one is `!`, trying it the other
- // way around first would mean we make the type variable `!`, instead of
- // just marking it as possibly diverging.
- //
- // - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335)
- // First try to coerce the new expression to the type of the previous ones,
- // but only if the new expression has no coercion already applied to it.
- if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr))
- && let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes)
- {
- self.final_ty = Some(res);
- if let Some(expr) = expr {
- self.expressions.push(expr);
- }
- return;
- }
-
- if let Ok((adjustments, res)) =
- ctx.coerce_inner(&self.merged_ty(), &expr_ty, CoerceNever::Yes)
- {
- self.final_ty = Some(res);
- for &e in &self.expressions {
- ctx.write_expr_adj(e, adjustments.clone().into_boxed_slice());
- }
- } else {
- match cause {
- CoercionCause::Expr(id) => {
- ctx.result.type_mismatches.insert(
- id.into(),
- TypeMismatch { expected: self.merged_ty(), actual: expr_ty.clone() },
- );
- }
- }
- cov_mark::hit!(coerce_merge_fail_fallback);
- }
- if let Some(expr) = expr {
- self.expressions.push(expr);
- }
- }
-}
-
-pub fn could_coerce(
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- tys: &Canonical<(Ty, Ty)>,
-) -> bool {
- coerce(db, env, tys).is_ok()
-}
-
-pub(crate) fn coerce(
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- tys: &Canonical<(Ty, Ty)>,
-) -> Result<(Vec<Adjustment>, Ty), TypeError> {
- let mut table = InferenceTable::new(db, env);
- let vars = table.fresh_subst(tys.binders.as_slice(Interner));
- let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
- let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
- let (adjustments, ty) = table.coerce(&ty1_with_vars, &ty2_with_vars, CoerceNever::Yes)?;
- // default any type vars that weren't unified back to their original bound vars
- // (kind of hacky)
- let find_var = |iv| {
- vars.iter(Interner).position(|v| match v.interned() {
- chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
- chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
- chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
- } == Some(iv))
- };
- let fallback = |iv, kind, default, binder| match kind {
- chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
- .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
- chalk_ir::VariableKind::Lifetime => find_var(iv)
- .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
- chalk_ir::VariableKind::Const(ty) => find_var(iv)
- .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
- };
- // FIXME also map the types in the adjustments
- Ok((adjustments, table.resolve_with_fallback(ty, &fallback)))
-}
-
-#[derive(Clone, Copy, PartialEq, Eq)]
-pub(crate) enum CoerceNever {
- Yes,
- No,
-}
-
-impl InferenceContext<'_> {
- /// Unify two types, but may coerce the first one to the second one
- /// using "implicit coercion rules" if needed.
- pub(super) fn coerce(
- &mut self,
- expr: Option<ExprId>,
- from_ty: &Ty,
- to_ty: &Ty,
- // [Comment from rustc](https://github.com/rust-lang/rust/blob/4cc494bbfe9911d24f3ee521f98d5c6bb7e3ffe8/compiler/rustc_hir_typeck/src/coercion.rs#L85-L89)
- // Whether we allow `NeverToAny` coercions. This is unsound if we're
- // coercing a place expression without it counting as a read in the MIR.
- // This is a side-effect of HIR not really having a great distinction
- // between places and values.
- coerce_never: CoerceNever,
- ) -> Result<Ty, TypeError> {
- let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?;
- if let Some(expr) = expr {
- self.write_expr_adj(expr, adjustments.into_boxed_slice());
- }
- Ok(ty)
- }
-
- fn coerce_inner(
- &mut self,
- from_ty: &Ty,
- to_ty: &Ty,
- coerce_never: CoerceNever,
- ) -> Result<(Vec<Adjustment>, Ty), TypeError> {
- let from_ty = self.resolve_ty_shallow(from_ty);
- let to_ty = self.resolve_ty_shallow(to_ty);
- self.table.coerce(&from_ty, &to_ty, coerce_never)
- }
-}
-
-impl<'db> InferenceTable<'db> {
- /// Unify two types, but may coerce the first one to the second one
- /// using "implicit coercion rules" if needed.
- pub(crate) fn coerce(
- &mut self,
- from_ty: &Ty,
- to_ty: &Ty,
- coerce_never: CoerceNever,
- ) -> Result<(Vec<Adjustment>, Ty), TypeError> {
- let from_ty = self.structurally_resolve_type(from_ty);
- let to_ty = self.structurally_resolve_type(to_ty);
- match self.coerce_inner(from_ty, &to_ty, coerce_never) {
- Ok(InferOk { value: (adjustments, ty), goals }) => {
- self.register_infer_ok(InferOk { value: (), goals });
- Ok((adjustments, ty))
- }
- Err(e) => {
- // FIXME deal with error
- Err(e)
- }
- }
- }
-
- fn coerce_inner(
- &mut self,
- from_ty: Ty,
- to_ty: &Ty,
- coerce_never: CoerceNever,
- ) -> CoerceResult<'db> {
- if from_ty.is_never() {
- if let TyKind::InferenceVar(tv, TyVariableKind::General) = to_ty.kind(Interner) {
- self.set_diverging(*tv, TyVariableKind::General);
- }
- if coerce_never == CoerceNever::Yes {
- // Subtle: If we are coercing from `!` to `?T`, where `?T` is an unbound
- // type variable, we want `?T` to fallback to `!` if not
- // otherwise constrained. An example where this arises:
- //
- // let _: Option<?T> = Some({ return; });
- //
- // here, we would coerce from `!` to `?T`.
- return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]);
+ let res = if this.use_lub {
+ at.lub(b, a)
} else {
- return self.unify_and(&from_ty, to_ty, identity);
+ at.sup(DefineOpaqueTypes::Yes, b, a)
+ .map(|InferOk { value: (), obligations }| InferOk { value: b, obligations })
+ };
+
+ // In the new solver, lazy norm may allow us to shallowly equate
+ // more types, but we emit possibly impossible-to-satisfy obligations.
+ // Filter these cases out to make sure our coercion is more accurate.
+ match res {
+ Ok(InferOk { value, obligations }) => {
+ let mut ocx = ObligationCtxt::new(this.infer_ctxt());
+ ocx.register_obligations(obligations);
+ if ocx.select_where_possible().is_empty() {
+ Ok(InferOk { value, obligations: ocx.into_pending_obligations() })
+ } else {
+ Err(TypeError::Mismatch)
+ }
+ }
+ res => res,
+ }
+ })
+ }
+
+ /// Unify two types (using sub or lub).
+ fn unify(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
+ self.unify_raw(a, b)
+ .and_then(|InferOk { value: ty, obligations }| success(vec![], ty, obligations))
+ }
+
+ /// Unify two types (using sub or lub) and produce a specific coercion.
+ fn unify_and(
+ &mut self,
+ a: Ty<'db>,
+ b: Ty<'db>,
+ adjustments: impl IntoIterator<Item = Adjustment>,
+ final_adjustment: Adjust,
+ ) -> CoerceResult<'db> {
+ self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| {
+ success(
+ adjustments
+ .into_iter()
+ .chain(std::iter::once(Adjustment {
+ target: ty.to_chalk(self.interner()),
+ kind: final_adjustment,
+ }))
+ .collect(),
+ ty,
+ obligations,
+ )
+ })
+ }
+
+ #[instrument(skip(self))]
+ fn coerce(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
+ // First, remove any resolved type variables (at the top level, at least):
+ let a = self.table.shallow_resolve(a);
+ let b = self.table.shallow_resolve(b);
+ debug!("Coerce.tys({:?} => {:?})", a, b);
+
+ // Coercing from `!` to any type is allowed:
+ if a.is_never() {
+ // If we're coercing into an inference var, mark it as possibly diverging.
+ // FIXME: rustc does this differently.
+ if let TyKind::Infer(rustc_type_ir::TyVar(b)) = b.kind() {
+ self.table.set_diverging(b.as_u32().into(), chalk_ir::TyVariableKind::General);
+ }
+
+ if self.coerce_never {
+ return success(
+ vec![Adjustment {
+ kind: Adjust::NeverToAny,
+ target: b.to_chalk(self.interner()),
+ }],
+ b,
+ PredicateObligations::new(),
+ );
+ } else {
+ // Otherwise the only coercion we can do is unification.
+ return self.unify(a, b);
}
}
// If we are coercing into a TAIT, coerce into its proxy inference var, instead.
- let mut to_ty = to_ty;
- let _to;
- if let Some(tait_table) = &self.tait_coercion_table
- && let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner)
- && !matches!(from_ty.kind(Interner), TyKind::InferenceVar(..) | TyKind::OpaqueType(..))
- && let Some(ty) = tait_table.get(opaque_ty_id)
+ // FIXME(next-solver): This should not be here. This is not how rustc does things, and it also does not allow us
+ // to normalize opaques defined in our scopes. Instead, we should properly register
+ // `TypingMode::Analysis::defining_opaque_types_and_generators`, and rely on the solver to reveal
+ // them for us (we'll also need some global-like registry for the values, something we cannot
+ // really implement, therefore we can only really support RPITs and ITIAT or the new `#[define_opaque]`
+ // TAIT, not the old global TAIT).
+ let mut b = b;
+ if let Some(tait_table) = &self.table.tait_coercion_table
+ && let TyKind::Alias(rustc_type_ir::Opaque, opaque_ty) = b.kind()
+ && let SolverDefId::InternedOpaqueTyId(opaque_ty_id) = opaque_ty.def_id
+ && !matches!(a.kind(), TyKind::Infer(..) | TyKind::Alias(rustc_type_ir::Opaque, _))
+ && let Some(ty) = tait_table.get(&opaque_ty_id.into())
{
- _to = ty.clone();
- to_ty = &_to;
+ b = ty.to_nextsolver(self.interner());
+ b = self.table.shallow_resolve(b);
+ }
+ let b = b;
+
+ // Coercing *from* an unresolved inference variable means that
+ // we have no information about the source type. This will always
+ // ultimately fall back to some form of subtyping.
+ if a.is_infer() {
+ return self.coerce_from_inference_variable(a, b);
}
// Consider coercing the subtype to a DST
- if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
- return Ok(ret);
+ //
+ // NOTE: this is wrapped in a `commit_if_ok` because it creates
+ // a "spurious" type variable, and we don't want to have that
+ // type variable in memory if the coercion fails.
+ let unsize = self.commit_if_ok(|this| this.coerce_unsized(a, b));
+ match unsize {
+ Ok(_) => {
+ debug!("coerce: unsize successful");
+ return unsize;
+ }
+ Err(error) => {
+ debug!(?error, "coerce: unsize failed");
+ }
}
- // Examine the supertype and consider auto-borrowing.
- match to_ty.kind(Interner) {
- TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt),
- TyKind::Ref(mt, lt, _) => return self.coerce_ref(from_ty, to_ty, *mt, lt),
+ // Examine the supertype and consider type-specific coercions, such
+ // as auto-borrowing, coercing pointer mutability, a `dyn*` coercion,
+ // or pin-ergonomics.
+ match b.kind() {
+ TyKind::RawPtr(_, b_mutbl) => {
+ return self.coerce_raw_ptr(a, b, b_mutbl);
+ }
+ TyKind::Ref(r_b, _, mutbl_b) => {
+ return self.coerce_borrowed_pointer(a, b, r_b, mutbl_b);
+ }
_ => {}
}
- match from_ty.kind(Interner) {
+ match a.kind() {
TyKind::FnDef(..) => {
// Function items are coercible to any closure
// type; function pointers are not (that would
// require double indirection).
// Additionally, we permit coercion of function
// items to drop the unsafe qualifier.
- self.coerce_from_fn_item(from_ty, to_ty)
+ self.coerce_from_fn_item(a, b)
}
- TyKind::Function(from_fn_ptr) => {
+ TyKind::FnPtr(a_sig_tys, a_hdr) => {
// We permit coercion of fn pointers to drop the
// unsafe qualifier.
- self.coerce_from_fn_pointer(from_ty.clone(), from_fn_ptr, to_ty)
+ self.coerce_from_fn_pointer(a_sig_tys.with(a_hdr), b)
}
- TyKind::Closure(_, from_substs) => {
+ TyKind::Closure(closure_def_id_a, args_a) => {
// Non-capturing closures are coercible to
// function pointers or unsafe function pointers.
// It cannot convert closures that require unsafe.
- self.coerce_closure_to_fn(from_ty.clone(), from_substs, to_ty)
+ self.coerce_closure_to_fn(a, closure_def_id_a.0, args_a, b)
}
_ => {
// Otherwise, just use unification rules.
- self.unify_and(&from_ty, to_ty, identity)
+ self.unify(a, b)
}
}
}
- /// Unify two types (using sub or lub) and produce a specific coercion.
- fn unify_and<F>(&mut self, t1: &Ty, t2: &Ty, f: F) -> CoerceResult<'db>
- where
- F: FnOnce(Ty) -> Vec<Adjustment>,
- {
- self.try_unify(t1, t2)
- .and_then(|InferOk { goals, .. }| success(f(t1.clone()), t1.clone(), goals))
- }
+ /// Coercing *from* an inference variable. In this case, we have no information
+ /// about the source type, so we can't really do a true coercion and we always
+ /// fall back to subtyping (`unify`).
+ fn coerce_from_inference_variable(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
+ debug!("coerce_from_inference_variable(a={:?}, b={:?})", a, b);
+ debug_assert!(a.is_infer() && self.table.shallow_resolve(a) == a);
+ debug_assert!(self.table.shallow_resolve(b) == b);
- fn coerce_ptr(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult<'db> {
- let (is_ref, from_mt, from_inner) = match from_ty.kind(Interner) {
- TyKind::Ref(mt, _, ty) => (true, mt, ty),
- TyKind::Raw(mt, ty) => (false, mt, ty),
- _ => return self.unify_and(&from_ty, to_ty, identity),
- };
+ if b.is_infer() {
+ // Two unresolved type variables: create a `Coerce` predicate.
+ let target_ty = if self.use_lub { self.table.next_ty_var() } else { b };
- coerce_mutabilities(*from_mt, to_mt)?;
+ let mut obligations = PredicateObligations::with_capacity(2);
+ for &source_ty in &[a, b] {
+ if source_ty != target_ty {
+ obligations.push(Obligation::new(
+ self.interner(),
+ self.cause.clone(),
+ self.table.param_env,
+ Binder::dummy(PredicateKind::Coerce(CoercePredicate {
+ a: source_ty,
+ b: target_ty,
+ })),
+ ));
+ }
+ }
- // Check that the types which they point at are compatible.
- let from_raw = TyKind::Raw(to_mt, from_inner.clone()).intern(Interner);
-
- // Although references and raw ptrs have the same
- // representation, we still register an Adjust::DerefRef so that
- // regionck knows that the region for `a` must be valid here.
- if is_ref {
- self.unify_and(&from_raw, to_ty, |target| {
- vec![
- Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
- Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)), target },
- ]
- })
- } else if *from_mt != to_mt {
- self.unify_and(
- &from_raw,
- to_ty,
- simple(Adjust::Pointer(PointerCast::MutToConstPointer)),
- )
+ debug!(
+ "coerce_from_inference_variable: two inference variables, target_ty={:?}, obligations={:?}",
+ target_ty, obligations
+ );
+ success(vec![], target_ty, obligations)
} else {
- self.unify_and(&from_raw, to_ty, identity)
+ // One unresolved type variable: just apply subtyping, we may be able
+ // to do something useful.
+ self.unify(a, b)
}
}
/// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
/// To match `A` with `B`, autoderef will be performed,
/// calling `deref`/`deref_mut` where necessary.
- fn coerce_ref(
+ fn coerce_borrowed_pointer(
&mut self,
- from_ty: Ty,
- to_ty: &Ty,
- to_mt: Mutability,
- to_lt: &Lifetime,
+ a: Ty<'db>,
+ b: Ty<'db>,
+ r_b: Region<'db>,
+ mutbl_b: Mutability,
) -> CoerceResult<'db> {
- let (_from_lt, from_mt) = match from_ty.kind(Interner) {
- TyKind::Ref(mt, lt, _) => {
- coerce_mutabilities(*mt, to_mt)?;
- (lt.clone(), *mt) // clone is probably not good?
+ debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b);
+ debug_assert!(self.table.shallow_resolve(a) == a);
+ debug_assert!(self.table.shallow_resolve(b) == b);
+
+ // If we have a parameter of type `&M T_a` and the value
+ // provided is `expr`, we will be adding an implicit borrow,
+ // meaning that we convert `f(expr)` to `f(&M *expr)`. Therefore,
+ // to type check, we will construct the type that `&M*expr` would
+ // yield.
+
+ let (r_a, mt_a) = match a.kind() {
+ TyKind::Ref(r_a, ty, mutbl) => {
+ let mt_a = TypeAndMut::<DbInterner<'db>> { ty, mutbl };
+ coerce_mutbls(mt_a.mutbl, mutbl_b)?;
+ (r_a, mt_a)
}
- _ => return self.unify_and(&from_ty, to_ty, identity),
+ _ => return self.unify(a, b),
};
- // NOTE: this code is mostly copied and adapted from rustc, and
- // currently more complicated than necessary, carrying errors around
- // etc.. This complication will become necessary when we actually track
- // details of coercion errors though, so I think it's useful to leave
- // the structure like it is.
-
- let snapshot = self.snapshot();
-
- let mut autoderef = Autoderef::new(self, from_ty.clone(), false, false);
let mut first_error = None;
+ let mut r_borrow_var = None;
+ let mut autoderef = Autoderef::new(self.table, a);
let mut found = None;
while let Some((referent_ty, autoderefs)) = autoderef.next() {
@@ -456,7 +392,7 @@
continue;
}
- // At this point, we have deref'd `a` to `referent_ty`. So
+ // At this point, we have deref'd `a` to `referent_ty`. So
// imagine we are coercing from `&'a mut Vec<T>` to `&'b mut [T]`.
// In the autoderef loop for `&'a mut Vec<T>`, we would get
// three callbacks:
@@ -478,11 +414,85 @@
// compare those. Note that this means we use the target
// mutability [1], since it may be that we are coercing
// from `&mut T` to `&U`.
- let lt = to_lt; // FIXME: Involve rustc LUB and SUB flag checks
- let derefd_from_ty = TyKind::Ref(to_mt, lt.clone(), referent_ty).intern(Interner);
- match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
- Ok(result) => {
- found = Some(result.map(|()| derefd_from_ty));
+ //
+ // One fine point concerns the region that we use. We
+ // choose the region such that the region of the final
+ // type that results from `unify` will be the region we
+ // want for the autoref:
+ //
+ // - if in sub mode, that means we want to use `'b` (the
+ // region from the target reference) for both
+ // pointers [2]. This is because sub mode (somewhat
+ // arbitrarily) returns the subtype region. In the case
+ // where we are coercing to a target type, we know we
+ // want to use that target type region (`'b`) because --
+ // for the program to type-check -- it must be the
+ // smaller of the two.
+ // - One fine point. It may be surprising that we can
+ // use `'b` without relating `'a` and `'b`. The reason
+ // that this is ok is that what we produce is
+ // effectively a `&'b *x` expression (if you could
+ // annotate the region of a borrow), and regionck has
+ // code that adds edges from the region of a borrow
+ // (`'b`, here) into the regions in the borrowed
+ // expression (`*x`, here). (Search for "link".)
+ // - if in lub mode, things can get fairly complicated. The
+ // easiest thing is just to make a fresh
+ // region variable [4], which effectively means we defer
+ // the decision to region inference (and regionck, which will add
+ // some more edges to this variable). However, this can wind up
+ // creating a crippling number of variables in some cases --
+ // e.g., #32278 -- so we optimize one particular case [3].
+ // Let me try to explain with some examples:
+ // - The "running example" above represents the simple case,
+ // where we have one `&` reference at the outer level and
+ // ownership all the rest of the way down. In this case,
+ // we want `LUB('a, 'b)` as the resulting region.
+ // - However, if there are nested borrows, that region is
+ // too strong. Consider a coercion from `&'a &'x Rc<T>` to
+ // `&'b T`. In this case, `'a` is actually irrelevant.
+ // The pointer we want is `LUB('x, 'b)`. If we choose `LUB('a, 'b)`
+ // we get spurious errors (`ui/regions-lub-ref-ref-rc.rs`).
+ // (The errors actually show up in borrowck, typically, because
+ // this extra edge causes the region `'a` to be inferred to something
+ // too big, which then results in borrowck errors.)
+ // - We could track the innermost shared reference, but there is already
+ // code in regionck that has the job of creating links between
+ // the region of a borrow and the regions in the thing being
+ // borrowed (here, `'a` and `'x`), and it knows how to handle
+ // all the various cases. So instead we just make a region variable
+ // and let regionck figure it out.
+ let r = if !self.use_lub {
+ r_b // [2] above
+ } else if autoderefs == 1 {
+ r_a // [3] above
+ } else {
+ if r_borrow_var.is_none() {
+ // create var lazily, at most once
+ let r = autoderef.table.next_region_var();
+ r_borrow_var = Some(r); // [4] above
+ }
+ r_borrow_var.unwrap()
+ };
+ let derefd_ty_a = Ty::new_ref(
+ autoderef.table.interner,
+ r,
+ referent_ty,
+ mutbl_b, // [1] above
+ );
+ // We need to construct a new `Coerce` because of lifetimes.
+ let mut coerce = Coerce {
+ table: autoderef.table,
+ has_errors: self.has_errors,
+ target_features: self.target_features,
+ use_lub: self.use_lub,
+ allow_two_phase: self.allow_two_phase,
+ coerce_never: self.coerce_never,
+ cause: self.cause.clone(),
+ };
+ match coerce.unify_raw(derefd_ty_a, b) {
+ Ok(ok) => {
+ found = Some(ok);
break;
}
Err(err) => {
@@ -498,18 +508,24 @@
// (e.g., in example above, the failure from relating `Vec<T>`
// to the target type), since that should be the least
// confusing.
- let InferOk { value: ty, goals } = match found {
- Some(d) => d,
- None => {
- self.rollback_to(snapshot);
- let err = first_error.expect("coerce_borrowed_pointer had no error");
- return Err(err);
+ let Some(InferOk { value: ty, mut obligations }) = found else {
+ if let Some(first_error) = first_error {
+ debug!("coerce_borrowed_pointer: failed with err = {:?}", first_error);
+ return Err(first_error);
+ } else {
+ // This may happen in the new trait solver since autoderef requires
+ // the pointee to be structurally normalizable, or else it'll just bail.
+ // So when we have a type like `&<not well formed>`, then we get no
+ // autoderef steps (even though there should be at least one). That means
+ // we get no type mismatches, since the loop above just exits early.
+ return Err(TypeError::Mismatch);
}
};
- if ty == from_ty && from_mt == Mutability::Not && autoderef.step_count() == 1 {
+
+ if ty == a && mt_a.mutbl.is_not() && autoderef.step_count() == 1 {
// As a special case, if we would produce `&'a *x`, that's
// a total no-op. We end up with the type `&'a T` just as
- // we started with. In that case, just skip it
+ // we started with. In that case, just skip it
// altogether. This is just an optimization.
//
// Note that for `&mut`, we DO want to reborrow --
@@ -518,259 +534,1091 @@
// `self.x` both have `&mut `type would be a move of
// `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
// which is a borrow.
- always!(to_mt == Mutability::Not); // can only coerce &T -> &U
- return success(vec![], ty, goals);
+ assert!(mutbl_b.is_not()); // can only coerce &T -> &U
+ return success(vec![], ty, obligations);
}
- let mut adjustments = auto_deref_adjust_steps(&autoderef);
+ let InferOk { value: mut adjustments, obligations: o } =
+ autoderef.adjust_steps_as_infer_ok();
+ obligations.extend(o);
+
+ // Now apply the autoref. We have to extract the region out of
+ // the final ref type we got.
+ let TyKind::Ref(region, _, _) = ty.kind() else {
+ panic!("expected a ref type, got {:?}", ty);
+ };
adjustments.push(Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(to_lt.clone(), to_mt)),
- target: ty.clone(),
+ kind: Adjust::Borrow(AutoBorrow::Ref(
+ region.to_chalk(self.interner()),
+ mutbl_b.to_chalk(self.interner()),
+ )),
+ target: ty.to_chalk(self.interner()),
});
- success(adjustments, ty, goals)
+ debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments);
+
+ success(adjustments, ty, obligations)
}
- /// Attempts to coerce from the type of a Rust function item into a function pointer.
- fn coerce_from_fn_item(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult<'db> {
- match to_ty.kind(Interner) {
- TyKind::Function(_) => {
- let from_sig = from_ty.callable_sig(self.db).expect("FnDef had no sig");
+ /// Performs [unsized coercion] by emulating a fulfillment loop on a
+ /// `CoerceUnsized` goal until all `CoerceUnsized` and `Unsize` goals
+ /// are successfully selected.
+ ///
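+ /// Illustrative sketch (hypothetical `Foo` and `Trait`):
+ ///
+ /// ```ignore (illustrative)
+ /// let slice: &[u8] = &[1, 2, 3, 4];          // &[u8; 4] -> &[u8]
+ /// let boxed: Box<dyn Trait> = Box::new(Foo); // Box<Foo> -> Box<dyn Trait>, given Foo: Trait
+ /// ```
+ ///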
+ /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
+ #[instrument(skip(self), level = "debug")]
+ fn coerce_unsized(&mut self, source: Ty<'db>, target: Ty<'db>) -> CoerceResult<'db> {
+ debug!(?source, ?target);
+ debug_assert!(self.table.shallow_resolve(source) == source);
+ debug_assert!(self.table.shallow_resolve(target) == target);
- // FIXME check ABI: Intrinsics are not coercible to function pointers
- // FIXME Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396)
-
- // FIXME rustc normalizes assoc types in the sig here, not sure if necessary
-
- let from_sig = from_sig.to_fn_ptr();
- let from_fn_pointer = TyKind::Function(from_sig.clone()).intern(Interner);
- let ok = self.coerce_from_safe_fn(
- from_fn_pointer.clone(),
- &from_sig,
- to_ty,
- |unsafe_ty| {
- vec![
- Adjustment {
- kind: Adjust::Pointer(PointerCast::ReifyFnPointer),
- target: from_fn_pointer,
- },
- Adjustment {
- kind: Adjust::Pointer(PointerCast::UnsafeFnPointer),
- target: unsafe_ty,
- },
- ]
- },
- simple(Adjust::Pointer(PointerCast::ReifyFnPointer)),
- )?;
-
- Ok(ok)
- }
- _ => self.unify_and(&from_ty, to_ty, identity),
+ // We don't apply any coercions in case either the source or the target
+ // isn't sufficiently well known, but instead tend to just equate
+ // them both.
+ if source.is_infer() {
+ debug!("coerce_unsized: source is a TyVar, bailing out");
+ return Err(TypeError::Mismatch);
}
- }
+ if target.is_infer() {
+ debug!("coerce_unsized: target is a TyVar, bailing out");
+ return Err(TypeError::Mismatch);
+ }
- fn coerce_from_fn_pointer(
- &mut self,
- from_ty: Ty,
- from_f: &FnPointer,
- to_ty: &Ty,
- ) -> CoerceResult<'db> {
- self.coerce_from_safe_fn(
- from_ty,
- from_f,
- to_ty,
- simple(Adjust::Pointer(PointerCast::UnsafeFnPointer)),
- identity,
- )
- }
-
- fn coerce_from_safe_fn<F, G>(
- &mut self,
- from_ty: Ty,
- from_fn_ptr: &FnPointer,
- to_ty: &Ty,
- to_unsafe: F,
- normal: G,
- ) -> CoerceResult<'db>
- where
- F: FnOnce(Ty) -> Vec<Adjustment>,
- G: FnOnce(Ty) -> Vec<Adjustment>,
- {
- if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner)
- && let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) =
- (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety)
+ // This is an optimization because coercion is one of the most common
+ // operations that we do in typeck, since it happens at every assignment
+ // and call arg (among other positions).
+ //
+ // These targets are known to never be RHS in `LHS: CoerceUnsized<RHS>`.
+ // That's because these are built-in types for which a core-provided impl
+ // doesn't exist, and for which a user-written impl is invalid.
+ //
+ // This is technically incomplete when users write impossible bounds like
+ // `where T: CoerceUnsized<usize>`, for example, but that trait is unstable
+ // and coercion is allowed to be incomplete. The only case where this matters
+ // is impossible bounds.
+ //
+ // Note that some of these types implement `LHS: Unsize<RHS>`, but they
+ // do not implement *`CoerceUnsized`* which is the root obligation of the
+ // check below.
+ match target.kind() {
+ TyKind::Bool
+ | TyKind::Char
+ | TyKind::Int(_)
+ | TyKind::Uint(_)
+ | TyKind::Float(_)
+ | TyKind::Infer(rustc_type_ir::IntVar(_) | rustc_type_ir::FloatVar(_))
+ | TyKind::Str
+ | TyKind::Array(_, _)
+ | TyKind::Slice(_)
+ | TyKind::FnDef(_, _)
+ | TyKind::FnPtr(_, _)
+ | TyKind::Dynamic(_, _)
+ | TyKind::Closure(_, _)
+ | TyKind::CoroutineClosure(_, _)
+ | TyKind::Coroutine(_, _)
+ | TyKind::CoroutineWitness(_, _)
+ | TyKind::Never
+ | TyKind::Tuple(_) => return Err(TypeError::Mismatch),
+ _ => {}
+ }
+ // Additionally, we ignore `&str -> &str` coercions, which happen very
+ // commonly since strings are one of the most used argument types in Rust
+ // and we perform coercions when type checking call expressions.
+ if let TyKind::Ref(_, source_pointee, Mutability::Not) = source.kind()
+ && source_pointee.is_str()
+ && let TyKind::Ref(_, target_pointee, Mutability::Not) = target.kind()
+ && target_pointee.is_str()
{
- let from_unsafe =
- TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner);
- return self.unify_and(&from_unsafe, to_ty, to_unsafe);
+ return Err(TypeError::Mismatch);
}
- self.unify_and(&from_ty, to_ty, normal)
+
+ let traits = (
+ LangItem::Unsize.resolve_trait(self.table.db, self.table.trait_env.krate),
+ LangItem::CoerceUnsized.resolve_trait(self.table.db, self.table.trait_env.krate),
+ );
+ let (Some(unsize_did), Some(coerce_unsized_did)) = traits else {
+ debug!("missing Unsize or CoerceUnsized traits");
+ return Err(TypeError::Mismatch);
+ };
+
+ // Note, we want to avoid unnecessary unsizing. We don't want to coerce to
+ // a DST unless we have to. This currently comes out in the wash since
+ // we can't unify [T] with U. But to properly support DST, we need to allow
+ // that, at which point we will need extra checks on the target here.
+
+ // Handle reborrows before selecting `Source: CoerceUnsized<Target>`.
+ let reborrow = match (source.kind(), target.kind()) {
+ (TyKind::Ref(_, ty_a, mutbl_a), TyKind::Ref(_, _, mutbl_b)) => {
+ coerce_mutbls(mutbl_a, mutbl_b)?;
+
+ let r_borrow = self.table.next_region_var();
+
+ // We don't allow two-phase borrows here, at least for initial
+ // implementation. If it happens that this coercion is a function argument,
+ // the reborrow in coerce_borrowed_ptr will pick it up.
+ // let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
+ let mutbl = mutbl_b.to_chalk(self.interner());
+
+ Some((
+ Adjustment {
+ kind: Adjust::Deref(None),
+ target: ty_a.to_chalk(self.interner()),
+ },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(
+ r_borrow.to_chalk(self.interner()),
+ mutbl,
+ )),
+ target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b)
+ .to_chalk(self.interner()),
+ },
+ ))
+ }
+ (TyKind::Ref(_, ty_a, mt_a), TyKind::RawPtr(_, mt_b)) => {
+ coerce_mutbls(mt_a, mt_b)?;
+
+ Some((
+ Adjustment {
+ kind: Adjust::Deref(None),
+ target: ty_a.to_chalk(self.interner()),
+ },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b.to_chalk(self.interner()))),
+ target: Ty::new_ptr(self.interner(), ty_a, mt_b).to_chalk(self.interner()),
+ },
+ ))
+ }
+ _ => None,
+ };
+ let coerce_source =
+ reborrow.as_ref().map_or(source, |(_, r)| r.target.to_nextsolver(self.interner()));
+
+ // Setup either a subtyping or a LUB relationship between
+ // the `CoerceUnsized` target type and the expected type.
+ // We only have the latter, so we use an inference variable
+ // for the former and let type inference do the rest.
+ let coerce_target = self.table.next_ty_var();
+
+ let mut coercion = self.unify_and(
+ coerce_target,
+ target,
+ reborrow.into_iter().flat_map(|(deref, autoref)| [deref, autoref]),
+ Adjust::Pointer(PointerCast::Unsize),
+ )?;
+
+ // Create an obligation for `Source: CoerceUnsized<Target>`.
+ let cause = self.cause.clone();
+
+ // Use a FIFO queue for this custom fulfillment procedure.
+ //
+ // A Vec (or SmallVec) is not a natural choice for a queue. However,
+ // this code path is hot, and this queue usually has a max length of 1
+ // and almost never more than 3. By using a SmallVec we avoid an
+ // allocation, at the (very small) cost of (occasionally) having to
+ // shift subsequent elements down when removing the front element.
+ let mut queue: SmallVec<[PredicateObligation<'db>; 4]> = smallvec![Obligation::new(
+ self.interner(),
+ cause,
+ self.table.param_env,
+ TraitRef::new(
+ self.interner(),
+ coerce_unsized_did.into(),
+ [coerce_source, coerce_target]
+ )
+ )];
+ // Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid
+ // emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where
+ // inference might unify those two inner type variables later.
+ let traits = [coerce_unsized_did, unsize_did];
+ while !queue.is_empty() {
+ let obligation = queue.remove(0);
+ let trait_pred = match obligation.predicate.kind().no_bound_vars() {
+ Some(PredicateKind::Clause(ClauseKind::Trait(trait_pred)))
+ if traits.contains(&trait_pred.def_id().0) =>
+ {
+ self.infer_ctxt().resolve_vars_if_possible(trait_pred)
+ }
+ // Eagerly process alias-relate obligations in new trait solver,
+ // since these can be emitted in the process of solving trait goals,
+ // but we need to constrain vars before processing goals mentioning
+ // them.
+ Some(PredicateKind::AliasRelate(..)) => {
+ let mut ocx = ObligationCtxt::new(self.infer_ctxt());
+ ocx.register_obligation(obligation);
+ if !ocx.select_where_possible().is_empty() {
+ return Err(TypeError::Mismatch);
+ }
+ coercion.obligations.extend(ocx.into_pending_obligations());
+ continue;
+ }
+ _ => {
+ coercion.obligations.push(obligation);
+ continue;
+ }
+ };
+ debug!("coerce_unsized resolve step: {:?}", trait_pred);
+ match self.infer_ctxt().select(&obligation.with(self.interner(), trait_pred)) {
+ // Uncertain or unimplemented.
+ Ok(None) => {
+ if trait_pred.def_id().0 == unsize_did {
+ let self_ty = trait_pred.self_ty();
+ let unsize_ty = trait_pred.trait_ref.args.inner()[1].expect_ty();
+ debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred);
+ match (self_ty.kind(), unsize_ty.kind()) {
+ (TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..))
+ if self.table.type_var_is_sized(v) =>
+ {
+ debug!("coerce_unsized: have sized infer {:?}", v);
+ coercion.obligations.push(obligation);
+ // `$0: Unsize<dyn Trait>` where we know that `$0: Sized`, try going
+ // for unsizing.
+ }
+ _ => {
+ // Some other case for `$0: Unsize<Something>`. Note that we
+ // hit this case even if `Something` is a sized type, so just
+ // don't do the coercion.
+ debug!("coerce_unsized: ambiguous unsize");
+ return Err(TypeError::Mismatch);
+ }
+ }
+ } else {
+ debug!("coerce_unsized: early return - ambiguous");
+ if !coerce_source.references_non_lt_error()
+ && !coerce_target.references_non_lt_error()
+ {
+ // rustc always early-returns here, even when the types contain errors. However, not bailing
+ // improves error recovery, and while we don't implement generic consts properly, it also helps
+ // correct code.
+ return Err(TypeError::Mismatch);
+ }
+ }
+ }
+ Err(SelectionError::Unimplemented) => {
+ debug!("coerce_unsized: early return - can't prove obligation");
+ return Err(TypeError::Mismatch);
+ }
+
+ Err(SelectionError::TraitDynIncompatible(_)) => {
+ // Dyn compatibility errors in coercion will *always* be due to the
+ // fact that the RHS of the coercion is a non-dyn compatible `dyn Trait`
+ // written in source somewhere (otherwise we will never have lowered
+ // the dyn trait from HIR to middle).
+ //
+ // There's no reason to emit yet another dyn compatibility error,
+ // especially since the span will differ slightly and thus not be
+ // deduplicated at all!
+ self.set_tainted_by_errors();
+ }
+ Err(_err) => {
+ // FIXME: Report an error:
+ // let guar = self.err_ctxt().report_selection_error(
+ // obligation.clone(),
+ // &obligation,
+ // &err,
+ // );
+ self.set_tainted_by_errors();
+ // Treat this like an obligation and follow through
+ // with the unsizing - the lack of a coercion should
+ // be silent, as it causes a type mismatch later.
+ }
+
+ Ok(Some(ImplSource::UserDefined(impl_source))) => {
+ queue.extend(impl_source.nested);
+ }
+ Ok(Some(impl_source)) => queue.extend(impl_source.nested_obligations()),
+ }
+ }
+
+ Ok(coercion)
}
- /// Attempts to coerce from the type of a non-capturing closure into a
- /// function pointer.
+ fn coerce_from_safe_fn(
+ &mut self,
+ fn_ty_a: PolyFnSig<'db>,
+ b: Ty<'db>,
+ adjustment: Option<Adjust>,
+ ) -> CoerceResult<'db> {
+ debug_assert!(self.table.shallow_resolve(b) == b);
+
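+        // Illustrative: a safe `fn` value can coerce to an `unsafe fn` pointer,
+        // e.g. `let f: unsafe fn(i32) = some_safe_fn;` (the `UnsafeFnPointer`
+        // cast below); otherwise we simply unify the two types.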
+ self.commit_if_ok(|this| {
+ if let TyKind::FnPtr(_, hdr_b) = b.kind()
+ && fn_ty_a.safety().is_safe()
+ && !hdr_b.safety.is_safe()
+ {
+ let unsafe_a = Ty::safe_to_unsafe_fn_ty(this.interner(), fn_ty_a);
+ this.unify_and(
+ unsafe_a,
+ b,
+ adjustment.map(|kind| Adjustment {
+ kind,
+ target: Ty::new_fn_ptr(this.interner(), fn_ty_a).to_chalk(this.interner()),
+ }),
+ Adjust::Pointer(PointerCast::UnsafeFnPointer),
+ )
+ } else {
+ let a = Ty::new_fn_ptr(this.interner(), fn_ty_a);
+ match adjustment {
+ Some(adjust) => this.unify_and(a, b, [], adjust),
+ None => this.unify(a, b),
+ }
+ }
+ })
+ }
+
+ fn coerce_from_fn_pointer(&mut self, fn_ty_a: PolyFnSig<'db>, b: Ty<'db>) -> CoerceResult<'db> {
+ debug!(?fn_ty_a, ?b, "coerce_from_fn_pointer");
+ debug_assert!(self.table.shallow_resolve(b) == b);
+
+ self.coerce_from_safe_fn(fn_ty_a, b, None)
+ }
+
+ fn coerce_from_fn_item(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
+ debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b);
+ debug_assert!(self.table.shallow_resolve(a) == a);
+ debug_assert!(self.table.shallow_resolve(b) == b);
+
+ match b.kind() {
+ TyKind::FnPtr(_, b_hdr) => {
+ let a_sig = a.fn_sig(self.interner());
+ if let TyKind::FnDef(def_id, _) = a.kind() {
+ // Intrinsics are not coercible to function pointers
+ if let CallableDefId::FunctionId(def_id) = def_id.0 {
+ if FunctionSignature::is_intrinsic(self.table.db, def_id) {
+ return Err(TypeError::IntrinsicCast);
+ }
+
+ let attrs = self.table.db.attrs(def_id.into());
+ if attrs.by_key(sym::rustc_force_inline).exists() {
+ return Err(TypeError::ForceInlineCast);
+ }
+
+ if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() {
+ let fn_target_features =
+ TargetFeatures::from_attrs_no_implications(&attrs);
+ // Allow the coercion if the current function has all the features that would be
+ // needed to call the coercee safely.
+ let (target_features, target_feature_is_safe) =
+ (self.target_features)();
+ if target_feature_is_safe == TargetFeatureIsSafeInTarget::No
+ && !target_features.enabled.is_superset(&fn_target_features.enabled)
+ {
+ return Err(TypeError::TargetFeatureCast(
+ CallableIdWrapper(def_id.into()).into(),
+ ));
+ }
+ }
+ }
+ }
+
+ self.coerce_from_safe_fn(
+ a_sig,
+ b,
+ Some(Adjust::Pointer(PointerCast::ReifyFnPointer)),
+ )
+ }
+ _ => self.unify(a, b),
+ }
+ }
+
+ /// Attempts to coerce from the type of a non-capturing closure
+ /// into a function pointer.
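+    ///
+    /// A minimal source-level illustration (capturing closures do not get this
+    /// coercion):
+    ///
+    /// ```ignore (illustrative)
+    /// let f: fn(i32) -> i32 = |x| x + 1; // ok: captures nothing
+    /// let y = 1;
+    /// let g: fn(i32) -> i32 = |x| x + y; // error: captures `y`
+    /// ```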
fn coerce_closure_to_fn(
&mut self,
- from_ty: Ty,
- from_substs: &Substitution,
- to_ty: &Ty,
+ a: Ty<'db>,
+ _closure_def_id_a: InternedClosureId,
+ args_a: GenericArgs<'db>,
+ b: Ty<'db>,
) -> CoerceResult<'db> {
- match to_ty.kind(Interner) {
- // if from_substs is non-capturing (FIXME)
- TyKind::Function(fn_ty) => {
+ debug_assert!(self.table.shallow_resolve(a) == a);
+ debug_assert!(self.table.shallow_resolve(b) == b);
+
+ match b.kind() {
+            // FIXME: We need to have an `upvars_mentioned()` query:
+            // At this point we haven't done capture analysis, which means
+            // that the ClosureArgs just contains an inference variable instead
+            // of a tuple of captured types.
+            //
+            // All we care about here is whether any variable is being captured (not the
+            // exact capture paths), so we check `upvars_mentioned` for root variables
+            // being captured.
+ TyKind::FnPtr(_, hdr) =>
+ // if self
+ // .db
+ // .upvars_mentioned(closure_def_id_a.expect_local())
+ // .is_none_or(|u| u.is_empty()) =>
+ {
// We coerce the closure, which has fn type
// `extern "rust-call" fn((arg0,arg1,...)) -> _`
// to
// `fn(arg0,arg1,...) -> _`
// or
// `unsafe fn(arg0,arg1,...) -> _`
- let safety = fn_ty.sig.safety;
- let pointer_ty = coerce_closure_fn_ty(from_substs, safety);
+ let safety = hdr.safety;
+ let closure_sig = args_a.closure_sig_untupled().map_bound(|mut sig| {
+ sig.safety = hdr.safety;
+ sig
+ });
+ let pointer_ty = Ty::new_fn_ptr(self.interner(), closure_sig);
+ debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})", a, b, pointer_ty);
self.unify_and(
- &pointer_ty,
- to_ty,
- simple(Adjust::Pointer(PointerCast::ClosureFnPointer(safety))),
+ pointer_ty,
+ b,
+ [],
+ Adjust::Pointer(PointerCast::ClosureFnPointer(
+ safety.to_chalk(self.interner()),
+ )),
)
}
- _ => self.unify_and(&from_ty, to_ty, identity),
+ _ => self.unify(a, b),
}
}
- /// Coerce a type using `from_ty: CoerceUnsized<ty_ty>`
+ fn coerce_raw_ptr(&mut self, a: Ty<'db>, b: Ty<'db>, mutbl_b: Mutability) -> CoerceResult<'db> {
+ debug!("coerce_raw_ptr(a={:?}, b={:?})", a, b);
+ debug_assert!(self.table.shallow_resolve(a) == a);
+ debug_assert!(self.table.shallow_resolve(b) == b);
+
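+        // Illustrative source-level cases handled here:
+        //   `let p: *const u8 = &x;`  (reference to raw pointer)
+        //   `let q: *const u8 = p;`   (where `p: *mut u8`, mut-to-const)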
+ let (is_ref, mt_a) = match a.kind() {
+ TyKind::Ref(_, ty, mutbl) => (true, TypeAndMut::<DbInterner<'db>> { ty, mutbl }),
+ TyKind::RawPtr(ty, mutbl) => (false, TypeAndMut { ty, mutbl }),
+ _ => return self.unify(a, b),
+ };
+ coerce_mutbls(mt_a.mutbl, mutbl_b)?;
+
+ // Check that the types which they point at are compatible.
+ let a_raw = Ty::new_ptr(self.interner(), mt_a.ty, mutbl_b);
+        // Although references and raw ptrs have the same
+        // representation, we still register a `Deref` + `Borrow(RawPtr)`
+        // adjustment so that region checking knows that the region for
+        // `a` must be valid here.
+ if is_ref {
+ self.unify_and(
+ a_raw,
+ b,
+ [Adjustment {
+ kind: Adjust::Deref(None),
+ target: mt_a.ty.to_chalk(self.interner()),
+ }],
+ Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b.to_chalk(self.interner()))),
+ )
+ } else if mt_a.mutbl != mutbl_b {
+ self.unify_and(a_raw, b, [], Adjust::Pointer(PointerCast::MutToConstPointer))
+ } else {
+ self.unify(a_raw, b)
+ }
+ }
+}
+
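+/// Whether an expression of the never type (`!`) may be coerced to the target
+/// type at this coercion site. Illustrative: in `let x: u32 = return;` the
+/// `return` expression has type `!` and coerces to `u32`.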
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(crate) enum CoerceNever {
+ No,
+ Yes,
+}
+
+impl<'db> InferenceContext<'db> {
+ /// Attempt to coerce an expression to a type, and return the
+ /// adjusted type of the expression, if successful.
+ /// Adjustments are only recorded if the coercion succeeded.
+    /// The expression *must not* have any preexisting adjustments.
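+    ///
+    /// A few source-level coercions that end up here (illustrative):
+    ///
+    /// ```ignore (illustrative)
+    /// let s: &str = &String::from("hi");   // deref coercion
+    /// let p: *const u8 = &0u8;             // reference to raw pointer
+    /// let d: &dyn std::fmt::Debug = &1u32; // unsized coercion
+    /// ```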
+ pub(crate) fn coerce(
+ &mut self,
+ expr: ExprOrPatId,
+ expr_ty: Ty<'db>,
+ mut target: Ty<'db>,
+ allow_two_phase: AllowTwoPhase,
+ coerce_never: CoerceNever,
+ ) -> RelateResult<'db, Ty<'db>> {
+ let source = self.table.try_structurally_resolve_type(expr_ty);
+ target = self.table.try_structurally_resolve_type(target);
+ debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target);
+
+ let cause = ObligationCause::new();
+ let krate = self.krate();
+ let mut coerce = Coerce {
+ table: &mut self.table,
+ has_errors: &mut self.result.has_errors,
+ cause,
+ allow_two_phase,
+ coerce_never: matches!(coerce_never, CoerceNever::Yes),
+ use_lub: false,
+ target_features: &mut || {
+ Self::target_features(self.db, &self.target_features, self.owner, krate)
+ },
+ };
+ let ok = coerce.commit_if_ok(|coerce| coerce.coerce(source, target))?;
+
+ let (adjustments, _) = self.table.register_infer_ok(ok);
+ match expr {
+ ExprOrPatId::ExprId(expr) => self.write_expr_adj(expr, adjustments.into_boxed_slice()),
+ ExprOrPatId::PatId(pat) => self
+ .write_pat_adj(pat, adjustments.into_iter().map(|adjust| adjust.target).collect()),
+ }
+ Ok(target)
+ }
+
+ /// Given some expressions, their known unified type and another expression,
+ /// tries to unify the types, potentially inserting coercions on any of the
+ /// provided expressions and returns their LUB (aka "common supertype").
///
- /// See: <https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html>
- fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> CoerceResult<'db> {
- // These 'if' statements require some explanation.
- // The `CoerceUnsized` trait is special - it is only
- // possible to write `impl CoerceUnsized<B> for A` where
- // A and B have 'matching' fields. This rules out the following
- // two types of blanket impls:
- //
- // `impl<T> CoerceUnsized<T> for SomeType`
- // `impl<T> CoerceUnsized<SomeType> for T`
- //
- // Both of these trigger a special `CoerceUnsized`-related error (E0376)
- //
- // We can take advantage of this fact to avoid performing unnecessary work.
- // If either `source` or `target` is a type variable, then any applicable impl
- // would need to be generic over the self-type (`impl<T> CoerceUnsized<SomeType> for T`)
- // or generic over the `CoerceUnsized` type parameter (`impl<T> CoerceUnsized<T> for
- // SomeType`).
- //
- // However, these are exactly the kinds of impls which are forbidden by
- // the compiler! Therefore, we can be sure that coercion will always fail
- // when either the source or target type is a type variable. This allows us
- // to skip performing any trait selection, and immediately bail out.
- if from_ty.is_ty_var() {
- return Err(TypeError);
- }
- if to_ty.is_ty_var() {
- return Err(TypeError);
+ /// This is really an internal helper. From outside the coercion
+ /// module, you should instantiate a `CoerceMany` instance.
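+    ///
+    /// Illustrative: for `let f = if c { foo } else { bar };` where `foo` and
+    /// `bar` are distinct `fn()` items, the two arms have different `FnDef`
+    /// types, so the LUB reifies both sides to the function pointer type `fn()`.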
+ fn try_find_coercion_lub(
+ &mut self,
+ exprs: &[ExprId],
+ prev_ty: Ty<'db>,
+ new: ExprId,
+ new_ty: Ty<'db>,
+ ) -> RelateResult<'db, Ty<'db>> {
+ let prev_ty = self.table.try_structurally_resolve_type(prev_ty);
+ let new_ty = self.table.try_structurally_resolve_type(new_ty);
+ debug!(
+ "coercion::try_find_coercion_lub({:?}, {:?}, exprs={:?} exprs)",
+ prev_ty,
+ new_ty,
+ exprs.len()
+ );
+
+ // The following check fixes #88097, where the compiler erroneously
+ // attempted to coerce a closure type to itself via a function pointer.
+ if prev_ty == new_ty {
+ return Ok(prev_ty);
}
- // Handle reborrows before trying to solve `Source: CoerceUnsized<Target>`.
- let reborrow = match (from_ty.kind(Interner), to_ty.kind(Interner)) {
- (TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
- coerce_mutabilities(*from_mt, to_mt)?;
+ let is_force_inline = |ty: Ty<'db>| {
+ if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() {
+ self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists()
+ } else {
+ false
+ }
+ };
+ if is_force_inline(prev_ty) || is_force_inline(new_ty) {
+ return Err(TypeError::ForceInlineCast);
+ }
- let lt = self.new_lifetime_var();
- Some((
- Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
- Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), to_mt)),
- target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner),
+ // Special-case that coercion alone cannot handle:
+ // Function items or non-capturing closures of differing IDs or GenericArgs.
+ let (a_sig, b_sig) = {
+ let is_capturing_closure = |_ty: Ty<'db>| {
+ // FIXME:
+ // if let TyKind::Closure(closure_def_id, _args) = ty.kind() {
+ // self.db.upvars_mentioned(closure_def_id.expect_local()).is_some()
+ // } else {
+ // false
+ // }
+ false
+ };
+ if is_capturing_closure(prev_ty) || is_capturing_closure(new_ty) {
+ (None, None)
+ } else {
+ match (prev_ty.kind(), new_ty.kind()) {
+ (TyKind::FnDef(..), TyKind::FnDef(..)) => {
+ // Don't reify if the function types have a LUB, i.e., they
+ // are the same function and their parameters have a LUB.
+ match self.table.commit_if_ok(|table| {
+ // We need to eagerly handle nested obligations due to lazy norm.
+ let mut ocx = ObligationCtxt::new(&table.infer_ctxt);
+ let value =
+ ocx.lub(&ObligationCause::new(), table.param_env, prev_ty, new_ty)?;
+ if ocx.select_where_possible().is_empty() {
+ Ok(InferOk { value, obligations: ocx.into_pending_obligations() })
+ } else {
+ Err(TypeError::Mismatch)
+ }
+ }) {
+ // We have a LUB of prev_ty and new_ty, just return it.
+ Ok(ok) => return Ok(self.table.register_infer_ok(ok)),
+ Err(_) => (
+ Some(prev_ty.fn_sig(self.table.interner)),
+ Some(new_ty.fn_sig(self.table.interner)),
+ ),
+ }
+ }
+ (TyKind::Closure(_, args), TyKind::FnDef(..)) => {
+ let b_sig = new_ty.fn_sig(self.table.interner);
+ let a_sig = args.closure_sig_untupled().map_bound(|mut sig| {
+ sig.safety = b_sig.safety();
+ sig
+ });
+ (Some(a_sig), Some(b_sig))
+ }
+ (TyKind::FnDef(..), TyKind::Closure(_, args)) => {
+ let a_sig = prev_ty.fn_sig(self.table.interner);
+ let b_sig = args.closure_sig_untupled().map_bound(|mut sig| {
+ sig.safety = a_sig.safety();
+ sig
+ });
+ (Some(a_sig), Some(b_sig))
+ }
+ (TyKind::Closure(_, args_a), TyKind::Closure(_, args_b)) => {
+ (Some(args_a.closure_sig_untupled()), Some(args_b.closure_sig_untupled()))
+ }
+ _ => (None, None),
+ }
+ }
+ };
+ if let (Some(a_sig), Some(b_sig)) = (a_sig, b_sig) {
+ // The signature must match.
+ let sig = self
+ .table
+ .infer_ctxt
+ .at(&ObligationCause::new(), self.table.param_env)
+ .lub(a_sig, b_sig)
+ .map(|ok| self.table.register_infer_ok(ok))?;
+
+ // Reify both sides and return the reified fn pointer type.
+ let fn_ptr = Ty::new_fn_ptr(self.table.interner, sig);
+ let prev_adjustment = match prev_ty.kind() {
+ TyKind::Closure(..) => Adjust::Pointer(PointerCast::ClosureFnPointer(
+ a_sig.safety().to_chalk(self.table.interner),
+ )),
+ TyKind::FnDef(..) => Adjust::Pointer(PointerCast::ReifyFnPointer),
+ _ => panic!("should not try to coerce a {prev_ty:?} to a fn pointer"),
+ };
+ let next_adjustment = match new_ty.kind() {
+ TyKind::Closure(..) => Adjust::Pointer(PointerCast::ClosureFnPointer(
+ b_sig.safety().to_chalk(self.table.interner),
+ )),
+ TyKind::FnDef(..) => Adjust::Pointer(PointerCast::ReifyFnPointer),
+ _ => panic!("should not try to coerce a {new_ty:?} to a fn pointer"),
+ };
+ for &expr in exprs {
+ self.write_expr_adj(
+ expr,
+ Box::new([Adjustment {
+ kind: prev_adjustment.clone(),
+ target: fn_ptr.to_chalk(self.table.interner),
+ }]),
+ );
+ }
+ self.write_expr_adj(
+ new,
+ Box::new([Adjustment {
+ kind: next_adjustment,
+ target: fn_ptr.to_chalk(self.table.interner),
+ }]),
+ );
+ return Ok(fn_ptr);
+ }
+
+ // Configure a Coerce instance to compute the LUB.
+ // We don't allow two-phase borrows on any autorefs this creates since we
+ // probably aren't processing function arguments here and even if we were,
+ // they're going to get autorefed again anyway and we can apply 2-phase borrows
+ // at that time.
+ //
+ // NOTE: we set `coerce_never` to `true` here because coercion LUBs only
+ // operate on values and not places, so a never coercion is valid.
+ let krate = self.krate();
+ let mut coerce = Coerce {
+ table: &mut self.table,
+ has_errors: &mut self.result.has_errors,
+ cause: ObligationCause::new(),
+ allow_two_phase: AllowTwoPhase::No,
+ coerce_never: true,
+ use_lub: true,
+ target_features: &mut || {
+ Self::target_features(self.db, &self.target_features, self.owner, krate)
+ },
+ };
+
+ // First try to coerce the new expression to the type of the previous ones,
+ // but only if the new expression has no coercion already applied to it.
+ let mut first_error = None;
+ if !self.result.expr_adjustments.contains_key(&new) {
+ let result = coerce.commit_if_ok(|coerce| coerce.coerce(new_ty, prev_ty));
+ match result {
+ Ok(ok) => {
+ let (adjustments, target) = self.table.register_infer_ok(ok);
+ self.write_expr_adj(new, adjustments.into_boxed_slice());
+ debug!(
+ "coercion::try_find_coercion_lub: was able to coerce from new type {:?} to previous type {:?} ({:?})",
+ new_ty, prev_ty, target
+ );
+ return Ok(target);
+ }
+ Err(e) => first_error = Some(e),
+ }
+ }
+
+ match coerce.commit_if_ok(|coerce| coerce.coerce(prev_ty, new_ty)) {
+ Err(_) => {
+ // Avoid giving strange errors on failed attempts.
+ if let Some(e) = first_error {
+ Err(e)
+ } else {
+ Err(self
+ .table
+ .commit_if_ok(|table| {
+ table
+ .infer_ctxt
+ .at(&ObligationCause::new(), table.param_env)
+ .lub(prev_ty, new_ty)
+ })
+ .unwrap_err())
+ }
+ }
+ Ok(ok) => {
+ let (adjustments, target) = self.table.register_infer_ok(ok);
+ for &expr in exprs {
+ self.write_expr_adj(expr, adjustments.as_slice().into());
+ }
+ debug!(
+ "coercion::try_find_coercion_lub: was able to coerce previous type {:?} to new type {:?} ({:?})",
+ prev_ty, new_ty, target
+ );
+ Ok(target)
+ }
+ }
+ }
+}
+
+/// CoerceMany encapsulates the pattern you should use when you have
+/// many expressions that are all getting coerced to a common
+/// type. This arises, for example, when you have a match (the result
+/// of each arm is coerced to a common type). It also arises in less
+/// obvious places, such as when you have many `break foo` expressions
+/// that target the same loop, or the various `return` expressions in
+/// a function.
+///
+/// The basic protocol is as follows:
+///
+/// - Instantiate the `CoerceMany` with an initial `expected_ty`.
+/// This will also serve as the "starting LUB". The expectation is
+/// that this type is something which all of the expressions *must*
+/// be coercible to. Use a fresh type variable if needed.
+/// - For each expression whose result is to be coerced, invoke `coerce()` with
+///   that expression and its type.
+/// - In some cases we wish to coerce "non-expressions" whose types are implicitly
+/// unit. This happens for example if you have a `break` with no expression,
+/// or an `if` with no `else`. In that case, invoke `coerce_forced_unit()`.
+/// - `coerce()` and `coerce_forced_unit()` may report errors. They hide this
+/// from you so that you don't have to worry your pretty head about it.
+/// But if an error is reported, the final type will be `err`.
+/// - Invoking `coerce()` may cause us to go and adjust the "adjustments" on
+/// previously coerced expressions.
+/// - When all done, invoke `complete()`. This will return the LUB of
+/// all your expressions.
+/// - WARNING: I don't believe this final type is guaranteed to be
+/// related to your initial `expected_ty` in any particular way,
+/// although it will typically be a subtype, so you should check it.
+/// - Invoking `complete()` may cause us to go and adjust the "adjustments" on
+/// previously coerced expressions.
+///
+/// Example:
+///
+/// ```ignore (illustrative)
+/// let mut coerce = CoerceMany::new(expected_ty);
+/// for expr in exprs {
+/// let expr_ty = fcx.check_expr_with_expectation(expr, expected);
+/// coerce.coerce(fcx, &cause, expr, expr_ty);
+/// }
+/// let final_ty = coerce.complete(fcx);
+/// ```
+#[derive(Debug, Clone)]
+pub(crate) struct CoerceMany<'db, 'exprs> {
+ expected_ty: Ty<'db>,
+ final_ty: Option<Ty<'db>>,
+ expressions: Expressions<'exprs>,
+ pushed: usize,
+}
+
+/// The type of a `CoerceMany` that is storing up the expressions into
+/// a buffer. We use this for things like `break`.
+pub(crate) type DynamicCoerceMany<'db> = CoerceMany<'db, 'db>;
+
+#[derive(Debug, Clone)]
+enum Expressions<'exprs> {
+ Dynamic(SmallVec<[ExprId; 4]>),
+ UpFront(&'exprs [ExprId]),
+}
+
+impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
+ /// The usual case; collect the set of expressions dynamically.
+    /// If the full set of coercion sites is known beforehand,
+ /// consider `with_coercion_sites()` instead to avoid allocation.
+ pub(crate) fn new(expected_ty: Ty<'db>) -> Self {
+ Self::make(expected_ty, Expressions::Dynamic(SmallVec::new()))
+ }
+
+ /// As an optimization, you can create a `CoerceMany` with a
+ /// preexisting slice of expressions. In this case, you are
+ /// expected to pass each element in the slice to `coerce(...)` in
+ /// order. This is used with arrays in particular to avoid
+ /// needlessly cloning the slice.
+ pub(crate) fn with_coercion_sites(
+ expected_ty: Ty<'db>,
+ coercion_sites: &'exprs [ExprId],
+ ) -> Self {
+ Self::make(expected_ty, Expressions::UpFront(coercion_sites))
+ }
+
+ fn make(expected_ty: Ty<'db>, expressions: Expressions<'exprs>) -> Self {
+ CoerceMany { expected_ty, final_ty: None, expressions, pushed: 0 }
+ }
+
+ /// Returns the "expected type" with which this coercion was
+ /// constructed. This represents the "downward propagated" type
+ /// that was given to us at the start of typing whatever construct
+ /// we are typing (e.g., the match expression).
+ ///
+ /// Typically, this is used as the expected type when
+ /// type-checking each of the alternative expressions whose types
+ /// we are trying to merge.
+ pub(crate) fn expected_ty(&self) -> Ty<'db> {
+ self.expected_ty
+ }
+
+ /// Returns the current "merged type", representing our best-guess
+ /// at the LUB of the expressions we've seen so far (if any). This
+ /// isn't *final* until you call `self.complete()`, which will return
+ /// the merged type.
+ pub(crate) fn merged_ty(&self) -> Ty<'db> {
+ self.final_ty.unwrap_or(self.expected_ty)
+ }
+
+ /// Indicates that the value generated by `expression`, which is
+ /// of type `expression_ty`, is one of the possibilities that we
+ /// could coerce from. This will record `expression`, and later
+ /// calls to `coerce` may come back and add adjustments and things
+ /// if necessary.
+ pub(crate) fn coerce(
+ &mut self,
+ icx: &mut InferenceContext<'db>,
+ cause: &ObligationCause,
+ expression: ExprId,
+ expression_ty: Ty<'db>,
+ ) {
+ self.coerce_inner(icx, cause, expression, expression_ty, false, false)
+ }
+
+ /// Indicates that one of the inputs is a "forced unit". This
+ /// occurs in a case like `if foo { ... };`, where the missing else
+ /// generates a "forced unit". Another example is a `loop { break;
+ /// }`, where the `break` has no argument expression. We treat
+ /// these cases slightly differently for error-reporting
+ /// purposes. Note that these tend to correspond to cases where
+ /// the `()` expression is implicit in the source, and hence we do
+ /// not take an expression argument.
+ ///
+    /// In rustc, an `augment_error` callback additionally gives a chance to
+    /// extend the error message (e.g., to suggest removing a `;`); this port
+    /// does not take such a callback.
+ pub(crate) fn coerce_forced_unit(
+ &mut self,
+ icx: &mut InferenceContext<'db>,
+ expr: ExprId,
+ cause: &ObligationCause,
+ label_unit_as_expected: bool,
+ ) {
+ self.coerce_inner(
+ icx,
+ cause,
+ expr,
+ icx.result.standard_types.unit.to_nextsolver(icx.table.interner),
+ true,
+ label_unit_as_expected,
+ )
+ }
+
+    /// The inner coercion "engine". If `force_unit` is true, this is a
+    /// forced-unit case, and hence `expression_ty` must be the unit type.
+ pub(crate) fn coerce_inner(
+ &mut self,
+ icx: &mut InferenceContext<'db>,
+ cause: &ObligationCause,
+ expression: ExprId,
+ mut expression_ty: Ty<'db>,
+ force_unit: bool,
+ label_expression_as_expected: bool,
+ ) {
+ // Incorporate whatever type inference information we have
+ // until now; in principle we might also want to process
+ // pending obligations, but doing so should only improve
+ // compatibility (hopefully that is true) by helping us
+ // uncover never types better.
+ if expression_ty.is_ty_var() {
+ expression_ty = icx.shallow_resolve(expression_ty);
+ }
+
+ let (expected, found) = if label_expression_as_expected {
+ // In the case where this is a "forced unit", like
+ // `break`, we want to call the `()` "expected"
+ // since it is implied by the syntax.
+            // (Note: not all force-units work this way.)
+ (expression_ty, self.merged_ty())
+ } else {
+ // Otherwise, the "expected" type for error
+ // reporting is the current unification type,
+ // which is basically the LUB of the expressions
+ // we've seen so far (combined with the expected
+ // type)
+ (self.merged_ty(), expression_ty)
+ };
+
+ // Handle the actual type unification etc.
+ let result = if !force_unit {
+ if self.pushed == 0 {
+ // Special-case the first expression we are coercing.
+ // To be honest, I'm not entirely sure why we do this.
+ // We don't allow two-phase borrows, see comment in try_find_coercion_lub for why
+ icx.coerce(
+ expression.into(),
+ expression_ty,
+ self.expected_ty,
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ )
+ } else {
+ match self.expressions {
+ Expressions::Dynamic(ref exprs) => icx.try_find_coercion_lub(
+ exprs,
+ self.merged_ty(),
+ expression,
+ expression_ty,
+ ),
+ Expressions::UpFront(coercion_sites) => icx.try_find_coercion_lub(
+ &coercion_sites[0..self.pushed],
+ self.merged_ty(),
+ expression,
+ expression_ty,
+ ),
+ }
+ }
+ } else {
+ // this is a hack for cases where we default to `()` because
+ // the expression etc has been omitted from the source. An
+ // example is an `if let` without an else:
+ //
+ // if let Some(x) = ... { }
+ //
+ // we wind up with a second match arm that is like `_ =>
+ // ()`. That is the case we are considering here. We take
+ // a different path to get the right "expected, found"
+ // message and so forth (and because we know that
+ // `expression_ty` will be unit).
+ //
+ // Another example is `break` with no argument expression.
+ assert!(expression_ty.is_unit(), "if let hack without unit type");
+ icx.table
+ .infer_ctxt
+ .at(cause, icx.table.param_env)
+ .eq(
+ // needed for tests/ui/type-alias-impl-trait/issue-65679-inst-opaque-ty-from-val-twice.rs
+ DefineOpaqueTypes::Yes,
+ expected,
+ found,
+ )
+ .map(|infer_ok| {
+ icx.table.register_infer_ok(infer_ok);
+ expression_ty
+ })
+ };
+
+ debug!(?result);
+ match result {
+ Ok(v) => {
+ self.final_ty = Some(v);
+ match self.expressions {
+ Expressions::Dynamic(ref mut buffer) => buffer.push(expression),
+ Expressions::UpFront(coercion_sites) => {
+ // if the user gave us an array to validate, check that we got
+ // the next expression in the list, as expected
+ assert_eq!(coercion_sites[self.pushed], expression);
+ }
+ }
+ }
+ Err(_coercion_error) => {
+ // Mark that we've failed to coerce the types here to suppress
+ // any superfluous errors we might encounter while trying to
+ // emit or provide suggestions on how to fix the initial error.
+ icx.set_tainted_by_errors();
+
+ self.final_ty = Some(Ty::new_error(icx.table.interner, ErrorGuaranteed));
+
+ icx.result.type_mismatches.insert(
+ expression.into(),
+ if label_expression_as_expected {
+ TypeMismatch {
+ expected: found.to_chalk(icx.table.interner),
+ actual: expected.to_chalk(icx.table.interner),
+ }
+ } else {
+ TypeMismatch {
+ expected: expected.to_chalk(icx.table.interner),
+ actual: found.to_chalk(icx.table.interner),
+ }
},
- ))
+ );
}
- (TyKind::Ref(from_mt, _, from_inner), &TyKind::Raw(to_mt, _)) => {
- coerce_mutabilities(*from_mt, to_mt)?;
+ }
- Some((
- Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
- Adjustment {
- kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)),
- target: TyKind::Raw(to_mt, from_inner.clone()).intern(Interner),
- },
- ))
- }
- _ => None,
- };
- let coerce_from =
- reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone());
+ self.pushed += 1;
+ }
- let krate = self.trait_env.krate;
- let coerce_unsized_trait = match LangItem::CoerceUnsized.resolve_trait(self.db, krate) {
- Some(trait_) => trait_,
- _ => return Err(TypeError),
- };
-
- let coerce_unsized_tref = {
- let b = TyBuilder::trait_ref(self.db, coerce_unsized_trait);
- if b.remaining() != 2 {
- // The CoerceUnsized trait should have two generic params: Self and T.
- return Err(TypeError);
- }
- b.push(coerce_from).push(to_ty.clone()).build()
- };
-
- let goal: Goal = coerce_unsized_tref.cast(Interner);
-
- self.commit_if_ok(|table| match table.solve_obligation(goal) {
- Ok(Certainty::Yes) => Ok(()),
- _ => Err(TypeError),
- })?;
-
- let unsize =
- Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: to_ty.clone() };
- let adjustments = match reborrow {
- None => vec![unsize],
- Some((deref, autoref)) => vec![deref, autoref, unsize],
- };
- success(adjustments, to_ty.clone(), vec![])
+ pub(crate) fn complete(self, icx: &mut InferenceContext<'db>) -> Ty<'db> {
+ if let Some(final_ty) = self.final_ty {
+ final_ty
+ } else {
+ // If we only had inputs that were of type `!` (or no
+ // inputs at all), then the final type is `!`.
+ assert_eq!(self.pushed, 0);
+ icx.result.standard_types.never.to_nextsolver(icx.table.interner)
+ }
}
}
-fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty {
- let closure_sig = ClosureSubst(closure_substs).sig_ty().clone();
- match closure_sig.kind(Interner) {
- TyKind::Function(fn_ty) => TyKind::Function(FnPointer {
- num_binders: fn_ty.num_binders,
- sig: FnSig { safety, abi: FnAbi::Rust, variadic: fn_ty.sig.variadic },
- substitution: fn_ty.substitution.clone(),
- })
- .intern(Interner),
- _ => TyKind::Error.intern(Interner),
- }
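+/// Returns whether the first canonicalized type could be coerced to the second
+/// under the given trait environment; the computed adjustments are discarded.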
+pub fn could_coerce(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &crate::Canonical<(crate::Ty, crate::Ty)>,
+) -> bool {
+ coerce(db, env, tys).is_ok()
}
-fn safe_to_unsafe_fn_ty(fn_ty: FnPointer) -> FnPointer {
- FnPointer {
- num_binders: fn_ty.num_binders,
- sig: FnSig { safety: chalk_ir::Safety::Unsafe, ..fn_ty.sig },
- substitution: fn_ty.substitution,
- }
-}
+fn coerce<'db>(
+ db: &'db dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &crate::Canonical<(crate::Ty, crate::Ty)>,
+) -> Result<(Vec<Adjustment>, crate::Ty), TypeError<DbInterner<'db>>> {
+ let mut table = InferenceTable::new(db, env);
+ let vars = table.fresh_subst(tys.binders.as_slice(Interner));
+ let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+ let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
-fn coerce_mutabilities(from: Mutability, to: Mutability) -> Result<(), TypeError> {
- match (from, to) {
- (Mutability::Mut, Mutability::Mut | Mutability::Not)
- | (Mutability::Not, Mutability::Not) => Ok(()),
- (Mutability::Not, Mutability::Mut) => Err(TypeError),
- }
-}
+ let cause = ObligationCause::new();
+ // FIXME: Target features.
+ let target_features = TargetFeatures::default();
+ let mut coerce = Coerce {
+ table: &mut table,
+ has_errors: &mut false,
+ cause,
+ allow_two_phase: AllowTwoPhase::No,
+ coerce_never: true,
+ use_lub: false,
+ target_features: &mut || (&target_features, TargetFeatureIsSafeInTarget::No),
+ };
+ let InferOk { value: (adjustments, ty), obligations } = coerce.coerce(
+ ty1_with_vars.to_nextsolver(coerce.table.interner),
+ ty2_with_vars.to_nextsolver(coerce.table.interner),
+ )?;
+ table.register_predicates(obligations);
-pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec<Adjustment> {
- let steps = autoderef.steps();
- let targets =
- steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty()));
- steps
- .iter()
- .map(|(kind, _source)| match kind {
- // We do not know what kind of deref we require at this point yet
- AutoderefKind::Overloaded => Some(OverloadedDeref(None)),
- AutoderefKind::Builtin => None,
- })
- .zip(targets)
- .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })
- .collect()
+ // default any type vars that weren't unified back to their original bound vars
+ // (kind of hacky)
+ let find_var = |iv| {
+ vars.iter(Interner).position(|v| match v.interned() {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ } == Some(iv))
+ };
+ let fallback = |iv, kind, default, binder| match kind {
+ chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
+ .map_or(default, |i| crate::BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Lifetime => find_var(iv).map_or(default, |i| {
+ crate::BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)
+ }),
+ chalk_ir::VariableKind::Const(ty) => find_var(iv).map_or(default, |i| {
+ crate::BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)
+ }),
+ };
+ // FIXME also map the types in the adjustments
+ Ok((adjustments, table.resolve_with_fallback(ty.to_chalk(table.interner), &fallback)))
}
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 0a58ea1..c5a51df 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -1,12 +1,10 @@
//! Type inference for expressions.
-use std::{
- iter::{repeat, repeat_with},
- mem,
-};
+use std::{iter::repeat_with, mem};
use chalk_ir::{DebruijnIndex, Mutability, TyVariableKind, cast::Cast};
use either::Either;
+use hir_def::hir::ClosureKind;
use hir_def::{
BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId,
expr_store::path::{GenericArg, GenericArgs, Path},
@@ -19,19 +17,23 @@
};
use hir_expand::name::Name;
use intern::sym;
+use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
use stdx::always;
use syntax::ast::RangeOp;
+use tracing::debug;
+use crate::autoderef::overloaded_deref_ty;
+use crate::next_solver::ErrorGuaranteed;
+use crate::next_solver::infer::DefineOpaqueTypes;
+use crate::next_solver::obligation_ctxt::ObligationCtxt;
use crate::{
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, DeclContext,
DeclOrigin, IncorrectGenericsLenKind, Interner, LifetimeElisionKind, Rawness, Scalar,
- Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
- autoderef::{Autoderef, builtin_deref, deref_by_trait},
- consteval,
+ Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, consteval,
generics::generics,
infer::{
- BreakableKind,
- coerce::{CoerceMany, CoerceNever, CoercionCause},
+ AllowTwoPhase, BreakableKind,
+ coerce::{CoerceMany, CoerceNever},
find_continuable,
pat::contains_explicit_ref_binding,
},
@@ -42,7 +44,10 @@
},
mapping::{ToChalk, from_chalk},
method_resolution::{self, VisibleFromModule},
- next_solver::mapping::ChalkToNextSolver,
+ next_solver::{
+ infer::traits::ObligationCause,
+ mapping::{ChalkToNextSolver, NextSolverToChalk},
+ },
primitive::{self, UintTy},
static_lifetime, to_chalk_trait_id,
traits::FnTrait,
@@ -50,7 +55,7 @@
use super::{
BreakableContext, Diverges, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
- cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable,
+ cast::CastCheck, find_breakable,
};
#[derive(Clone, Copy, PartialEq, Eq)]
@@ -59,7 +64,7 @@
No,
}
-impl InferenceContext<'_> {
+impl<'db> InferenceContext<'db> {
pub(crate) fn infer_expr(
&mut self,
tgt_expr: ExprId,
@@ -98,8 +103,14 @@
} else {
CoerceNever::No
};
- match self.coerce(Some(expr), &ty, &target, coerce_never) {
- Ok(res) => res,
+ match self.coerce(
+ expr.into(),
+ ty.to_nextsolver(self.table.interner),
+ target.to_nextsolver(self.table.interner),
+ AllowTwoPhase::No,
+ coerce_never,
+ ) {
+ Ok(res) => res.to_chalk(self.table.interner),
Err(_) => {
self.result.type_mismatches.insert(
expr.into(),
@@ -260,8 +271,15 @@
}
if let Some(target) = expected.only_has_type(&mut self.table) {
- self.coerce(Some(expr), &ty, &target, CoerceNever::Yes)
- .expect("never-to-any coercion should always succeed")
+ self.coerce(
+ expr.into(),
+ ty.to_nextsolver(self.table.interner),
+ target.to_nextsolver(self.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ )
+ .expect("never-to-any coercion should always succeed")
+ .to_chalk(self.table.interner)
} else {
ty
}
@@ -304,27 +322,41 @@
let then_ty = self.infer_expr_inner(then_branch, expected, ExprIsRead::Yes);
let then_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
- let mut coerce = CoerceMany::new(expected.coercion_target_type(&mut self.table));
- coerce.coerce(self, Some(then_branch), &then_ty, CoercionCause::Expr(then_branch));
+ let mut coercion_sites = [then_branch, tgt_expr];
+ if let Some(else_branch) = else_branch {
+ coercion_sites[1] = else_branch;
+ }
+ let mut coerce = CoerceMany::with_coercion_sites(
+ expected
+ .coercion_target_type(&mut self.table)
+ .to_nextsolver(self.table.interner),
+ &coercion_sites,
+ );
+ coerce.coerce(
+ self,
+ &ObligationCause::new(),
+ then_branch,
+ then_ty.to_nextsolver(self.table.interner),
+ );
match else_branch {
Some(else_branch) => {
let else_ty = self.infer_expr_inner(else_branch, expected, ExprIsRead::Yes);
let else_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
coerce.coerce(
self,
- Some(else_branch),
- &else_ty,
- CoercionCause::Expr(else_branch),
+ &ObligationCause::new(),
+ else_branch,
+ else_ty.to_nextsolver(self.table.interner),
);
self.diverges = condition_diverges | then_diverges & else_diverges;
}
None => {
- coerce.coerce_forced_unit(self, CoercionCause::Expr(tgt_expr));
+ coerce.coerce_forced_unit(self, tgt_expr, &ObligationCause::new(), true);
self.diverges = condition_diverges;
}
}
- coerce.complete(self)
+ coerce.complete(self).to_chalk(self.table.interner)
}
&Expr::Let { pat, expr } => {
let child_is_read = if self.pat_guaranteed_to_constitute_read_for_never(pat) {
@@ -377,7 +409,15 @@
}
}
Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => self
- .infer_closure(body, args, ret_type, arg_types, *closure_kind, tgt_expr, expected),
+ .infer_closure(
+ *body,
+ args,
+ *ret_type,
+ arg_types,
+ *closure_kind,
+ tgt_expr,
+ expected,
+ ),
Expr::Call { callee, args, .. } => self.infer_call(tgt_expr, *callee, args, expected),
Expr::MethodCall { receiver, args, method_name, generic_args } => self
.infer_method_call(
@@ -416,7 +456,7 @@
}
_ => self.table.new_type_var(),
};
- let mut coerce = CoerceMany::new(result_ty);
+ let mut coerce = CoerceMany::new(result_ty.to_nextsolver(self.table.interner));
for arm in arms.iter() {
if let Some(guard_expr) = arm.guard {
@@ -431,12 +471,17 @@
let arm_ty = self.infer_expr_inner(arm.expr, &expected, ExprIsRead::Yes);
all_arms_diverge &= self.diverges;
- coerce.coerce(self, Some(arm.expr), &arm_ty, CoercionCause::Expr(arm.expr));
+ coerce.coerce(
+ self,
+ &ObligationCause::new(),
+ arm.expr,
+ arm_ty.to_nextsolver(self.table.interner),
+ );
}
self.diverges = matchee_diverges | all_arms_diverge;
- coerce.complete(self)
+ coerce.complete(self).to_chalk(self.table.interner)
}
}
Expr::Path(p) => self.infer_expr_path(p, tgt_expr.into(), tgt_expr),
@@ -454,7 +499,7 @@
let val_ty = if let Some(expr) = expr {
let opt_coerce_to = match find_breakable(&mut self.breakables, label) {
Some(ctxt) => match &ctxt.coerce {
- Some(coerce) => coerce.expected_ty(),
+ Some(coerce) => coerce.expected_ty().to_chalk(self.table.interner),
None => {
self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
expr: tgt_expr,
@@ -478,11 +523,12 @@
match find_breakable(&mut self.breakables, label) {
Some(ctxt) => match ctxt.coerce.take() {
Some(mut coerce) => {
- let cause = match expr {
- Some(expr) => CoercionCause::Expr(expr),
- None => CoercionCause::Expr(tgt_expr),
- };
- coerce.coerce(self, expr, &val_ty, cause);
+ coerce.coerce(
+ self,
+ &ObligationCause::new(),
+ expr.unwrap_or(tgt_expr),
+ val_ty.to_nextsolver(self.table.interner),
+ );
// Avoiding borrowck
let ctxt = find_breakable(&mut self.breakables, label)
@@ -514,7 +560,13 @@
);
} else {
let unit = self.result.standard_types.unit.clone();
- let _ = self.coerce(Some(tgt_expr), &unit, &yield_ty, CoerceNever::Yes);
+ let _ = self.coerce(
+ tgt_expr.into(),
+ unit.to_nextsolver(self.table.interner),
+ yield_ty.to_nextsolver(self.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ );
}
resume_ty
} else {
@@ -670,11 +722,23 @@
Substitution::empty(Interner),
);
}
- if let Some(derefed) = builtin_deref(self.table.db, &inner_ty, true) {
- self.table.structurally_resolve_type(derefed)
+ if let Some(derefed) =
+ inner_ty.to_nextsolver(self.table.interner).builtin_deref(self.db, true)
+ {
+ self.table
+ .structurally_resolve_type(&derefed.to_chalk(self.table.interner))
} else {
- deref_by_trait(&mut self.table, inner_ty, false)
- .unwrap_or_else(|| self.err_ty())
+ let infer_ok = overloaded_deref_ty(
+ &self.table,
+ inner_ty.to_nextsolver(self.table.interner),
+ );
+ match infer_ok {
+ Some(infer_ok) => self
+ .table
+ .register_infer_ok(infer_ok)
+ .to_chalk(self.table.interner),
+ None => self.err_ty(),
+ }
}
}
UnaryOp::Neg => {
@@ -1010,11 +1074,23 @@
CallableSig::from_def(this.db, *def, parameters).to_fn_ptr(),
)
.intern(Interner);
- _ = this.coerce(Some(expr), &ty, &fnptr_ty, CoerceNever::Yes);
+ _ = this.coerce(
+ expr.into(),
+ ty.to_nextsolver(this.table.interner),
+ fnptr_ty.to_nextsolver(this.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ );
}
TyKind::Ref(mutbl, _, base_ty) => {
let ptr_ty = TyKind::Raw(*mutbl, base_ty.clone()).intern(Interner);
- _ = this.coerce(Some(expr), &ty, &ptr_ty, CoerceNever::Yes);
+ _ = this.coerce(
+ expr.into(),
+ ty.to_nextsolver(this.table.interner),
+ ptr_ty.to_nextsolver(this.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ );
}
_ => {}
}
@@ -1092,15 +1168,23 @@
let ret_ty = self.table.new_type_var();
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
- let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(ret_ty.clone()));
+ let prev_ret_coercion = self
+ .return_coercion
+ .replace(CoerceMany::new(ret_ty.to_nextsolver(self.table.interner)));
// FIXME: We should handle async blocks like we handle closures
let expected = &Expectation::has_type(ret_ty);
let (_, inner_ty) = self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
let ty = this.infer_block(tgt_expr, *id, statements, *tail, None, expected);
if let Some(target) = expected.only_has_type(&mut this.table) {
- match this.coerce(Some(tgt_expr), &ty, &target, CoerceNever::Yes) {
- Ok(res) => res,
+ match this.coerce(
+ tgt_expr.into(),
+ ty.to_nextsolver(this.table.interner),
+ target.to_nextsolver(this.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ ) {
+ Ok(res) => res.to_chalk(this.table.interner),
Err(_) => {
this.result.type_mismatches.insert(
tgt_expr.into(),
@@ -1209,13 +1293,21 @@
(elem_ty, consteval::usize_const(self.db, Some(0), krate))
}
Array::ElementList { elements, .. } => {
- let mut coerce = CoerceMany::new(elem_ty);
+ let mut coerce = CoerceMany::with_coercion_sites(
+ elem_ty.to_nextsolver(self.table.interner),
+ elements,
+ );
for &expr in elements.iter() {
let cur_elem_ty = self.infer_expr_inner(expr, &expected, ExprIsRead::Yes);
- coerce.coerce(self, Some(expr), &cur_elem_ty, CoercionCause::Expr(expr));
+ coerce.coerce(
+ self,
+ &ObligationCause::new(),
+ expr,
+ cur_elem_ty.to_nextsolver(self.table.interner),
+ );
}
(
- coerce.complete(self),
+ coerce.complete(self).to_chalk(self.table.interner),
consteval::usize_const(self.db, Some(elements.len() as u128), krate),
)
}
@@ -1254,11 +1346,17 @@
.return_coercion
.as_mut()
.expect("infer_return called outside function body")
- .expected_ty();
+ .expected_ty()
+ .to_chalk(self.table.interner);
let return_expr_ty =
self.infer_expr_inner(expr, &Expectation::HasType(ret_ty), ExprIsRead::Yes);
let mut coerce_many = self.return_coercion.take().unwrap();
- coerce_many.coerce(self, Some(expr), &return_expr_ty, CoercionCause::Expr(expr));
+ coerce_many.coerce(
+ self,
+ &ObligationCause::new(),
+ expr,
+ return_expr_ty.to_nextsolver(self.table.interner),
+ );
self.return_coercion = Some(coerce_many);
}
@@ -1269,7 +1367,7 @@
self.infer_return(expr);
} else {
let mut coerce = self.return_coercion.take().unwrap();
- coerce.coerce_forced_unit(self, CoercionCause::Expr(ret));
+ coerce.coerce_forced_unit(self, ret, &ObligationCause::new(), true);
self.return_coercion = Some(coerce);
}
}
@@ -1286,7 +1384,7 @@
fn infer_expr_become(&mut self, expr: ExprId) -> Ty {
match &self.return_coercion {
Some(return_coercion) => {
- let ret_ty = return_coercion.expected_ty();
+ let ret_ty = return_coercion.expected_ty().to_chalk(self.table.interner);
let call_expr_ty = self.infer_expr_inner(
expr,
@@ -1540,9 +1638,10 @@
};
if this
.coerce(
- Some(expr),
- &this.result.standard_types.unit.clone(),
- &t,
+ expr.into(),
+ this.result.standard_types.unit.to_nextsolver(this.table.interner),
+ t.to_nextsolver(this.table.interner),
+ AllowTwoPhase::No,
coerce_never,
)
.is_err()
@@ -1563,6 +1662,7 @@
});
self.resolver.reset_to_guard(g);
if let Some(prev_env) = prev_env {
+ self.table.param_env = prev_env.env.to_nextsolver(self.table.interner);
self.table.trait_env = prev_env;
}
@@ -1574,50 +1674,49 @@
receiver_ty: &Ty,
name: &Name,
) -> Option<(Ty, Either<FieldId, TupleFieldId>, Vec<Adjustment>, bool)> {
- let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false, false);
+ let interner = self.table.interner;
+ let mut autoderef = self.table.autoderef(receiver_ty.to_nextsolver(self.table.interner));
let mut private_field = None;
let res = autoderef.by_ref().find_map(|(derefed_ty, _)| {
- let (field_id, parameters) = match derefed_ty.kind(Interner) {
- TyKind::Tuple(_, substs) => {
+ let (field_id, parameters) = match derefed_ty.kind() {
+ crate::next_solver::TyKind::Tuple(substs) => {
return name.as_tuple_index().and_then(|idx| {
- substs
- .as_slice(Interner)
- .get(idx)
- .map(|a| a.assert_ty_ref(Interner))
- .cloned()
- .map(|ty| {
- (
- Either::Right(TupleFieldId {
- tuple: TupleId(
- self.tuple_field_accesses_rev
- .insert_full(substs.clone())
- .0
- as u32,
- ),
- index: idx as u32,
- }),
- ty,
- )
- })
+ substs.as_slice().get(idx).copied().map(|ty| {
+ (
+ Either::Right(TupleFieldId {
+ tuple: TupleId(
+ self.tuple_field_accesses_rev
+ .insert_full(substs.to_chalk(interner))
+ .0 as u32,
+ ),
+ index: idx as u32,
+ }),
+ ty.to_chalk(interner),
+ )
+ })
});
}
- &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref parameters) => {
- let local_id = s.fields(self.db).field(name)?;
- let field = FieldId { parent: s.into(), local_id };
- (field, parameters.clone())
- }
- &TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), ref parameters) => {
- let local_id = u.fields(self.db).field(name)?;
- let field = FieldId { parent: u.into(), local_id };
- (field, parameters.clone())
- }
+ crate::next_solver::TyKind::Adt(adt, parameters) => match adt.def_id().0 {
+ hir_def::AdtId::StructId(s) => {
+ let local_id = s.fields(self.db).field(name)?;
+ let field = FieldId { parent: s.into(), local_id };
+ (field, parameters)
+ }
+ hir_def::AdtId::UnionId(u) => {
+ let local_id = u.fields(self.db).field(name)?;
+ let field = FieldId { parent: u.into(), local_id };
+ (field, parameters)
+ }
+ hir_def::AdtId::EnumId(_) => return None,
+ },
_ => return None,
};
+ let parameters: crate::Substitution = parameters.to_chalk(interner);
let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
.is_visible_from(self.db, self.resolver.module());
if !is_visible {
if private_field.is_none() {
- private_field = Some((field_id, parameters));
+ private_field = Some((field_id, parameters.clone()));
}
return None;
}
@@ -1629,14 +1728,14 @@
Some(match res {
Some((field_id, ty)) => {
- let adjustments = auto_deref_adjust_steps(&autoderef);
+ let adjustments = autoderef.adjust_steps();
let ty = self.process_remote_user_written_ty(ty);
(ty, field_id, adjustments, true)
}
None => {
let (field_id, subst) = private_field?;
- let adjustments = auto_deref_adjust_steps(&autoderef);
+ let adjustments = autoderef.adjust_steps();
let ty = self.db.field_types(field_id.parent)[field_id.local_id]
.clone()
.substitute(Interner, &subst);
@@ -1725,11 +1824,13 @@
expected: &Expectation,
) -> Ty {
let callee_ty = self.infer_expr(callee, &Expectation::none(), ExprIsRead::Yes);
- let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false, true);
+ let interner = self.table.interner;
+ let mut derefs = self.table.autoderef(callee_ty.to_nextsolver(interner));
let (res, derefed_callee) = loop {
let Some((callee_deref_ty, _)) = derefs.next() else {
break (None, callee_ty.clone());
};
+ let callee_deref_ty = callee_deref_ty.to_chalk(interner);
if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) {
break (Some(res), callee_deref_ty);
}
@@ -1740,28 +1841,30 @@
derefed_callee.callable_sig(self.db).is_some_and(|sig| sig.is_varargs) || res.is_none();
let (param_tys, ret_ty) = match res {
Some((func, params, ret_ty)) => {
- let mut adjustments = auto_deref_adjust_steps(&derefs);
- if let TyKind::Closure(c, _) =
- self.table.resolve_completely(callee_ty.clone()).kind(Interner)
- {
- self.add_current_closure_dependency(*c);
- self.deferred_closures.entry(*c).or_default().push((
- derefed_callee.clone(),
- callee_ty.clone(),
- params.clone(),
- tgt_expr,
- ));
- }
+ let params_chalk =
+ params.iter().map(|param| param.to_chalk(interner)).collect::<Vec<_>>();
+ let mut adjustments = derefs.adjust_steps();
if let Some(fn_x) = func {
self.write_fn_trait_method_resolution(
fn_x,
&derefed_callee,
&mut adjustments,
&callee_ty,
- ¶ms,
+ ¶ms_chalk,
tgt_expr,
);
}
+ if let &TyKind::Closure(c, _) =
+ self.table.resolve_completely(callee_ty.clone()).kind(Interner)
+ {
+ self.add_current_closure_dependency(c.into());
+ self.deferred_closures.entry(c.into()).or_default().push((
+ derefed_callee.clone(),
+ callee_ty.clone(),
+ params_chalk,
+ tgt_expr,
+ ));
+ }
self.write_expr_adj(callee, adjustments.into_boxed_slice());
(params, ret_ty)
}
@@ -1770,7 +1873,7 @@
call_expr: tgt_expr,
found: callee_ty.clone(),
});
- (Vec::new(), self.err_ty())
+ (Vec::new(), crate::next_solver::Ty::new_error(interner, ErrorGuaranteed))
}
};
let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
@@ -1791,29 +1894,24 @@
tgt_expr: ExprId,
args: &[ExprId],
callee_ty: Ty,
- param_tys: &[Ty],
- ret_ty: Ty,
+ param_tys: &[crate::next_solver::Ty<'db>],
+ ret_ty: crate::next_solver::Ty<'db>,
indices_to_skip: &[u32],
is_varargs: bool,
expected: &Expectation,
) -> Ty {
self.register_obligations_for_call(&callee_ty);
- let expected_inputs = self.expected_inputs_for_expected_output(
- expected,
- ret_ty.clone(),
- param_tys.to_owned(),
- );
-
self.check_call_arguments(
tgt_expr,
- args,
- &expected_inputs,
param_tys,
+ ret_ty,
+ expected,
+ args,
indices_to_skip,
is_varargs,
);
- self.normalize_associated_types_in(ret_ty)
+ self.table.normalize_associated_types_in_ns(ret_ty).to_chalk(self.table.interner)
}
fn infer_method_call(
@@ -1826,6 +1924,21 @@
expected: &Expectation,
) -> Ty {
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none(), ExprIsRead::Yes);
+ let receiver_ty = self.table.structurally_resolve_type(&receiver_ty);
+
+ if matches!(
+ receiver_ty.kind(Interner),
+ TyKind::Error | TyKind::InferenceVar(_, TyVariableKind::General)
+ ) {
+ // Don't probe on error type, or on a fully unresolved infer var.
+ // FIXME: Emit an error if we're probing on an infer var (type annotations needed).
+ for &arg in args {
+ // Make sure we infer and record the arguments.
+ self.infer_expr_no_expect(arg, ExprIsRead::Yes);
+ }
+ return receiver_ty;
+ }
+
let canonicalized_receiver =
self.canonicalize(receiver_ty.clone().to_nextsolver(self.table.interner));
@@ -1918,14 +2031,21 @@
tgt_expr,
args,
callee_ty,
- sig.params().get(strip_first as usize..).unwrap_or(&[]),
- sig.ret().clone(),
+ &sig.params()
+ .get(strip_first as usize..)
+ .unwrap_or(&[])
+ .iter()
+ .map(|param| param.to_nextsolver(self.table.interner))
+ .collect::<Vec<_>>(),
+ sig.ret().to_nextsolver(self.table.interner),
&[],
true,
expected,
),
None => {
- self.check_call_arguments(tgt_expr, args, &[], &[], &[], true);
+ for &arg in args.iter() {
+ self.infer_expr_no_expect(arg, ExprIsRead::Yes);
+ }
self.err_ty()
}
}
@@ -1944,151 +2064,252 @@
) -> Ty {
let method_ty = method_ty.substitute(Interner, &substs);
self.register_obligations_for_call(&method_ty);
+ let interner = self.table.interner;
let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) =
match method_ty.callable_sig(self.db) {
Some(sig) => (
if !sig.params().is_empty() {
- (sig.params()[0].clone(), sig.params()[1..].to_vec())
+ (
+ sig.params()[0].to_nextsolver(interner),
+ sig.params()[1..]
+ .iter()
+ .map(|param| param.to_nextsolver(interner))
+ .collect(),
+ )
} else {
- (self.err_ty(), Vec::new())
+ (crate::next_solver::Ty::new_error(interner, ErrorGuaranteed), Vec::new())
},
- sig.ret().clone(),
+ sig.ret().to_nextsolver(interner),
sig.is_varargs,
),
None => {
- let formal_receiver_ty = self.table.new_type_var();
- let ret_ty = self.table.new_type_var();
+ let formal_receiver_ty = self.table.next_ty_var();
+ let ret_ty = self.table.next_ty_var();
((formal_receiver_ty, Vec::new()), ret_ty, true)
}
};
- self.unify(&formal_receiver_ty, &receiver_ty);
+ self.table.unify_ns(formal_receiver_ty, receiver_ty.to_nextsolver(interner));
- let expected_inputs =
- self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone());
-
- self.check_call_arguments(tgt_expr, args, &expected_inputs, ¶m_tys, &[], is_varargs);
- self.normalize_associated_types_in(ret_ty)
+ self.check_call_arguments(tgt_expr, ¶m_tys, ret_ty, expected, args, &[], is_varargs);
+ self.table.normalize_associated_types_in_ns(ret_ty).to_chalk(interner)
}
- fn expected_inputs_for_expected_output(
+ /// Generic function that factors out common logic from function calls,
+ /// method calls and overloaded operators.
+ pub(in super::super) fn check_call_arguments(
&mut self,
- expected_output: &Expectation,
- output: Ty,
- inputs: Vec<Ty>,
- ) -> Vec<Ty> {
- if let Some(expected_ty) = expected_output.only_has_type(&mut self.table) {
- self.table.fudge_inference(|table| {
- if table.try_unify(&expected_ty, &output).is_ok() {
- table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
- chalk_ir::VariableKind::Ty(tk) => var.to_ty(Interner, tk).cast(Interner),
- chalk_ir::VariableKind::Lifetime => {
- var.to_lifetime(Interner).cast(Interner)
- }
- chalk_ir::VariableKind::Const(ty) => {
- var.to_const(Interner, ty).cast(Interner)
- }
- })
- } else {
- Vec::new()
- }
- })
- } else {
- Vec::new()
- }
- }
-
- fn check_call_arguments(
- &mut self,
- expr: ExprId,
- args: &[ExprId],
- expected_inputs: &[Ty],
- param_tys: &[Ty],
+ call_expr: ExprId,
+ // Types (as defined in the *signature* of the target function)
+ formal_input_tys: &[crate::next_solver::Ty<'db>],
+ formal_output: crate::next_solver::Ty<'db>,
+ // Expected output from the parent expression or statement
+ expectation: &Expectation,
+ // The expressions for each provided argument
+ provided_args: &[ExprId],
skip_indices: &[u32],
- ignore_arg_param_mismatch: bool,
+ // Whether the function is variadic, for example when imported from C
+ c_variadic: bool,
) {
- let arg_count_mismatch =
- !ignore_arg_param_mismatch && args.len() != param_tys.len() + skip_indices.len();
- if arg_count_mismatch {
- self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
- call_expr: expr,
- expected: param_tys.len() + skip_indices.len(),
- found: args.len(),
- });
+ let interner = self.table.interner;
+
+        // First, let's unify the formal method signature with the expectation eagerly.
+        // We use this to guide coercion inference; its output is "fudged", which means
+        // any remaining type variables are assigned to new, unrelated variables. This
+        // is because the inference guidance here is only speculative.
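+        //
+        // Illustrative: given `fn first<T>(v: Vec<T>) -> T` and a call site
+        // `let n: u32 = first(vec![]);`, relating the formal output `T` with
+        // the expected `u32` lets us expect `Vec<u32>` for the argument.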
+ let formal_output = self.table.resolve_vars_with_obligations(formal_output);
+ let expected_input_tys: Option<Vec<_>> = expectation
+ .only_has_type(&mut self.table)
+ .and_then(|expected_output| {
+ self.table
+ .infer_ctxt
+ .fudge_inference_if_ok(|| {
+ let mut ocx = ObligationCtxt::new(&self.table.infer_ctxt);
+
+ // Attempt to apply a subtyping relationship between the formal
+ // return type (likely containing type variables if the function
+ // is polymorphic) and the expected return type.
+ // No argument expectations are produced if unification fails.
+ let origin = ObligationCause::new();
+ ocx.sup(
+ &origin,
+ self.table.param_env,
+ expected_output.to_nextsolver(interner),
+ formal_output,
+ )?;
+ if !ocx.select_where_possible().is_empty() {
+ return Err(crate::next_solver::TypeError::Mismatch);
+ }
+
+ // Record all the argument types, with the args
+ // produced from the above subtyping unification.
+ Ok(Some(
+ formal_input_tys
+ .iter()
+ .map(|&ty| self.table.infer_ctxt.resolve_vars_if_possible(ty))
+ .collect(),
+ ))
+ })
+ .ok()
+ })
+ .unwrap_or_default();
+
+ // If there are no external expectations at the call site, just use the types from the function defn
+ let expected_input_tys = if let Some(expected_input_tys) = &expected_input_tys {
+ assert_eq!(expected_input_tys.len(), formal_input_tys.len());
+ expected_input_tys
+ } else {
+ formal_input_tys
};
- // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
+ let minimum_input_count = expected_input_tys.len();
+ let provided_arg_count = provided_args.len() - skip_indices.len();
+
+ // Keep track of whether we *could possibly* be satisfied, i.e. whether we're on the happy path
+ // if the wrong number of arguments were supplied, we CAN'T be satisfied,
+ // and if we're c_variadic, the supplied arguments must be >= the minimum count from the function;
+ // otherwise, they need to be identical, because Rust doesn't currently support variadic functions
+ let args_count_matches = if c_variadic {
+ provided_arg_count >= minimum_input_count
+ } else {
+ provided_arg_count == minimum_input_count
+ };
+
+ if !args_count_matches {
+ self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
+ call_expr,
+ expected: expected_input_tys.len() + skip_indices.len(),
+ found: provided_args.len(),
+ });
+ }
+
+ // We introduce a helper function to demand that a given argument satisfy a given input
+ // This is more complicated than just checking type equality, as arguments could be coerced
+ // This version writes those types back so further type checking uses the narrowed types
+ let demand_compatible = |this: &mut InferenceContext<'db>, idx| {
+ let formal_input_ty: crate::next_solver::Ty<'db> = formal_input_tys[idx];
+ let expected_input_ty: crate::next_solver::Ty<'db> = expected_input_tys[idx];
+ let provided_arg = provided_args[idx];
+
+ debug!("checking argument {}: {:?} = {:?}", idx, provided_arg, formal_input_ty);
+
+ // We're on the happy path here, so we'll do a more involved check and write back types
+ // To check compatibility, we'll do 3 things:
+ // 1. Unify the provided argument with the expected type
+ let expectation = Expectation::rvalue_hint(this, expected_input_ty.to_chalk(interner));
+
+ let checked_ty = this
+ .infer_expr_inner(provided_arg, &expectation, ExprIsRead::Yes)
+ .to_nextsolver(interner);
+
+ // 2. Coerce to the most detailed type that could be coerced
+ // to, which is `expected_ty` if `rvalue_hint` returns an
+ // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise.
+ let coerced_ty = expectation
+ .only_has_type(&mut this.table)
+ .map(|it| it.to_nextsolver(interner))
+ .unwrap_or(formal_input_ty);
+
+ // Cause selection errors caused by resolving a single argument to point at the
+ // argument and not the call. This lets us customize the span pointed to in the
+ // fulfillment error to be more accurate.
+ let coerced_ty = this.table.resolve_vars_with_obligations(coerced_ty);
+
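+ // Any expression that produces a value of type `!` must have diverged, unless it's a
+ // place expression that isn't being read from; only coerce from `!` when the argument
+ // is guaranteed to constitute a read.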
+ let coerce_never = if this
+ .expr_guaranteed_to_constitute_read_for_never(provided_arg, ExprIsRead::Yes)
+ {
+ CoerceNever::Yes
+ } else {
+ CoerceNever::No
+ };
+ let coerce_error = this
+ .coerce(
+ provided_arg.into(),
+ checked_ty,
+ coerced_ty,
+ AllowTwoPhase::Yes,
+ coerce_never,
+ )
+ .err();
+ if coerce_error.is_some() {
+ return Err((coerce_error, coerced_ty, checked_ty));
+ }
+
+ // 3. Check if the formal type is actually equal to the checked one
+ // and register any such obligations for future type checks.
+ let formal_ty_error = this
+ .table
+ .infer_ctxt
+ .at(&ObligationCause::new(), this.table.param_env)
+ .eq(DefineOpaqueTypes::Yes, formal_input_ty, coerced_ty);
+
+ // If neither check failed, the types are compatible
+ match formal_ty_error {
+ Ok(crate::next_solver::infer::InferOk { obligations, value: () }) => {
+ this.table.register_predicates(obligations);
+ Ok(())
+ }
+ Err(err) => Err((Some(err), coerced_ty, checked_ty)),
+ }
+ };
+
+ // Check the arguments.
// We do this in a pretty awful way: first we type-check any arguments
// that are not closures, then we type-check the closures. This is so
// that we have more information about the types of arguments when we
// type-check the functions. This isn't really the right way to do this.
for check_closures in [false, true] {
- let mut skip_indices = skip_indices.iter().copied().fuse().peekable();
- let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
- let expected_iter = expected_inputs
- .iter()
- .cloned()
- .chain(param_iter.clone().skip(expected_inputs.len()));
- for (idx, ((&arg, param_ty), expected_ty)) in
- args.iter().zip(param_iter).zip(expected_iter).enumerate()
- {
- let is_closure = matches!(&self.body[arg], Expr::Closure { .. });
+ // More awful hacks: before we check argument types, try to do
+ // an "opportunistic" trait resolution of any trait bounds on
+ // the call. This helps coercions.
+ if check_closures {
+ self.table.select_obligations_where_possible();
+ }
+
+ let mut skip_indices = skip_indices.iter().copied();
+ // Check each argument, to satisfy the input it was provided for
+ // Visually, we're traveling down the diagonal of the compatibility matrix
+ for (idx, arg) in provided_args.iter().enumerate() {
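+ // Arguments at indices listed in `skip_indices` are not type-checked against the
+ // signature here; they were excluded from the argument-count comparison above.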
+ if skip_indices.clone().next() == Some(idx as u32) {
+ skip_indices.next();
+ continue;
+ }
+
+ // For this check, we do *not* want to treat async coroutine closures (async blocks)
+ // as proper closures. Doing so would regress type inference when feeding
+ // the return value of an argument-position async block to an argument-position
+ // closure wrapped in a block.
+ // See <https://github.com/rust-lang/rust/issues/112225>.
+ let is_closure = if let Expr::Closure { closure_kind, .. } = self.body[*arg] {
+ !matches!(closure_kind, ClosureKind::Coroutine(_))
+ } else {
+ false
+ };
if is_closure != check_closures {
continue;
}
- while skip_indices.peek().is_some_and(|&i| i < idx as u32) {
- skip_indices.next();
- }
- if skip_indices.peek().copied() == Some(idx as u32) {
+ if idx >= minimum_input_count {
+ // Make sure we've checked this expr at least once.
+ self.infer_expr_no_expect(*arg, ExprIsRead::Yes);
continue;
}
- // the difference between param_ty and expected here is that
- // expected is the parameter when the expected *return* type is
- // taken into account. So in `let _: &[i32] = identity(&[1, 2])`
- // the expected type is already `&[i32]`, whereas param_ty is
- // still an unbound type variable. We don't always want to force
- // the parameter to coerce to the expected type (for example in
- // `coerce_unsize_expected_type_4`).
- let param_ty = self.normalize_associated_types_in(param_ty);
- let expected_ty = self.normalize_associated_types_in(expected_ty);
- let expected = Expectation::rvalue_hint(self, expected_ty);
- // infer with the expected type we have...
- let ty = self.infer_expr_inner(arg, &expected, ExprIsRead::Yes);
-
- // then coerce to either the expected type or just the formal parameter type
- let coercion_target = if let Some(ty) = expected.only_has_type(&mut self.table) {
- // if we are coercing to the expectation, unify with the
- // formal parameter type to connect everything
- self.unify(&ty, ¶m_ty);
- ty
- } else {
- param_ty
- };
- // The function signature may contain some unknown types, so we need to insert
- // type vars here to avoid type mismatch false positive.
- let coercion_target = self.insert_type_vars(coercion_target);
-
- // Any expression that produces a value of type `!` must have diverged,
- // unless it's a place expression that isn't being read from, in which case
- // diverging would be unsound since we may never actually read the `!`.
- // e.g. `let _ = *never_ptr;` with `never_ptr: *const !`.
- let coerce_never =
- if self.expr_guaranteed_to_constitute_read_for_never(arg, ExprIsRead::Yes) {
- CoerceNever::Yes
- } else {
- CoerceNever::No
- };
- if self.coerce(Some(arg), &ty, &coercion_target, coerce_never).is_err()
- && !arg_count_mismatch
+ if let Err((_error, expected, found)) = demand_compatible(self, idx)
+ && args_count_matches
{
+ // Don't report type mismatches if there is a mismatch in args count.
self.result.type_mismatches.insert(
- arg.into(),
- TypeMismatch { expected: coercion_target, actual: ty.clone() },
+ (*arg).into(),
+ TypeMismatch {
+ expected: expected.to_chalk(interner),
+ actual: found.to_chalk(interner),
+ },
);
}
}
}
}
fn substs_for_method_call(
@@ -2448,10 +2669,22 @@
cb: impl FnOnce(&mut Self) -> T,
) -> (Option<Ty>, T) {
self.breakables.push({
- BreakableContext { kind, may_break: false, coerce: ty.map(CoerceMany::new), label }
+ BreakableContext {
+ kind,
+ may_break: false,
+ coerce: ty.map(|ty| CoerceMany::new(ty.to_nextsolver(self.table.interner))),
+ label,
+ }
});
let res = cb(self);
let ctx = self.breakables.pop().expect("breakable stack broken");
- (if ctx.may_break { ctx.coerce.map(|ctx| ctx.complete(self)) } else { None }, res)
+ (
+ if ctx.may_break {
+ ctx.coerce.map(|ctx| ctx.complete(self).to_chalk(self.table.interner))
+ } else {
+ None
+ },
+ res,
+ )
}
}
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 6781bc8..6e11fa9 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -10,6 +10,8 @@
use hir_expand::name::Name;
use stdx::TupleExt;
+use crate::infer::AllowTwoPhase;
+use crate::next_solver::mapping::{ChalkToNextSolver, NextSolverToChalk};
use crate::{
DeclContext, DeclOrigin, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty,
TyBuilder, TyExt, TyKind,
@@ -303,16 +305,15 @@
Pat::Path(path) => {
let ty = self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty());
let ty_inserted_vars = self.insert_type_vars_shallow(ty.clone());
- match self.table.coerce(&expected, &ty_inserted_vars, CoerceNever::Yes) {
- Ok((adjustments, coerced_ty)) => {
- if !adjustments.is_empty() {
- self.result
- .pat_adjustments
- .entry(pat)
- .or_default()
- .extend(adjustments.into_iter().map(|adjust| adjust.target));
- }
- self.write_pat_ty(pat, coerced_ty);
+ match self.coerce(
+ pat.into(),
+ expected.to_nextsolver(self.table.interner),
+ ty_inserted_vars.to_nextsolver(self.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ ) {
+ Ok(coerced_ty) => {
+ self.write_pat_ty(pat, coerced_ty.to_chalk(self.table.interner));
return self.pat_ty_after_adjustment(pat);
}
Err(_) => {
@@ -387,8 +388,14 @@
);
// We are returning early to avoid the unifiability check below.
let lhs_ty = self.insert_type_vars_shallow(result);
- let ty = match self.coerce(None, &expected, &lhs_ty, CoerceNever::Yes) {
- Ok(ty) => ty,
+ let ty = match self.coerce(
+ pat.into(),
+ expected.to_nextsolver(self.table.interner),
+ lhs_ty.to_nextsolver(self.table.interner),
+ AllowTwoPhase::No,
+ CoerceNever::Yes,
+ ) {
+ Ok(ty) => ty.to_chalk(self.table.interner),
Err(_) => {
self.result.type_mismatches.insert(
pat.into(),
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index 19b83d3..1687857 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -1,6 +1,6 @@
//! Unification and canonicalization logic.
-use std::{fmt, mem};
+use std::fmt;
use chalk_ir::{
CanonicalVarKind, FloatTy, IntTy, TyVariableKind, cast::Cast, fold::TypeFoldable,
@@ -11,31 +11,37 @@
use hir_expand::name::Name;
use intern::sym;
use rustc_hash::{FxHashMap, FxHashSet};
-use rustc_next_trait_solver::solve::HasChanged;
-use rustc_type_ir::inherent::IntoKind;
+use rustc_type_ir::inherent::Ty as _;
use rustc_type_ir::{
- AliasRelationDirection, FloatVid, IntVid, TyVid,
- inherent::{Span, Term as _},
+ FloatVid, IntVid, TyVid, TypeVisitableExt,
+ inherent::{IntoKind, Span, Term as _},
relate::{Relate, solver_relating::RelateExt},
- solve::{Certainty, NoSolution},
+ solve::{Certainty, GoalSource, NoSolution},
};
-use rustc_type_ir::{TypeSuperFoldable, TypeVisitableExt};
use smallvec::SmallVec;
use triomphe::Arc;
-use super::{InferOk, InferResult, InferenceContext, TypeError};
+use super::{InferResult, InferenceContext, TypeError};
+use crate::next_solver::ErrorGuaranteed;
use crate::{
- AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue, DebruijnIndex, DomainGoal,
- GenericArg, GenericArgData, Goal, GoalData, InEnvironment, InferenceVar, Interner, Lifetime,
- OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Substitution, TraitEnvironment,
- TraitRef, Ty, TyBuilder, TyExt, TyKind, VariableKind, WhereClause,
+ AliasTy, BoundVar, Canonical, Const, ConstValue, DebruijnIndex, GenericArg, GenericArgData,
+ Goal, GoalData, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind,
+ ProjectionTy, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+ VariableKind, WhereClause,
consteval::unknown_const,
db::HirDatabase,
fold_generic_args, fold_tys_and_consts,
+ next_solver::infer::InferOk,
next_solver::{
- self, Binder, DbInterner, Predicate, PredicateKind, SolverDefIds, Term,
- infer::{DbInternerInferExt, InferCtxt, snapshot::CombinedSnapshot},
- mapping::{ChalkToNextSolver, InferenceVarExt, NextSolverToChalk},
+ self, ClauseKind, DbInterner, ParamEnv, Predicate, PredicateKind, SolverDefIds, Term,
+ fulfill::FulfillmentCtxt,
+ infer::{
+ DbInternerInferExt, InferCtxt,
+ snapshot::CombinedSnapshot,
+ traits::{Obligation, ObligationCause},
+ },
+ inspect::{InspectConfig, InspectGoal, ProofTreeVisitor},
+ mapping::{ChalkToNextSolver, NextSolverToChalk},
},
to_chalk_trait_id,
traits::{
@@ -50,43 +56,64 @@
{
self.table.canonicalize(t)
}
+}
- pub(super) fn clauses_for_self_ty(
- &mut self,
- self_ty: InferenceVar,
- ) -> SmallVec<[WhereClause; 4]> {
- self.table.resolve_obligations_as_possible();
+struct NestedObligationsForSelfTy<'a, 'db> {
+ ctx: &'a InferenceTable<'db>,
+ self_ty: TyVid,
+ root_cause: &'a ObligationCause,
+ obligations_for_self_ty: &'a mut SmallVec<[Obligation<'db, Predicate<'db>>; 4]>,
+}
- let root = InferenceVar::from_vid(self.table.infer_ctxt.root_var(self_ty.to_vid()));
- let pending_obligations = mem::take(&mut self.table.pending_obligations);
- let obligations = pending_obligations
- .iter()
- .filter_map(|obligation| match obligation.to_chalk(self.table.interner).goal.data(Interner) {
- GoalData::DomainGoal(DomainGoal::Holds(clause)) => {
- let ty = match clause {
- WhereClause::AliasEq(AliasEq {
- alias: AliasTy::Projection(projection),
- ..
- }) => projection.self_type_parameter(self.db),
- WhereClause::Implemented(trait_ref) => {
- trait_ref.self_type_parameter(Interner)
- }
- WhereClause::TypeOutlives(to) => to.ty.clone(),
- _ => return None,
- };
- let ty = self.resolve_ty_shallow(&ty);
- if matches!(ty.kind(Interner), TyKind::InferenceVar(iv, TyVariableKind::General) if *iv == root) {
- Some(clause.clone())
- } else {
- None
- }
- }
- _ => None,
- })
- .collect();
- self.table.pending_obligations = pending_obligations;
+impl<'a, 'db> ProofTreeVisitor<'db> for NestedObligationsForSelfTy<'a, 'db> {
+ type Result = ();
- obligations
+ fn config(&self) -> InspectConfig {
+ // Using an intentionally low depth to minimize the chance of future
+ // breaking changes in case we adapt the approach later on. This also
+ // avoids any hangs for exponentially growing proof trees.
+ InspectConfig { max_depth: 5 }
+ }
+
+ fn visit_goal(&mut self, inspect_goal: &InspectGoal<'_, 'db>) {
+ // No need to walk into goal subtrees that certainly hold, since they
+ // wouldn't then be stalled on an infer var.
+ if inspect_goal.result() == Ok(Certainty::Yes) {
+ return;
+ }
+
+ let db = self.ctx.interner;
+ let goal = inspect_goal.goal();
+ if self.ctx.predicate_has_self_ty(goal.predicate, self.self_ty)
+ // We do not push the instantiated forms of goals as it would cause any
+ // aliases referencing bound vars to go from having escaping bound vars to
+ // being able to be normalized to an inference variable.
+ //
+ // This is mostly just a hack as arbitrary nested goals could still contain
+ // such aliases while having a different `GoalSource`. Closure signature inference
+ // however can't really handle *every* higher ranked `Fn` goal also being present
+ // in the form of `?c: Fn<(<?x as Trait<'!a>>::Assoc)`.
+ //
+ // This also just better matches the behaviour of the old solver where we do not
+ // encounter instantiated forms of goals, only nested goals that referred to bound
+ // vars from instantiated goals.
+ && !matches!(inspect_goal.source(), GoalSource::InstantiateHigherRanked)
+ {
+ self.obligations_for_self_ty.push(Obligation::new(
+ db,
+ self.root_cause.clone(),
+ goal.param_env,
+ goal.predicate,
+ ));
+ }
+
+ // If there's a unique way to prove a given goal, recurse into
+ // that candidate. This means that for `impl<F: FnOnce(u32)> Trait<F> for () {}`
+ // and a `(): Trait<?0>` goal we recurse into the impl and look at
+ // the nested `?0: FnOnce(u32)` goal.
+ if let Some(candidate) = inspect_goal.unique_applicable_candidate() {
+ candidate.visit_nested_no_probe(self)
+ }
}
}
@@ -119,7 +146,7 @@
let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars);
let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars);
- table.resolve_obligations_as_possible();
+ table.select_obligations_where_possible();
table.propagate_diverging_flag();
let ty1_with_vars = table.resolve_completely(ty1_with_vars);
let ty2_with_vars = table.resolve_completely(ty2_with_vars);
@@ -186,35 +213,117 @@
}
#[derive(Clone)]
-pub(crate) struct InferenceTable<'a> {
- pub(crate) db: &'a dyn HirDatabase,
- pub(crate) interner: DbInterner<'a>,
+pub(crate) struct InferenceTable<'db> {
+ pub(crate) db: &'db dyn HirDatabase,
+ pub(crate) interner: DbInterner<'db>,
pub(crate) trait_env: Arc<TraitEnvironment>,
+ pub(crate) param_env: ParamEnv<'db>,
pub(crate) tait_coercion_table: Option<FxHashMap<OpaqueTyId, Ty>>,
- pub(crate) infer_ctxt: InferCtxt<'a>,
+ pub(crate) infer_ctxt: InferCtxt<'db>,
diverging_tys: FxHashSet<Ty>,
- pending_obligations: Vec<next_solver::Goal<'a, next_solver::Predicate<'a>>>,
+ pub(super) fulfillment_cx: FulfillmentCtxt<'db>,
}
-pub(crate) struct InferenceTableSnapshot<'a> {
+pub(crate) struct InferenceTableSnapshot<'db> {
ctxt_snapshot: CombinedSnapshot,
+ obligations: FulfillmentCtxt<'db>,
diverging_tys: FxHashSet<Ty>,
- pending_obligations: Vec<next_solver::Goal<'a, next_solver::Predicate<'a>>>,
}
-impl<'a> InferenceTable<'a> {
- pub(crate) fn new(db: &'a dyn HirDatabase, trait_env: Arc<TraitEnvironment>) -> Self {
+impl<'db> InferenceTable<'db> {
+ pub(crate) fn new(db: &'db dyn HirDatabase, trait_env: Arc<TraitEnvironment>) -> Self {
let interner = DbInterner::new_with(db, Some(trait_env.krate), trait_env.block);
+ let infer_ctxt = interner.infer_ctxt().build(rustc_type_ir::TypingMode::Analysis {
+ defining_opaque_types_and_generators: SolverDefIds::new_from_iter(interner, []),
+ });
InferenceTable {
db,
interner,
+ param_env: trait_env.env.to_nextsolver(interner),
trait_env,
tait_coercion_table: None,
- infer_ctxt: interner.infer_ctxt().build(rustc_type_ir::TypingMode::Analysis {
- defining_opaque_types_and_generators: SolverDefIds::new_from_iter(interner, []),
- }),
+ fulfillment_cx: FulfillmentCtxt::new(&infer_ctxt),
+ infer_ctxt,
diverging_tys: FxHashSet::default(),
- pending_obligations: Vec::new(),
+ }
+ }
+
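+ /// Returns whether a `Sized` obligation is registered for `self_ty`
+ /// (conservatively `true` when the `Sized` lang item cannot be resolved).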
+ pub(crate) fn type_var_is_sized(&self, self_ty: TyVid) -> bool {
+ let Some(sized_did) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else {
+ return true;
+ };
+ self.obligations_for_self_ty(self_ty).into_iter().any(|obligation| {
+ match obligation.predicate.kind().skip_binder() {
+ crate::next_solver::PredicateKind::Clause(
+ crate::next_solver::ClauseKind::Trait(data),
+ ) => data.def_id().0 == sized_did,
+ _ => false,
+ }
+ })
+ }
+
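+ /// Walks the proof trees of all pending obligations and collects the nested goals
+ /// whose self type is (the root variable of) `self_ty`.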
+ pub(super) fn obligations_for_self_ty(
+ &self,
+ self_ty: TyVid,
+ ) -> SmallVec<[Obligation<'db, Predicate<'db>>; 4]> {
+ let obligations = self.fulfillment_cx.pending_obligations();
+ let mut obligations_for_self_ty = SmallVec::new();
+ for obligation in obligations {
+ let mut visitor = NestedObligationsForSelfTy {
+ ctx: self,
+ self_ty,
+ obligations_for_self_ty: &mut obligations_for_self_ty,
+ root_cause: &obligation.cause,
+ };
+
+ let goal = obligation.as_goal();
+ self.infer_ctxt.visit_proof_tree(goal, &mut visitor);
+ }
+
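+ // Resolve inference variables, then drop obligations that still mention placeholders.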
+ obligations_for_self_ty.retain_mut(|obligation| {
+ obligation.predicate = self.infer_ctxt.resolve_vars_if_possible(obligation.predicate);
+ !obligation.predicate.has_placeholders()
+ });
+ obligations_for_self_ty
+ }
+
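+ /// Whether the predicate constrains `expected_vid` as its self type.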
+ fn predicate_has_self_ty(&self, predicate: Predicate<'db>, expected_vid: TyVid) -> bool {
+ match predicate.kind().skip_binder() {
+ PredicateKind::Clause(ClauseKind::Trait(data)) => {
+ self.type_matches_expected_vid(expected_vid, data.self_ty())
+ }
+ PredicateKind::Clause(ClauseKind::Projection(data)) => {
+ self.type_matches_expected_vid(expected_vid, data.projection_term.self_ty())
+ }
+ PredicateKind::Clause(ClauseKind::ConstArgHasType(..))
+ | PredicateKind::Subtype(..)
+ | PredicateKind::Coerce(..)
+ | PredicateKind::Clause(ClauseKind::RegionOutlives(..))
+ | PredicateKind::Clause(ClauseKind::TypeOutlives(..))
+ | PredicateKind::Clause(ClauseKind::WellFormed(..))
+ | PredicateKind::DynCompatible(..)
+ | PredicateKind::NormalizesTo(..)
+ | PredicateKind::AliasRelate(..)
+ | PredicateKind::Clause(ClauseKind::ConstEvaluatable(..))
+ | PredicateKind::ConstEquate(..)
+ | PredicateKind::Clause(ClauseKind::HostEffect(..))
+ | PredicateKind::Clause(ClauseKind::UnstableFeature(_))
+ | PredicateKind::Ambiguous => false,
+ }
+ }
+
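+ /// Shallowly resolves `ty` and checks whether it is the same inference variable as
+ /// `expected_vid`, comparing root variables.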
+ fn type_matches_expected_vid(
+ &self,
+ expected_vid: TyVid,
+ ty: crate::next_solver::Ty<'db>,
+ ) -> bool {
+ let ty = self.shallow_resolve(ty);
+
+ match ty.kind() {
+ crate::next_solver::TyKind::Infer(rustc_type_ir::TyVar(found_vid)) => {
+ self.infer_ctxt.root_var(expected_vid) == self.infer_ctxt.root_var(found_vid)
+ }
+ _ => false,
}
}
@@ -288,13 +397,13 @@
.intern(Interner)
}
- pub(crate) fn canonicalize<T>(&mut self, t: T) -> rustc_type_ir::Canonical<DbInterner<'a>, T>
+ pub(crate) fn canonicalize<T>(&mut self, t: T) -> rustc_type_ir::Canonical<DbInterner<'db>, T>
where
- T: rustc_type_ir::TypeFoldable<DbInterner<'a>>,
+ T: rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
// try to resolve obligations before canonicalizing, since this might
// result in new knowledge about variables
- self.resolve_obligations_as_possible();
+ self.select_obligations_where_possible();
self.infer_ctxt.canonicalize_response(t)
}
@@ -306,19 +415,24 @@
/// to do it as well.
pub(crate) fn normalize_associated_types_in<T, U>(&mut self, ty: T) -> T
where
- T: ChalkToNextSolver<'a, U>,
- U: NextSolverToChalk<'a, T> + rustc_type_ir::TypeFoldable<DbInterner<'a>>,
+ T: ChalkToNextSolver<'db, U>,
+ U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
self.normalize_associated_types_in_ns(ty.to_nextsolver(self.interner))
.to_chalk(self.interner)
}
+ // FIXME: We should get rid of this method. We cannot deeply normalize during inference, only when finishing.
+ // Inference should use shallow normalization (`try_structurally_resolve_type()`) only when needed.
pub(crate) fn normalize_associated_types_in_ns<T>(&mut self, ty: T) -> T
where
- T: rustc_type_ir::TypeFoldable<DbInterner<'a>>,
+ T: rustc_type_ir::TypeFoldable<DbInterner<'db>> + Clone,
{
let ty = self.resolve_vars_with_obligations(ty);
- ty.fold_with(&mut Normalizer { table: self })
+ self.infer_ctxt
+ .at(&ObligationCause::new(), self.param_env)
+ .deeply_normalize(ty.clone())
+ .unwrap_or(ty)
}
/// Works almost same as [`Self::normalize_associated_types_in`], but this also resolves shallow
@@ -388,8 +502,8 @@
pub(crate) fn normalize_alias_ty(
&mut self,
- alias: crate::next_solver::Ty<'a>,
- ) -> crate::next_solver::Ty<'a> {
+ alias: crate::next_solver::Ty<'db>,
+ ) -> crate::next_solver::Ty<'db> {
let infer_term = self.infer_ctxt.next_ty_var();
let obligation = crate::next_solver::Predicate::new(
self.interner,
@@ -430,6 +544,10 @@
self.new_var(TyVariableKind::General, false)
}
+ pub(crate) fn next_ty_var(&mut self) -> crate::next_solver::Ty<'db> {
+ self.infer_ctxt.next_ty_var()
+ }
+
pub(crate) fn new_integer_var(&mut self) -> Ty {
self.new_var(TyVariableKind::Integer, false)
}
@@ -454,6 +572,10 @@
var.to_lifetime(Interner)
}
+ pub(crate) fn next_region_var(&mut self) -> crate::next_solver::Region<'db> {
+ self.infer_ctxt.next_region_var()
+ }
+
pub(crate) fn resolve_with_fallback<T>(
&mut self,
t: T,
@@ -488,10 +610,10 @@
pub(crate) fn instantiate_canonical_ns<T>(
&mut self,
- canonical: rustc_type_ir::Canonical<DbInterner<'a>, T>,
+ canonical: rustc_type_ir::Canonical<DbInterner<'db>, T>,
) -> T
where
- T: rustc_type_ir::TypeFoldable<DbInterner<'a>>,
+ T: rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
self.infer_ctxt.instantiate_canonical(&canonical).0
}
@@ -513,8 +635,8 @@
pub(crate) fn resolve_completely<T, U>(&mut self, t: T) -> T
where
- T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + ChalkToNextSolver<'a, U>,
- U: NextSolverToChalk<'a, T> + rustc_type_ir::TypeFoldable<DbInterner<'a>>,
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + ChalkToNextSolver<'db, U>,
+ U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
let t = self.resolve_with_fallback(t, &|_, _, d, _| d);
let t = self.normalize_associated_types_in(t);
@@ -566,7 +688,7 @@
}
/// Unify two relatable values (e.g. `Ty`) and register new trait goals that arise from that.
- pub(crate) fn unify<T: ChalkToNextSolver<'a, U>, U: Relate<DbInterner<'a>>>(
+ pub(crate) fn unify<T: ChalkToNextSolver<'db, U>, U: Relate<DbInterner<'db>>>(
&mut self,
ty1: &T,
ty2: &T,
@@ -575,12 +697,20 @@
Ok(r) => r,
Err(_) => return false,
};
- self.register_infer_ok(result);
+ self.register_obligations(result.goals);
+ true
+ }
+
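+ /// Like [`Self::unify`], but takes next-solver values directly.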
+ pub(crate) fn unify_ns<T: Relate<DbInterner<'db>>>(&mut self, lhs: T, rhs: T) -> bool {
+ let Ok(infer_ok) = self.try_unify_ns(lhs, rhs) else {
+ return false;
+ };
+ self.register_obligations(infer_ok.goals);
true
}
/// Unify two relatable values (e.g. `Ty`) and check whether trait goals which arise from that could be fulfilled
- pub(crate) fn unify_deeply<T: ChalkToNextSolver<'a, U>, U: Relate<DbInterner<'a>>>(
+ pub(crate) fn unify_deeply<T: ChalkToNextSolver<'db, U>, U: Relate<DbInterner<'db>>>(
&mut self,
ty1: &T,
ty2: &T,
@@ -596,18 +726,27 @@
/// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
/// caller needs to deal with them.
- pub(crate) fn try_unify<T: ChalkToNextSolver<'a, U>, U: Relate<DbInterner<'a>>>(
+ pub(crate) fn try_unify<T: ChalkToNextSolver<'db, U>, U: Relate<DbInterner<'db>>>(
&mut self,
t1: &T,
t2: &T,
- ) -> InferResult<'a, ()> {
- let param_env = self.trait_env.env.to_nextsolver(self.interner);
+ ) -> InferResult<'db, ()> {
let lhs = t1.to_nextsolver(self.interner);
let rhs = t2.to_nextsolver(self.interner);
+ self.try_unify_ns(lhs, rhs)
+ }
+
+ /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
+ /// caller needs to deal with them.
+ pub(crate) fn try_unify_ns<T: Relate<DbInterner<'db>>>(
+ &mut self,
+ lhs: T,
+ rhs: T,
+ ) -> InferResult<'db, ()> {
let variance = rustc_type_ir::Variance::Invariant;
let span = crate::next_solver::Span::dummy();
- match self.infer_ctxt.relate(param_env, lhs, variance, rhs, span) {
- Ok(goals) => Ok(InferOk { goals, value: () }),
+ match self.infer_ctxt.relate(self.param_env, lhs, variance, rhs, span) {
+ Ok(goals) => Ok(crate::infer::InferOk { goals, value: () }),
Err(_) => Err(TypeError),
}
}
@@ -616,17 +755,19 @@
/// otherwise, return ty.
#[tracing::instrument(skip(self))]
pub(crate) fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
- if !ty.data(Interner).flags.intersects(chalk_ir::TypeFlags::HAS_FREE_LOCAL_NAMES) {
- return ty.clone();
- }
- self.infer_ctxt
- .resolve_vars_if_possible(ty.to_nextsolver(self.interner))
- .to_chalk(self.interner)
+ self.shallow_resolve(ty.to_nextsolver(self.interner)).to_chalk(self.interner)
+ }
+
+ pub(crate) fn shallow_resolve(
+ &self,
+ ty: crate::next_solver::Ty<'db>,
+ ) -> crate::next_solver::Ty<'db> {
+ self.infer_ctxt.shallow_resolve(ty)
}
pub(crate) fn resolve_vars_with_obligations<T>(&mut self, t: T) -> T
where
- T: rustc_type_ir::TypeFoldable<DbInterner<'a>>,
+ T: rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
use rustc_type_ir::TypeVisitableExt;
@@ -640,7 +781,7 @@
return t;
}
- self.resolve_obligations_as_possible();
+ self.select_obligations_where_possible();
self.infer_ctxt.resolve_vars_if_possible(t)
}
@@ -659,42 +800,58 @@
.to_chalk(self.interner)
}
- fn structurally_normalize_term(&mut self, term: Term<'a>) -> Term<'a> {
- if term.to_alias_term().is_none() {
- return term;
- }
-
- let new_infer = self.infer_ctxt.next_term_var_of_kind(term);
-
- self.register_obligation(Predicate::new(
- self.interner,
- Binder::dummy(PredicateKind::AliasRelate(
- term,
- new_infer,
- AliasRelationDirection::Equate,
- )),
- ));
- self.resolve_obligations_as_possible();
- let res = self.infer_ctxt.resolve_vars_if_possible(new_infer);
- if res == new_infer { term } else { res }
+ fn structurally_normalize_term(&mut self, term: Term<'db>) -> Term<'db> {
+ self.infer_ctxt
+ .at(&ObligationCause::new(), self.param_env)
+ .structurally_normalize_term(term, &mut self.fulfillment_cx)
+ .unwrap_or(term)
}
- pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot<'a> {
+ /// Try to resolve `ty` to a structural type, normalizing aliases.
+ ///
+ /// In case there is still ambiguity, the returned type may be an inference
+ /// variable. This is different from `structurally_resolve_type` which errors
+ /// in this case.
+ pub(crate) fn try_structurally_resolve_type(
+ &mut self,
+ ty: crate::next_solver::Ty<'db>,
+ ) -> crate::next_solver::Ty<'db> {
+ if let crate::next_solver::TyKind::Alias(..) = ty.kind() {
+ // We need to use a separate variable here as otherwise the temporary for
+ // `self.fulfillment_cx.borrow_mut()` is alive in the `Err` branch, resulting
+ // in a reentrant borrow, causing an ICE.
+ let result = self
+ .infer_ctxt
+ .at(&ObligationCause::misc(), self.param_env)
+ .structurally_normalize_ty(ty, &mut self.fulfillment_cx);
+ match result {
+ Ok(normalized_ty) => normalized_ty,
+ Err(_errors) => crate::next_solver::Ty::new_error(self.interner, ErrorGuaranteed),
+ }
+ } else {
+ self.resolve_vars_with_obligations(ty)
+ }
+ }
+
+ pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot<'db> {
let ctxt_snapshot = self.infer_ctxt.start_snapshot();
let diverging_tys = self.diverging_tys.clone();
- let pending_obligations = self.pending_obligations.clone();
- InferenceTableSnapshot { ctxt_snapshot, pending_obligations, diverging_tys }
+ let obligations = self.fulfillment_cx.clone();
+ InferenceTableSnapshot { ctxt_snapshot, diverging_tys, obligations }
}
#[tracing::instrument(skip_all)]
- pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot<'a>) {
+ pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot<'db>) {
self.infer_ctxt.rollback_to(snapshot.ctxt_snapshot);
self.diverging_tys = snapshot.diverging_tys;
- self.pending_obligations = snapshot.pending_obligations;
+ self.fulfillment_cx = snapshot.obligations;
}
#[tracing::instrument(skip_all)]
- pub(crate) fn run_in_snapshot<T>(&mut self, f: impl FnOnce(&mut InferenceTable<'_>) -> T) -> T {
+ pub(crate) fn run_in_snapshot<T>(
+ &mut self,
+ f: impl FnOnce(&mut InferenceTable<'db>) -> T,
+ ) -> T {
let snapshot = self.snapshot();
let result = f(self);
self.rollback_to(snapshot);
@@ -703,7 +860,7 @@
pub(crate) fn commit_if_ok<T, E>(
&mut self,
- f: impl FnOnce(&mut InferenceTable<'_>) -> Result<T, E>,
+ f: impl FnOnce(&mut InferenceTable<'db>) -> Result<T, E>,
) -> Result<T, E> {
let snapshot = self.snapshot();
let result = f(self);
@@ -735,7 +892,7 @@
result.map(|m| m.1)
}
- pub(crate) fn register_obligation(&mut self, predicate: Predicate<'a>) {
+ pub(crate) fn register_obligation(&mut self, predicate: Predicate<'db>) {
let goal = next_solver::Goal {
param_env: self.trait_env.env.to_nextsolver(self.interner),
predicate,
@@ -746,127 +903,76 @@
#[tracing::instrument(level = "debug", skip(self))]
fn register_obligation_in_env(
&mut self,
- goal: next_solver::Goal<'a, next_solver::Predicate<'a>>,
+ goal: next_solver::Goal<'db, next_solver::Predicate<'db>>,
) {
let result = next_trait_solve_in_ctxt(&self.infer_ctxt, goal);
tracing::debug!(?result);
match result {
Ok((_, Certainty::Yes)) => {}
Err(rustc_type_ir::solve::NoSolution) => {}
- Ok((_, Certainty::Maybe(_))) => {
- self.pending_obligations.push(goal);
+ Ok((_, Certainty::Maybe { .. })) => {
+ self.fulfillment_cx.register_predicate_obligation(
+ &self.infer_ctxt,
+ Obligation::new(
+ self.interner,
+ ObligationCause::new(),
+ goal.param_env,
+ goal.predicate,
+ ),
+ );
}
}
}
- pub(crate) fn register_infer_ok<T>(&mut self, infer_ok: InferOk<'a, T>) {
- infer_ok.goals.into_iter().for_each(|goal| self.register_obligation_in_env(goal));
+ pub(crate) fn register_infer_ok<T>(&mut self, infer_ok: InferOk<'db, T>) -> T {
+ let InferOk { value, obligations } = infer_ok;
+ self.register_predicates(obligations);
+ value
}
- pub(crate) fn resolve_obligations_as_possible(&mut self) {
- let _span = tracing::info_span!("resolve_obligations_as_possible").entered();
- let mut changed = true;
- while mem::take(&mut changed) {
- let mut obligations = mem::take(&mut self.pending_obligations);
-
- for goal in obligations.drain(..) {
- tracing::debug!(obligation = ?goal);
-
- let result = next_trait_solve_in_ctxt(&self.infer_ctxt, goal);
- let (has_changed, certainty) = match result {
- Ok(result) => result,
- Err(_) => {
- continue;
- }
- };
-
- if matches!(has_changed, HasChanged::Yes) {
- changed = true;
- }
-
- match certainty {
- Certainty::Yes => {}
- Certainty::Maybe(_) => self.pending_obligations.push(goal),
- }
- }
- }
- }
-
- pub(crate) fn fudge_inference<T: TypeFoldable<Interner>>(
+ pub(crate) fn register_obligations(
&mut self,
- f: impl FnOnce(&mut Self) -> T,
- ) -> T {
- use chalk_ir::fold::TypeFolder;
+ obligations: Vec<crate::next_solver::Goal<'db, crate::next_solver::Predicate<'db>>>,
+ ) {
+ obligations.into_iter().for_each(|goal| self.register_obligation_in_env(goal));
+ }
- #[derive(chalk_derive::FallibleTypeFolder)]
- #[has_interner(Interner)]
- struct VarFudger<'a, 'b> {
- table: &'a mut InferenceTable<'b>,
- highest_known_var: InferenceVar,
- }
- impl TypeFolder<Interner> for VarFudger<'_, '_> {
- fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner> {
- self
- }
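+ /// Processes the pending obligations, proving whatever can be proven now; goals that
+ /// remain ambiguous stay registered in the fulfillment context.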
+ pub(crate) fn select_obligations_where_possible(&mut self) {
+ self.fulfillment_cx.select_where_possible(&self.infer_ctxt);
+ }
- fn interner(&self) -> Interner {
- Interner
- }
-
- fn fold_inference_ty(
- &mut self,
- var: chalk_ir::InferenceVar,
- kind: TyVariableKind,
- _outer_binder: chalk_ir::DebruijnIndex,
- ) -> chalk_ir::Ty<Interner> {
- if var < self.highest_known_var {
- var.to_ty(Interner, kind)
- } else {
- self.table.new_type_var()
- }
- }
-
- fn fold_inference_lifetime(
- &mut self,
- var: chalk_ir::InferenceVar,
- _outer_binder: chalk_ir::DebruijnIndex,
- ) -> chalk_ir::Lifetime<Interner> {
- if var < self.highest_known_var {
- var.to_lifetime(Interner)
- } else {
- self.table.new_lifetime_var()
- }
- }
-
- fn fold_inference_const(
- &mut self,
- ty: chalk_ir::Ty<Interner>,
- var: chalk_ir::InferenceVar,
- _outer_binder: chalk_ir::DebruijnIndex,
- ) -> chalk_ir::Const<Interner> {
- if var < self.highest_known_var {
- var.to_const(Interner, ty)
- } else {
- self.table.new_const_var(ty)
- }
- }
+ pub(super) fn register_predicate(
+ &mut self,
+ obligation: crate::next_solver::infer::traits::PredicateObligation<'db>,
+ ) {
+ if obligation.has_escaping_bound_vars() {
+ panic!("escaping bound vars in predicate {:?}", obligation);
}
- let snapshot = self.snapshot();
- let highest_known_var = self.new_type_var().inference_var(Interner).expect("inference_var");
- let result = f(self);
- self.rollback_to(snapshot);
- result
- .fold_with(&mut VarFudger { table: self, highest_known_var }, DebruijnIndex::INNERMOST)
+ self.fulfillment_cx.register_predicate_obligation(&self.infer_ctxt, obligation);
+ }
+
+ pub(super) fn register_predicates<I>(&mut self, obligations: I)
+ where
+ I: IntoIterator<Item = crate::next_solver::infer::traits::PredicateObligation<'db>>,
+ {
+ obligations.into_iter().for_each(|obligation| {
+ self.register_predicate(obligation);
+ });
}
pub(crate) fn callable_sig(
&mut self,
ty: &Ty,
num_args: usize,
- ) -> Option<(Option<FnTrait>, Vec<Ty>, Ty)> {
+ ) -> Option<(Option<FnTrait>, Vec<crate::next_solver::Ty<'db>>, crate::next_solver::Ty<'db>)>
+ {
match ty.callable_sig(self.db) {
- Some(sig) => Some((None, sig.params().to_vec(), sig.ret().clone())),
+ Some(sig) => Some((
+ None,
+ sig.params().iter().map(|param| param.to_nextsolver(self.interner)).collect(),
+ sig.ret().to_nextsolver(self.interner),
+ )),
None => {
let (f, args_ty, return_ty) = self.callable_sig_from_fn_trait(ty, num_args)?;
Some((Some(f), args_ty, return_ty))
@@ -878,7 +984,7 @@
&mut self,
ty: &Ty,
num_args: usize,
- ) -> Option<(FnTrait, Vec<Ty>, Ty)> {
+ ) -> Option<(FnTrait, Vec<crate::next_solver::Ty<'db>>, crate::next_solver::Ty<'db>)> {
for (fn_trait_name, output_assoc_name, subtraits) in [
(FnTrait::FnOnce, sym::Output, &[FnTrait::Fn, FnTrait::FnMut][..]),
(FnTrait::AsyncFnMut, sym::CallRefFuture, &[FnTrait::AsyncFn]),
@@ -898,7 +1004,7 @@
ParamKind::Lifetime => unreachable!("Tuple with lifetime parameter"),
ParamKind::Const(_) => unreachable!("Tuple with const parameter"),
};
- arg_tys.push(arg.clone());
+ arg_tys.push(arg.to_nextsolver(self.interner));
arg.cast(Interner)
})
.build();
@@ -920,7 +1026,8 @@
let goal: Goal = trait_ref.clone().cast(Interner);
if !self.try_obligation(goal.clone()).no_solution() {
self.register_obligation(goal.to_nextsolver(self.interner));
- let return_ty = self.normalize_projection_ty(projection);
+ let return_ty =
+ self.normalize_projection_ty(projection).to_nextsolver(self.interner);
for &fn_x in subtraits {
let fn_x_trait = fn_x.get_id(self.db, krate)?;
trait_ref.trait_id = to_chalk_trait_id(fn_x_trait);
@@ -969,8 +1076,8 @@
/// Whenever you lower a user-written type, you should call this.
pub(crate) fn process_user_written_ty<T, U>(&mut self, ty: T) -> T
where
- T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + ChalkToNextSolver<'a, U>,
- U: NextSolverToChalk<'a, T> + rustc_type_ir::TypeFoldable<DbInterner<'a>>,
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + ChalkToNextSolver<'db, U>,
+ U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
self.process_remote_user_written_ty(ty)
// FIXME: Register a well-formed obligation.
@@ -980,13 +1087,14 @@
/// while `process_user_written_ty()` should (but doesn't currently).
pub(crate) fn process_remote_user_written_ty<T, U>(&mut self, ty: T) -> T
where
- T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + ChalkToNextSolver<'a, U>,
- U: NextSolverToChalk<'a, T> + rustc_type_ir::TypeFoldable<DbInterner<'a>>,
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + ChalkToNextSolver<'db, U>,
+ U: NextSolverToChalk<'db, T> + rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
let ty = self.insert_type_vars(ty);
// See https://github.com/rust-lang/rust/blob/cdb45c87e2cd43495379f7e867e3cc15dcee9f93/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs#L487-L495:
// Even though the new solver only lazily normalizes usually, here we eagerly normalize so that not everything needs
// to normalize before inspecting the `TyKind`.
+ // FIXME(next-solver): We should not deeply normalize here, only shallowly.
self.normalize_associated_types_in(ty)
}
@@ -1074,7 +1182,10 @@
impl fmt::Debug for InferenceTable<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("InferenceTable").finish()
+ f.debug_struct("InferenceTable")
+ .field("type_variable_storage", &self.infer_ctxt.inner.borrow().type_variable_storage)
+ .field("fulfillment_cx", &self.fulfillment_cx)
+ .finish()
}
}
@@ -1091,7 +1202,7 @@
};
use rustc_type_ir::{FloatVid, IntVid, TyVid};
- #[derive(Copy, Clone, PartialEq, Eq)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(super) enum VarKind {
Ty(TyVariableKind),
Const,
@@ -1264,62 +1375,3 @@
}
}
}
-
-/// This expects its input to be resolved.
-struct Normalizer<'a, 'b> {
- table: &'a mut InferenceTable<'b>,
-}
-
-impl<'db> Normalizer<'_, 'db> {
- fn normalize_alias_term(
- &mut self,
- alias_term: crate::next_solver::Term<'db>,
- ) -> crate::next_solver::Term<'db> {
- let infer_term = self.table.infer_ctxt.next_term_var_of_kind(alias_term);
- let obligation = crate::next_solver::Predicate::new(
- self.table.interner,
- crate::next_solver::Binder::dummy(crate::next_solver::PredicateKind::AliasRelate(
- alias_term,
- infer_term,
- rustc_type_ir::AliasRelationDirection::Equate,
- )),
- );
- self.table.register_obligation(obligation);
- let term = self.table.resolve_vars_with_obligations(infer_term);
- // Now normalize the result, because maybe it contains more aliases.
- match term {
- Term::Ty(term) => term.super_fold_with(self).into(),
- Term::Const(term) => term.super_fold_with(self).into(),
- }
- }
-}
-
-impl<'db> rustc_type_ir::TypeFolder<DbInterner<'db>> for Normalizer<'_, 'db> {
- fn cx(&self) -> DbInterner<'db> {
- self.table.interner
- }
-
- fn fold_ty(&mut self, ty: crate::next_solver::Ty<'db>) -> crate::next_solver::Ty<'db> {
- if !ty.has_aliases() {
- return ty;
- }
-
- let crate::next_solver::TyKind::Alias(..) = ty.kind() else {
- return ty.super_fold_with(self);
- };
- // FIXME: Handle escaping bound vars by replacing them with placeholders (relevant to when we handle HRTB only).
- self.normalize_alias_term(ty.into()).expect_type()
- }
-
- fn fold_const(&mut self, ct: crate::next_solver::Const<'db>) -> crate::next_solver::Const<'db> {
- if !ct.has_aliases() {
- return ct;
- }
-
- let crate::next_solver::ConstKind::Unevaluated(..) = ct.kind() else {
- return ct.super_fold_with(self);
- };
- // FIXME: Handle escaping bound vars by replacing them with placeholders (relevant to when we handle HRTB only).
- self.normalize_alias_term(ct.into()).expect_const()
- }
-}
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 2020a8b..f21673c 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -28,7 +28,6 @@
DbInterner, GenericArgs, ParamEnv, Ty, TyKind, TypingMode,
infer::{DbInternerInferExt, traits::ObligationCause},
mapping::{ChalkToNextSolver, convert_args_for_result},
- project::solve_normalize::deeply_normalize,
},
};
@@ -172,7 +171,7 @@
let cx = LayoutCx::new(dl);
let infer_ctxt = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let cause = ObligationCause::dummy();
- let ty = deeply_normalize(infer_ctxt.at(&cause, ParamEnv::empty()), ty).unwrap_or(ty);
+ let ty = infer_ctxt.at(&cause, ParamEnv::empty()).deeply_normalize(ty).unwrap_or(ty);
let result = match ty.kind() {
TyKind::Adt(def, args) => {
match def.inner().id {
@@ -335,8 +334,8 @@
.clone()
.substitute(
Interner,
- ClosureSubst(&convert_args_for_result(interner, args.inner()))
- .parent_subst(),
+ &ClosureSubst(&convert_args_for_result(interner, args.inner()))
+ .parent_subst(db),
)
.to_nextsolver(interner);
db.layout_of_ty(ty, trait_env.clone())
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 37d5347..451622e 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -118,7 +118,7 @@
Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, InferenceResult,
InferenceTyDiagnosticSource, OverloadedDeref, PointerCast,
cast::CastError,
- closure::{CaptureKind, CapturedItem},
+ closure::analysis::{CaptureKind, CapturedItem},
could_coerce, could_unify, could_unify_deeply,
};
pub use interner::Interner;
@@ -136,8 +136,8 @@
pub use target_feature::TargetFeatures;
pub use traits::TraitEnvironment;
pub use utils::{
- Unsafety, all_super_traits, direct_super_traits, is_fn_unsafe_to_call,
- target_feature_is_safe_in_target,
+ TargetFeatureIsSafeInTarget, Unsafety, all_super_traits, direct_super_traits,
+ is_fn_unsafe_to_call, target_feature_is_safe_in_target,
};
pub use variance::Variance;
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 79f78c5..4d5172f 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -299,6 +299,29 @@
const_type,
self.resolver.krate(),
),
+ hir_def::hir::Expr::UnaryOp { expr: inner_expr, op: hir_def::hir::UnaryOp::Neg } => {
+ if let hir_def::hir::Expr::Literal(literal) = &self.store[*inner_expr] {
+ // Only handle negation for signed integers and floats
+ match literal {
+ hir_def::hir::Literal::Int(_, _) | hir_def::hir::Literal::Float(_, _) => {
+ if let Some(negated_literal) = literal.clone().negate() {
+ intern_const_ref(
+ self.db,
+ &negated_literal.into(),
+ const_type,
+ self.resolver.krate(),
+ )
+ } else {
+ unknown_const(const_type)
+ }
+ }
+ // For unsigned integers, chars, bools, etc., negation is not meaningful
+ _ => unknown_const(const_type),
+ }
+ } else {
+ unknown_const(const_type)
+ }
+ }
_ => unknown_const(const_type),
}
}
diff --git a/crates/hir-ty/src/lower_nextsolver.rs b/crates/hir-ty/src/lower_nextsolver.rs
index 5c29bef..0076446 100644
--- a/crates/hir-ty/src/lower_nextsolver.rs
+++ b/crates/hir-ty/src/lower_nextsolver.rs
@@ -17,6 +17,7 @@
use base_db::Crate;
use either::Either;
+use hir_def::item_tree::FieldsShape;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstParamId, EnumVariantId, FunctionId, GenericDefId,
GenericParamId, ImplId, ItemContainerId, LocalFieldId, Lookup, StructId, TraitId, TypeAliasId,
@@ -34,6 +35,7 @@
TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId,
},
};
+use hir_def::{ConstId, StaticId};
use hir_expand::name::Name;
use intern::sym;
use la_arena::{Arena, ArenaMap, Idx};
@@ -53,6 +55,7 @@
use stdx::never;
use triomphe::Arc;
+use crate::ValueTyDefId;
use crate::{
FnAbi, ImplTraitId, Interner, ParamKind, TyDefId, TyLoweringDiagnostic,
TyLoweringDiagnosticKind,
@@ -206,6 +209,10 @@
}
}
+ pub(crate) fn set_lifetime_elision(&mut self, lifetime_elision: LifetimeElisionKind<'db>) {
+ self.lifetime_elision = lifetime_elision;
+ }
+
pub(crate) fn with_debruijn<T>(
&mut self,
debruijn: DebruijnIndex,
@@ -278,6 +285,29 @@
const_type,
self.resolver.krate(),
),
+ hir_def::hir::Expr::UnaryOp { expr: inner_expr, op: hir_def::hir::UnaryOp::Neg } => {
+ if let hir_def::hir::Expr::Literal(literal) = &self.store[*inner_expr] {
+ // Only handle negation for signed integers and floats
+ match literal {
+ hir_def::hir::Literal::Int(_, _) | hir_def::hir::Literal::Float(_, _) => {
+ if let Some(negated_literal) = literal.clone().negate() {
+ intern_const_ref(
+ self.db,
+ &negated_literal.into(),
+ const_type,
+ self.resolver.krate(),
+ )
+ } else {
+ unknown_const(const_type)
+ }
+ }
+ // For unsigned integers, chars, bools, etc., negation is not meaningful
+ _ => unknown_const(const_type),
+ }
+ } else {
+ unknown_const(const_type)
+ }
+ }
_ => unknown_const(const_type),
}
}
@@ -783,7 +813,7 @@
},
None => Region::new_static(self.interner),
};
- Ty::new_dynamic(self.interner, bounds, region, rustc_type_ir::DynKind::Dyn)
+ Ty::new_dynamic(self.interner, bounds, region)
} else {
// FIXME: report error
// (additional non-auto traits, associated type rebound, or no resolved trait)
@@ -958,6 +988,105 @@
}
}
+/// Build the declared type of a function. This should not need to look at the
+/// function body.
+fn type_for_fn<'db>(db: &'db dyn HirDatabase, def: FunctionId) -> EarlyBinder<'db, Ty<'db>> {
+ let interner = DbInterner::new_with(db, None, None);
+ EarlyBinder::bind(Ty::new_fn_def(
+ interner,
+ CallableDefId::FunctionId(def).into(),
+ GenericArgs::identity_for_item(interner, def.into()),
+ ))
+}
+
+/// Build the declared type of a const.
+fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'db, Ty<'db>> {
+ let resolver = def.resolver(db);
+ let data = db.const_signature(def);
+ let parent = def.loc(db).container;
+ let mut ctx = TyLoweringContext::new(
+ db,
+ &resolver,
+ &data.store,
+ def.into(),
+ LifetimeElisionKind::AnonymousReportError,
+ );
+ ctx.set_lifetime_elision(LifetimeElisionKind::for_const(ctx.interner, parent));
+ EarlyBinder::bind(ctx.lower_ty(data.type_ref))
+}
+
+/// Build the declared type of a static.
+fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder<'db, Ty<'db>> {
+ let resolver = def.resolver(db);
+ let module = resolver.module();
+ let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
+ let data = db.static_signature(def);
+ let parent = def.loc(db).container;
+ let mut ctx = TyLoweringContext::new(
+ db,
+ &resolver,
+ &data.store,
+ def.into(),
+ LifetimeElisionKind::AnonymousReportError,
+ );
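+ // Elided lifetimes in a `static` item's type default to `'static`.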
+ ctx.set_lifetime_elision(LifetimeElisionKind::Elided(Region::new_static(ctx.interner)));
+ EarlyBinder::bind(ctx.lower_ty(data.type_ref))
+}
+
+/// Build the type of a tuple struct constructor.
+fn type_for_struct_constructor<'db>(
+ db: &'db dyn HirDatabase,
+ def: StructId,
+) -> Option<EarlyBinder<'db, Ty<'db>>> {
+ let struct_data = def.fields(db);
+ match struct_data.shape {
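+ // Record structs have no constructor value, unit structs are values of the ADT type
+ // itself, and tuple structs get a `fn`-def constructor type.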
+ FieldsShape::Record => None,
+ FieldsShape::Unit => Some(type_for_adt(db, def.into())),
+ FieldsShape::Tuple => {
+ let interner = DbInterner::new_with(db, None, None);
+ Some(EarlyBinder::bind(Ty::new_fn_def(
+ interner,
+ CallableDefId::StructId(def).into(),
+ GenericArgs::identity_for_item(interner, def.into()),
+ )))
+ }
+ }
+}
+
+/// Build the type of a tuple enum variant constructor.
+fn type_for_enum_variant_constructor<'db>(
+ db: &'db dyn HirDatabase,
+ def: EnumVariantId,
+) -> Option<EarlyBinder<'db, Ty<'db>>> {
+ let struct_data = def.fields(db);
+ match struct_data.shape {
+ FieldsShape::Record => None,
+ FieldsShape::Unit => Some(type_for_adt(db, def.loc(db).parent.into())),
+ FieldsShape::Tuple => {
+ let interner = DbInterner::new_with(db, None, None);
+ Some(EarlyBinder::bind(Ty::new_fn_def(
+ interner,
+ CallableDefId::EnumVariantId(def).into(),
+ GenericArgs::identity_for_item(interner, def.loc(db).parent.into()),
+ )))
+ }
+ }
+}
+
+pub(crate) fn value_ty_query<'db>(
+ db: &'db dyn HirDatabase,
+ def: ValueTyDefId,
+) -> Option<EarlyBinder<'db, Ty<'db>>> {
+ match def {
+ ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)),
+ ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
+ ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())),
+ ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
+ ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)),
+ ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)),
+ }
+}
+
pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>(
db: &'db dyn HirDatabase,
t: TypeAliasId,
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index ac85bf7..7fa3d31 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -24,7 +24,7 @@
use crate::{
AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, GenericArgData,
Goal, InEnvironment, Interner, Mutability, Scalar, Substitution, TraitEnvironment, TraitRef,
- TraitRefExt, Ty, TyBuilder, TyExt, TyKind, TyVariableKind, VariableKind, WhereClause,
+ TraitRefExt, Ty, TyBuilder, TyExt, TyKind, VariableKind, WhereClause,
autoderef::{self, AutoderefKind},
db::HirDatabase,
from_chalk_trait_id, from_foreign_def_id,
@@ -164,7 +164,7 @@
rustc_ast_ir::Mutability::Not => TyFingerprint::RawPtr(Mutability::Not),
},
TyKind::Foreign(def) => TyFingerprint::ForeignType(crate::to_foreign_def_id(def.0)),
- TyKind::Dynamic(bounds, _, _) => {
+ TyKind::Dynamic(bounds, _) => {
let trait_ref = bounds
.as_slice()
.iter()
@@ -622,18 +622,23 @@
}
impl ReceiverAdjustments {
- pub(crate) fn apply(&self, table: &mut InferenceTable<'_>, ty: Ty) -> (Ty, Vec<Adjustment>) {
- let mut ty = table.structurally_resolve_type(&ty);
+ pub(crate) fn apply(
+ &self,
+ table: &mut InferenceTable<'_>,
+ mut ty: Ty,
+ ) -> (Ty, Vec<Adjustment>) {
let mut adjust = Vec::new();
+ let mut autoderef = table.autoderef(ty.to_nextsolver(table.interner));
+ autoderef.next();
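+ // The first step yielded by the autoderef iterator is the receiver type itself
+ // (zero derefs); consume it so the loop below replays exactly `self.autoderefs` steps.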
for _ in 0..self.autoderefs {
- match autoderef::autoderef_step(table, ty.clone(), true, false) {
+ match autoderef.next() {
None => {
never!("autoderef not possible for {:?}", ty);
ty = TyKind::Error.intern(Interner);
break;
}
- Some((kind, new_ty)) => {
- ty = new_ty.clone();
+ Some((new_ty, _)) => {
+ ty = new_ty.to_chalk(autoderef.table.interner);
let mutbl = match self.autoref {
Some(AutorefOrPtrAdjustment::Autoref(m)) => Some(m),
Some(AutorefOrPtrAdjustment::ToConstPtr) => Some(Mutability::Not),
@@ -641,11 +646,11 @@
None => None,
};
adjust.push(Adjustment {
- kind: Adjust::Deref(match kind {
+ kind: Adjust::Deref(match autoderef.steps().last().unwrap().1 {
AutoderefKind::Overloaded => Some(OverloadedDeref(mutbl)),
AutoderefKind::Builtin => None,
}),
- target: new_ty,
+ target: ty.clone(),
});
}
}
@@ -1282,17 +1287,20 @@
name: Option<&Name>,
callback: &mut dyn MethodCandidateCallback,
) -> ControlFlow<()> {
+ let interner = table.interner;
let receiver_ty = table.instantiate_canonical_ns(receiver_ty);
- let receiver_ty: crate::Ty = receiver_ty.to_chalk(table.interner);
+ let receiver_ty: crate::Ty = receiver_ty.to_chalk(interner);
// We're looking for methods with *receiver* type receiver_ty. These could
// be found in any of the derefs of receiver_ty, so we have to go through
// that, including raw derefs.
table.run_in_snapshot(|table| {
let mut autoderef =
- autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true, true);
+ autoderef::Autoderef::new_no_tracking(table, receiver_ty.to_nextsolver(interner))
+ .include_raw_pointers()
+ .use_receiver_trait();
while let Some((self_ty, _)) = autoderef.next() {
iterate_inherent_methods(
- &self_ty,
+ &self_ty.to_chalk(interner),
autoderef.table,
name,
Some(&receiver_ty),
@@ -1308,15 +1316,18 @@
})?;
table.run_in_snapshot(|table| {
let mut autoderef =
- autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true, true);
+ autoderef::Autoderef::new_no_tracking(table, receiver_ty.to_nextsolver(interner))
+ .include_raw_pointers()
+ .use_receiver_trait();
while let Some((self_ty, _)) = autoderef.next() {
- if matches!(self_ty.kind(Interner), TyKind::InferenceVar(_, TyVariableKind::General)) {
+ if matches!(self_ty.kind(), crate::next_solver::TyKind::Infer(rustc_type_ir::TyVar(_)))
+ {
// don't try to resolve methods on unknown types
return ControlFlow::Continue(());
}
iterate_trait_method_candidates(
- &self_ty,
+ &self_ty.to_chalk(interner),
autoderef.table,
traits_in_scope,
name,
@@ -1760,7 +1771,8 @@
for pred in infer_ok.into_obligations() {
ctxt.register_predicate_obligation(&table.infer_ctxt, pred);
}
- check_that!(ctxt.select_all_or_error(&table.infer_ctxt).is_empty());
+ // FIXME: Are we doing this correctly? Probably better to follow rustc more closely.
+ check_that!(ctxt.select_where_possible(&table.infer_ctxt).is_empty());
}
check_that!(table.unify(receiver_ty, &expected_receiver));
@@ -1937,11 +1949,10 @@
) -> Vec<(next_solver::Canonical<'db, crate::next_solver::Ty<'db>>, ReceiverAdjustments)> {
let interner = table.interner;
let mut deref_chain = Vec::new();
- let mut autoderef =
- autoderef::Autoderef::new_no_tracking(table, ty.to_chalk(interner), false, true);
+ let mut autoderef = autoderef::Autoderef::new_no_tracking(table, ty).use_receiver_trait();
while let Some((ty, derefs)) = autoderef.next() {
deref_chain.push((
- autoderef.table.canonicalize(ty.to_nextsolver(interner)),
+ autoderef.table.canonicalize(ty),
ReceiverAdjustments { autoref: None, autoderefs: derefs, unsize_array: false },
));
}
diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs
index 52df851..2c09fb9 100644
--- a/crates/hir-ty/src/mir/borrowck.rs
+++ b/crates/hir-ty/src/mir/borrowck.rs
@@ -113,8 +113,13 @@
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
- let parent_subst = ClosureSubst(subst).parent_subst();
- captures.get(f).expect("broken closure field").ty.clone().substitute(Interner, parent_subst)
+ let parent_subst = ClosureSubst(subst).parent_subst(db);
+ captures
+ .get(f)
+ .expect("broken closure field")
+ .ty
+ .clone()
+ .substitute(Interner, &parent_subst)
}
}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index cddd1fb..3e658cb 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -712,13 +712,13 @@
let InternedClosure(def, _) = self.db.lookup_intern_closure(c.into());
let infer = self.db.infer(def);
let (captures, _) = infer.closure_info(&c);
- let parent_subst = ClosureSubst(subst).parent_subst();
+ let parent_subst = ClosureSubst(subst).parent_subst(self.db);
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
- .substitute(Interner, parent_subst)
+ .substitute(Interner, &parent_subst)
},
self.crate_id,
);
@@ -2446,7 +2446,7 @@
| TyKind::Foreign(_)
| TyKind::Error(_)
| TyKind::Placeholder(_)
- | TyKind::Dynamic(_, _, _)
+ | TyKind::Dynamic(_, _)
| TyKind::Alias(_, _)
| TyKind::Bound(_, _)
| TyKind::Infer(_)
@@ -2772,7 +2772,7 @@
TyKind::Closure(closure, subst) => self.exec_closure(
*closure,
func_data,
- &Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()),
+ &ClosureSubst(subst).parent_subst(self.db),
destination,
&args[1..],
locals,
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index e27d334..f67778b 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -4,6 +4,7 @@
use std::cmp::{self, Ordering};
use chalk_ir::TyKind;
+use hir_def::signatures::FunctionSignature;
use hir_def::{
CrateRootModuleId,
builtin_type::{BuiltinInt, BuiltinUint},
@@ -63,17 +64,7 @@
let function_data = self.db.function_signature(def);
let attrs = self.db.attrs(def.into());
- let is_intrinsic = attrs.by_key(sym::rustc_intrinsic).exists()
- // Keep this around for a bit until extern "rustc-intrinsic" abis are no longer used
- || (match &function_data.abi {
- Some(abi) => *abi == sym::rust_dash_intrinsic,
- None => match def.lookup(self.db).container {
- hir_def::ItemContainerId::ExternBlockId(block) => {
- block.abi(self.db) == Some(sym::rust_dash_intrinsic)
- }
- _ => false,
- },
- });
+ let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def);
if is_intrinsic {
return self.exec_intrinsic(
@@ -194,7 +185,7 @@
let infer = self.db.infer(closure_owner);
let (captures, _) = infer.closure_info(id);
let layout = self.layout(self_ty.to_nextsolver(interner))?;
- let ty_iter = captures.iter().map(|c| c.ty(subst));
+ let ty_iter = captures.iter().map(|c| c.ty(self.db, subst));
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
TyKind::Tuple(_, subst) => {
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs
index 5a56d99..2a6e3a1 100644
--- a/crates/hir-ty/src/mir/eval/tests.rs
+++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -12,34 +12,37 @@
use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
- let module_id = db.module_for_file(file_id.file_id(db));
- let def_map = module_id.def_map(db);
- let scope = &def_map[module_id.local_id].scope;
- let func_id = scope
- .declarations()
- .find_map(|x| match x {
- hir_def::ModuleDefId::FunctionId(x) => {
- if db.function_signature(x).name.display(db, Edition::CURRENT).to_string() == "main"
- {
- Some(x)
- } else {
- None
+ salsa::attach(db, || {
+ let module_id = db.module_for_file(file_id.file_id(db));
+ let def_map = module_id.def_map(db);
+ let scope = &def_map[module_id.local_id].scope;
+ let func_id = scope
+ .declarations()
+ .find_map(|x| match x {
+ hir_def::ModuleDefId::FunctionId(x) => {
+ if db.function_signature(x).name.display(db, Edition::CURRENT).to_string()
+ == "main"
+ {
+ Some(x)
+ } else {
+ None
+ }
}
- }
- _ => None,
- })
- .expect("no main function found");
- let body = db
- .monomorphized_mir_body(
- func_id.into(),
- Substitution::empty(Interner),
- db.trait_environment(func_id.into()),
- )
- .map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
+ _ => None,
+ })
+ .expect("no main function found");
+ let body = db
+ .monomorphized_mir_body(
+ func_id.into(),
+ Substitution::empty(Interner),
+ db.trait_environment(func_id.into()),
+ )
+ .map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
- let (result, output) = salsa::attach(db, || interpret_mir(db, body, false, None))?;
- result?;
- Ok((output.stdout().into_owned(), output.stderr().into_owned()))
+ let (result, output) = interpret_mir(db, body, false, None)?;
+ result?;
+ Ok((output.stdout().into_owned(), output.stderr().into_owned()))
+ })
}
fn check_pass(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
@@ -53,43 +56,60 @@
) {
let _tracing = setup_tracing();
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
- let file_id = *file_ids.last().unwrap();
- let x = eval_main(&db, file_id);
- match x {
- Err(e) => {
- let mut err = String::new();
- let line_index = |size: TextSize| {
- let mut size = u32::from(size) as usize;
- let lines = ra_fixture.lines().enumerate();
- for (i, l) in lines {
- if let Some(x) = size.checked_sub(l.len()) {
- size = x;
- } else {
- return (i, size);
+ salsa::attach(&db, || {
+ let file_id = *file_ids.last().unwrap();
+ let x = eval_main(&db, file_id);
+ match x {
+ Err(e) => {
+ let mut err = String::new();
+ let line_index = |size: TextSize| {
+ let mut size = u32::from(size) as usize;
+ let lines = ra_fixture.lines().enumerate();
+ for (i, l) in lines {
+ if let Some(x) = size.checked_sub(l.len()) {
+ size = x;
+ } else {
+ return (i, size);
+ }
}
- }
- (usize::MAX, size)
- };
- let span_formatter = |file, range: TextRange| {
- format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end()))
- };
- let krate = db.module_for_file(file_id.file_id(&db)).krate();
- e.pretty_print(&mut err, &db, span_formatter, DisplayTarget::from_crate(&db, krate))
+ (usize::MAX, size)
+ };
+ let span_formatter = |file, range: TextRange| {
+ format!(
+ "{:?} {:?}..{:?}",
+ file,
+ line_index(range.start()),
+ line_index(range.end())
+ )
+ };
+ let krate = db.module_for_file(file_id.file_id(&db)).krate();
+ e.pretty_print(
+ &mut err,
+ &db,
+ span_formatter,
+ DisplayTarget::from_crate(&db, krate),
+ )
.unwrap();
- panic!("Error in interpreting: {err}");
+ panic!("Error in interpreting: {err}");
+ }
+ Ok((stdout, stderr)) => {
+ assert_eq!(stdout, expected_stdout);
+ assert_eq!(stderr, expected_stderr);
+ }
}
- Ok((stdout, stderr)) => {
- assert_eq!(stdout, expected_stdout);
- assert_eq!(stderr, expected_stderr);
- }
- }
+ })
}
fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic: &str) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
- let file_id = *file_ids.last().unwrap();
- let e = eval_main(&db, file_id).unwrap_err();
- assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), expected_panic);
+ salsa::attach(&db, || {
+ let file_id = *file_ids.last().unwrap();
+ let e = eval_main(&db, file_id).unwrap_err();
+ assert_eq!(
+ e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")),
+ expected_panic
+ );
+ })
}
fn check_error_with(
@@ -97,9 +117,11 @@
expect_err: impl FnOnce(MirEvalError) -> bool,
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
- let file_id = *file_ids.last().unwrap();
- let e = eval_main(&db, file_id).unwrap_err();
- assert!(expect_err(e));
+ salsa::attach(&db, || {
+ let file_id = *file_ids.last().unwrap();
+ let e = eval_main(&db, file_id).unwrap_err();
+ assert!(expect_err(e));
+ })
}
#[test]
@@ -492,7 +514,7 @@
fn from_fn() {
check_pass(
r#"
-//- minicore: fn, iterator
+//- minicore: fn, iterator, sized
struct FromFn<F>(F);
impl<T, F: FnMut() -> Option<T>> Iterator for FromFn<F> {
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 45a1131..50e416a 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -2065,7 +2065,7 @@
},
});
ctx.result.param_locals.push(closure_local);
- let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else {
+ let Some(sig) = ClosureSubst(substs).sig_ty(db).callable_sig(db) else {
implementation_error!("closure has not callable sig");
};
let resolver_guard = ctx.resolver.update_to_inner_scope(db, owner, expr);
diff --git a/crates/hir-ty/src/next_solver.rs b/crates/hir-ty/src/next_solver.rs
index 99a501b..073a029 100644
--- a/crates/hir-ty/src/next_solver.rs
+++ b/crates/hir-ty/src/next_solver.rs
@@ -9,14 +9,17 @@
mod generic_arg;
pub mod generics;
pub mod infer;
+pub(crate) mod inspect;
pub mod interner;
mod ir_print;
pub mod mapping;
+mod normalize;
+pub mod obligation_ctxt;
mod opaques;
pub mod predicate;
-pub(crate) mod project;
mod region;
mod solver;
+mod structural_normalize;
mod ty;
pub mod util;
@@ -37,11 +40,9 @@
pub type CanonicalVarKind<'db> = rustc_type_ir::CanonicalVarKind<DbInterner<'db>>;
pub type CanonicalQueryInput<'db, V> = rustc_type_ir::CanonicalQueryInput<DbInterner<'db>, V>;
pub type AliasTy<'db> = rustc_type_ir::AliasTy<DbInterner<'db>>;
+pub type FnSig<'db> = rustc_type_ir::FnSig<DbInterner<'db>>;
pub type PolyFnSig<'db> = Binder<'db, rustc_type_ir::FnSig<DbInterner<'db>>>;
pub type TypingMode<'db> = rustc_type_ir::TypingMode<DbInterner<'db>>;
-
-#[cfg(feature = "in-rust-tree")]
-use rustc_data_structure::sorted_map::index_map as indexmap;
-
-pub type FxIndexMap<K, V> =
- indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
+pub type TypeError<'db> = rustc_type_ir::error::TypeError<DbInterner<'db>>;
+pub type QueryResult<'db> = rustc_type_ir::solve::QueryResult<DbInterner<'db>>;
+pub type FxIndexMap<K, V> = rustc_type_ir::data_structures::IndexMap<K, V>;
diff --git a/crates/hir-ty/src/next_solver/consts.rs b/crates/hir-ty/src/next_solver/consts.rs
index 23789b0..7ebefa7 100644
--- a/crates/hir-ty/src/next_solver/consts.rs
+++ b/crates/hir-ty/src/next_solver/consts.rs
@@ -4,16 +4,19 @@
use hir_def::{ConstParamId, TypeOrConstParamId};
use intern::{Interned, Symbol};
-use rustc_ast_ir::try_visit;
-use rustc_ast_ir::visit::VisitorResult;
+use rustc_ast_ir::{try_visit, visit::VisitorResult};
use rustc_type_ir::{
BoundVar, FlagComputation, Flags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable,
- TypeVisitable, WithCachedTypeInfo,
- inherent::{IntoKind, PlaceholderLike},
+ TypeVisitable, TypeVisitableExt, WithCachedTypeInfo,
+ inherent::{IntoKind, ParamEnv as _, PlaceholderLike, SliceLike},
relate::Relate,
};
-use crate::{ConstScalar, MemoryMap, interner::InternedWrapperNoDebug};
+use crate::{
+ ConstScalar, MemoryMap,
+ interner::InternedWrapperNoDebug,
+ next_solver::{ClauseKind, ParamEnv},
+};
use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, Ty};
@@ -33,6 +36,8 @@
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
+ #[cfg(feature = "in-rust-tree")]
+ stable_hash: ena::fingerprint::Fingerprint::ZERO,
};
Const::new_(interner.db(), InternedWrapperNoDebug(cached))
}
@@ -96,6 +101,40 @@
}
}
+impl ParamConst {
+ pub fn find_const_ty_from_env<'db>(self, env: ParamEnv<'db>) -> Ty<'db> {
+ let mut candidates = env.caller_bounds().iter().filter_map(|clause| {
+ // `ConstArgHasType` are never desugared to be higher ranked.
+ match clause.kind().skip_binder() {
+ ClauseKind::ConstArgHasType(param_ct, ty) => {
+ assert!(!(param_ct, ty).has_escaping_bound_vars());
+
+ match param_ct.kind() {
+ ConstKind::Param(param_ct) if param_ct.index == self.index => Some(ty),
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ });
+
+ // N.B. it may be tempting to fix ICEs by making this function return
+ // `Option<Ty<'db>>` instead of `Ty<'db>`; however, this is generally
+ // considered to be a bandaid solution, since it hides more important
+ // underlying issues with how we construct generics and predicates of
+ // items. It's advised to fix the underlying issue rather than trying
+ // to modify this function.
+ let ty = candidates.next().unwrap_or_else(|| {
+ panic!("cannot find `{self:?}` in param-env: {env:#?}");
+ });
+ assert!(
+ candidates.next().is_none(),
+ "did not expect duplicate `ConstParamHasTy` for `{self:?}` in param-env: {env:#?}"
+ );
+ ty
+ }
+}
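+
+// Illustrative sketch (an assumption about usage, not code from this PR): for a function like
+// `fn foo<const N: usize>()`, the param-env is expected to carry a `ConstArgHasType` clause
+// pairing the const param with `usize`, so `ParamConst { index: 0, .. }.find_const_ty_from_env(env)`
+// would return `usize`. The error reporting in `fulfill/errors.rs` below relies on exactly this
+// lookup when building `ConstArgHasWrongType` diagnostics.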
+
/// A type-level constant value.
///
/// Represents a typed, fully evaluated constant.
diff --git a/crates/hir-ty/src/next_solver/def_id.rs b/crates/hir-ty/src/next_solver/def_id.rs
index a9c572d..1ae59be 100644
--- a/crates/hir-ty/src/next_solver/def_id.rs
+++ b/crates/hir-ty/src/next_solver/def_id.rs
@@ -17,7 +17,7 @@
Enum(EnumVariantId),
}
-#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
+#[derive(PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum SolverDefId {
AdtId(AdtId),
ConstId(ConstId),
@@ -32,6 +32,64 @@
Ctor(Ctor),
}
+impl std::fmt::Debug for SolverDefId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let interner = DbInterner::conjure();
+ let db = interner.db;
+ match *self {
+ SolverDefId::AdtId(AdtId::StructId(id)) => {
+ f.debug_tuple("AdtId").field(&db.struct_signature(id).name.as_str()).finish()
+ }
+ SolverDefId::AdtId(AdtId::EnumId(id)) => {
+ f.debug_tuple("AdtId").field(&db.enum_signature(id).name.as_str()).finish()
+ }
+ SolverDefId::AdtId(AdtId::UnionId(id)) => {
+ f.debug_tuple("AdtId").field(&db.union_signature(id).name.as_str()).finish()
+ }
+ SolverDefId::ConstId(id) => f
+ .debug_tuple("ConstId")
+ .field(&db.const_signature(id).name.as_ref().map_or("_", |name| name.as_str()))
+ .finish(),
+ SolverDefId::FunctionId(id) => {
+ f.debug_tuple("FunctionId").field(&db.function_signature(id).name.as_str()).finish()
+ }
+ SolverDefId::ImplId(id) => f.debug_tuple("ImplId").field(&id).finish(),
+ SolverDefId::StaticId(id) => {
+ f.debug_tuple("StaticId").field(&db.static_signature(id).name.as_str()).finish()
+ }
+ SolverDefId::TraitId(id) => {
+ f.debug_tuple("TraitId").field(&db.trait_signature(id).name.as_str()).finish()
+ }
+ SolverDefId::TypeAliasId(id) => f
+ .debug_tuple("TypeAliasId")
+ .field(&db.type_alias_signature(id).name.as_str())
+ .finish(),
+ SolverDefId::InternedClosureId(id) => {
+ f.debug_tuple("InternedClosureId").field(&id).finish()
+ }
+ SolverDefId::InternedCoroutineId(id) => {
+ f.debug_tuple("InternedCoroutineId").field(&id).finish()
+ }
+ SolverDefId::InternedOpaqueTyId(id) => {
+ f.debug_tuple("InternedOpaqueTyId").field(&id).finish()
+ }
+ SolverDefId::Ctor(Ctor::Struct(id)) => {
+ f.debug_tuple("Ctor").field(&db.struct_signature(id).name.as_str()).finish()
+ }
+ SolverDefId::Ctor(Ctor::Enum(id)) => {
+ let parent_enum = id.loc(db).parent;
+ f.debug_tuple("Ctor")
+ .field(&format_args!(
+ "\"{}::{}\"",
+ db.enum_signature(parent_enum).name.as_str(),
+ parent_enum.enum_variants(db).variant_name_by_id(id).unwrap().as_str()
+ ))
+ .finish()
+ }
+ }
+ }
+}
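+
+// Note (an assumption, based on the test changes in `mir/eval/tests.rs` in this diff): this Debug
+// impl reaches the database through `DbInterner::conjure()`, so it presumably only works while a
+// database is attached, which may be why the MIR eval tests above now wrap their bodies in
+// `salsa::attach(&db, || ...)`.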
+
impl_from!(
AdtId(StructId, EnumId, UnionId),
ConstId,
diff --git a/crates/hir-ty/src/next_solver/fulfill.rs b/crates/hir-ty/src/next_solver/fulfill.rs
index 4258f4c..34dff37 100644
--- a/crates/hir-ty/src/next_solver/fulfill.rs
+++ b/crates/hir-ty/src/next_solver/fulfill.rs
@@ -1,21 +1,28 @@
//! Fulfill loop for next-solver.
-use std::marker::PhantomData;
-use std::mem;
-use std::ops::ControlFlow;
-use std::vec::ExtractIf;
+mod errors;
-use rustc_next_trait_solver::delegate::SolverDelegate;
-use rustc_next_trait_solver::solve::{
- GoalEvaluation, GoalStalledOn, HasChanged, SolverDelegateEvalExt,
+use std::{marker::PhantomData, mem, ops::ControlFlow, vec::ExtractIf};
+
+use rustc_hash::FxHashSet;
+use rustc_next_trait_solver::{
+ delegate::SolverDelegate,
+ solve::{GoalEvaluation, GoalStalledOn, HasChanged, SolverDelegateEvalExt},
};
-use rustc_type_ir::Interner;
-use rustc_type_ir::inherent::Span as _;
-use rustc_type_ir::solve::{Certainty, NoSolution};
+use rustc_type_ir::{
+ Interner, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
+ inherent::{IntoKind, Span as _},
+ solve::{Certainty, NoSolution},
+};
-use crate::next_solver::infer::InferCtxt;
-use crate::next_solver::infer::traits::{PredicateObligation, PredicateObligations};
-use crate::next_solver::{DbInterner, SolverContext, Span, TypingMode};
+use crate::next_solver::{
+ DbInterner, SolverContext, SolverDefId, Span, Ty, TyKind, TypingMode,
+ infer::{
+ InferCtxt,
+ traits::{PredicateObligation, PredicateObligations},
+ },
+ inspect::ProofTreeVisitor,
+};
type PendingObligations<'db> =
Vec<(PredicateObligation<'db>, Option<GoalStalledOn<DbInterner<'db>>>)>;
@@ -31,6 +38,7 @@
///
/// It is also likely that we want to use slightly different datastructures
/// here as this will have to deal with far more root goals than `evaluate_all`.
+#[derive(Debug, Clone)]
pub struct FulfillmentCtxt<'db> {
obligations: ObligationStorage<'db>,
@@ -41,7 +49,7 @@
usable_in_snapshot: usize,
}
-#[derive(Default, Debug)]
+#[derive(Default, Debug, Clone)]
struct ObligationStorage<'db> {
/// Obligations which resulted in an overflow in fulfillment itself.
///
@@ -123,10 +131,21 @@
infcx: &InferCtxt<'db>,
obligation: PredicateObligation<'db>,
) {
- assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots());
+ // FIXME: See the comment in `select_where_possible()`.
+ // assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots());
self.obligations.register(obligation, None);
}
+ pub(crate) fn register_predicate_obligations(
+ &mut self,
+ infcx: &InferCtxt<'db>,
+ obligations: impl IntoIterator<Item = PredicateObligation<'db>>,
+ ) {
+ // FIXME: See the comment in `select_where_possible()`.
+ // assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots());
+ obligations.into_iter().for_each(|obligation| self.obligations.register(obligation, None));
+ }
+
pub(crate) fn collect_remaining_errors(
&mut self,
infcx: &InferCtxt<'db>,
@@ -143,7 +162,11 @@
&mut self,
infcx: &InferCtxt<'db>,
) -> Vec<NextSolverError<'db>> {
- assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots());
+ // FIXME(next-solver): We should bring this assertion back. Currently it panics because
+ // there are places which use `InferenceTable` and open a snapshot and register obligations
+ // and select. They should use a different `ObligationCtxt` instead. Then we'll also be able
+ // to avoid putting the obligations queue in `InferenceTable`'s snapshots.

+ // assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots());
let mut errors = Vec::new();
loop {
let mut any_changed = false;
@@ -159,7 +182,7 @@
if let Some(certainty) = delegate.compute_goal_fast_path(goal, Span::dummy()) {
match certainty {
Certainty::Yes => {}
- Certainty::Maybe(_) => {
+ Certainty::Maybe { .. } => {
self.obligations.register(obligation, None);
}
}
@@ -188,7 +211,7 @@
match certainty {
Certainty::Yes => {}
- Certainty::Maybe(_) => self.obligations.register(obligation, stalled_on),
+ Certainty::Maybe { .. } => self.obligations.register(obligation, stalled_on),
}
}
@@ -216,9 +239,94 @@
self.obligations.has_pending_obligations()
}
- fn pending_obligations(&self) -> PredicateObligations<'db> {
+ pub(crate) fn pending_obligations(&self) -> PredicateObligations<'db> {
self.obligations.clone_pending()
}
+
+ pub(crate) fn drain_stalled_obligations_for_coroutines(
+ &mut self,
+ infcx: &InferCtxt<'db>,
+ ) -> PredicateObligations<'db> {
+ let stalled_coroutines = match infcx.typing_mode() {
+ TypingMode::Analysis { defining_opaque_types_and_generators } => {
+ defining_opaque_types_and_generators
+ }
+ TypingMode::Coherence
+ | TypingMode::Borrowck { defining_opaque_types: _ }
+ | TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ }
+ | TypingMode::PostAnalysis => return Default::default(),
+ };
+ let stalled_coroutines = stalled_coroutines.inner();
+
+ if stalled_coroutines.is_empty() {
+ return Default::default();
+ }
+
+ self.obligations
+ .drain_pending(|obl| {
+ infcx.probe(|_| {
+ infcx
+ .visit_proof_tree(
+ obl.as_goal(),
+ &mut StalledOnCoroutines {
+ stalled_coroutines,
+ cache: Default::default(),
+ },
+ )
+ .is_break()
+ })
+ })
+ .into_iter()
+ .map(|(o, _)| o)
+ .collect()
+ }
+}
+
+/// Detect if a goal is stalled on a coroutine that is owned by the current typeck root.
+///
+/// This function can (erroneously) fail to detect a predicate, i.e. it doesn't need to
+/// be complete. However, this will lead to ambiguity errors, so we want to make it
+/// accurate.
+///
+/// This function can also return false positives, which will lead to poor diagnostics,
+/// so we want to keep this visitor *precise* too.
+pub struct StalledOnCoroutines<'a, 'db> {
+ pub stalled_coroutines: &'a [SolverDefId],
+ pub cache: FxHashSet<Ty<'db>>,
+}
+
+impl<'db> ProofTreeVisitor<'db> for StalledOnCoroutines<'_, 'db> {
+ type Result = ControlFlow<()>;
+
+ fn visit_goal(&mut self, inspect_goal: &super::inspect::InspectGoal<'_, 'db>) -> Self::Result {
+ inspect_goal.goal().predicate.visit_with(self)?;
+
+ if let Some(candidate) = inspect_goal.unique_applicable_candidate() {
+ candidate.visit_nested_no_probe(self)
+ } else {
+ ControlFlow::Continue(())
+ }
+ }
+}
+
+impl<'db> TypeVisitor<DbInterner<'db>> for StalledOnCoroutines<'_, 'db> {
+ type Result = ControlFlow<()>;
+
+ fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result {
+ if !self.cache.insert(ty) {
+ return ControlFlow::Continue(());
+ }
+
+ if let TyKind::Coroutine(def_id, _) = ty.kind()
+ && self.stalled_coroutines.contains(&def_id.into())
+ {
+ ControlFlow::Break(())
+ } else if ty.has_coroutines() {
+ ty.super_visit_with(self)
+ } else {
+ ControlFlow::Continue(())
+ }
+ }
}
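+
+// Taken together (a summary, not a comment carried over from rustc): `drain_stalled_obligations_for_coroutines`
+// probes each pending obligation's proof tree with the `StalledOnCoroutines` visitor above and
+// pulls out the obligations whose proof tree is stalled on a coroutine owned by the current
+// typeck root, presumably so they can be re-checked once that coroutine has been fully inferred.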
#[derive(Debug)]
@@ -227,3 +335,10 @@
Ambiguity(PredicateObligation<'db>),
Overflow(PredicateObligation<'db>),
}
+
+impl NextSolverError<'_> {
+ #[inline]
+ pub fn is_true_error(&self) -> bool {
+ matches!(self, NextSolverError::TrueError(_))
+ }
+}
diff --git a/crates/hir-ty/src/next_solver/fulfill/errors.rs b/crates/hir-ty/src/next_solver/fulfill/errors.rs
new file mode 100644
index 0000000..ab4a229
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/fulfill/errors.rs
@@ -0,0 +1,1336 @@
+//! Trait solving error diagnosis and reporting.
+//!
+//! This code isn't used by rust-analyzer yet (it should be; once it is, it'll probably be better to re-port it from rustc).
+//! It's only there because without it, debugging trait solver errors is a nightmare.
+
+use std::{fmt::Debug, ops::ControlFlow};
+
+use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt};
+use rustc_type_ir::{
+ AliasRelationDirection, AliasTermKind, HostEffectPredicate, Interner, PredicatePolarity,
+ error::ExpectedFound,
+ inherent::{IntoKind, PlaceholderConst, SliceLike, Span as _},
+ lang_items::SolverTraitLangItem,
+ solve::{CandidateSource, Certainty, GoalSource, MaybeCause, NoSolution},
+};
+use tracing::{instrument, trace};
+
+use crate::next_solver::{
+ AliasTerm, Binder, ClauseKind, Const, ConstKind, DbInterner, PolyTraitPredicate, PredicateKind,
+ SolverContext, SolverDefId, Span, Term, TraitPredicate, Ty, TyKind, TypeError,
+ fulfill::NextSolverError,
+ infer::{
+ InferCtxt,
+ select::SelectionError,
+ traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations},
+ },
+ inspect::{self, ProofTreeVisitor},
+ normalize::deeply_normalize_for_diagnostics,
+};
+
+#[derive(Debug)]
+pub struct FulfillmentError<'db> {
+ pub obligation: PredicateObligation<'db>,
+ pub code: FulfillmentErrorCode<'db>,
+ /// Diagnostics only: the 'root' obligation which resulted in
+ /// the failure to process `obligation`. This is the obligation
+ /// that was initially passed to `register_predicate_obligation`
+ pub root_obligation: PredicateObligation<'db>,
+}
+
+impl<'db> FulfillmentError<'db> {
+ pub fn new(
+ obligation: PredicateObligation<'db>,
+ code: FulfillmentErrorCode<'db>,
+ root_obligation: PredicateObligation<'db>,
+ ) -> FulfillmentError<'db> {
+ FulfillmentError { obligation, code, root_obligation }
+ }
+
+ pub fn is_true_error(&self) -> bool {
+ match self.code {
+ FulfillmentErrorCode::Select(_)
+ | FulfillmentErrorCode::Project(_)
+ | FulfillmentErrorCode::Subtype(_, _)
+ | FulfillmentErrorCode::ConstEquate(_, _) => true,
+ FulfillmentErrorCode::Cycle(_) | FulfillmentErrorCode::Ambiguity { overflow: _ } => {
+ false
+ }
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum FulfillmentErrorCode<'db> {
+ /// Inherently impossible to fulfill; this trait is implemented if and only
+ /// if it is already implemented.
+ Cycle(PredicateObligations<'db>),
+ Select(SelectionError<'db>),
+ Project(MismatchedProjectionTypes<'db>),
+ Subtype(ExpectedFound<Ty<'db>>, TypeError<'db>), // always comes from a SubtypePredicate
+ ConstEquate(ExpectedFound<Const<'db>>, TypeError<'db>),
+ Ambiguity {
+ /// Overflow is only `Some(suggest_recursion_limit)` when using the next generation
+ /// trait solver `-Znext-solver`. With the old solver overflow is eagerly handled by
+ /// emitting a fatal error instead.
+ overflow: Option<bool>,
+ },
+}
+
+#[derive(Debug, Clone)]
+pub struct MismatchedProjectionTypes<'db> {
+ pub err: TypeError<'db>,
+}
+
+pub(super) fn fulfillment_error_for_no_solution<'db>(
+ infcx: &InferCtxt<'db>,
+ root_obligation: PredicateObligation<'db>,
+) -> FulfillmentError<'db> {
+ let obligation = find_best_leaf_obligation(infcx, &root_obligation, false);
+
+ let code = match obligation.predicate.kind().skip_binder() {
+ PredicateKind::Clause(ClauseKind::Projection(_)) => {
+ FulfillmentErrorCode::Project(
+ // FIXME: This could be a `Sorts` if the term is a type
+ MismatchedProjectionTypes { err: TypeError::Mismatch },
+ )
+ }
+ PredicateKind::Clause(ClauseKind::ConstArgHasType(ct, expected_ty)) => {
+ let ct_ty = match ct.kind() {
+ ConstKind::Unevaluated(uv) => {
+ infcx.interner.type_of(uv.def).instantiate(infcx.interner, uv.args)
+ }
+ ConstKind::Param(param_ct) => param_ct.find_const_ty_from_env(obligation.param_env),
+ ConstKind::Value(cv) => cv.ty,
+ kind => panic!(
+ "ConstArgHasWrongType failed but we don't know how to compute type for {kind:?}"
+ ),
+ };
+ FulfillmentErrorCode::Select(SelectionError::ConstArgHasWrongType {
+ ct,
+ ct_ty,
+ expected_ty,
+ })
+ }
+ PredicateKind::NormalizesTo(..) => {
+ FulfillmentErrorCode::Project(MismatchedProjectionTypes { err: TypeError::Mismatch })
+ }
+ PredicateKind::AliasRelate(_, _, _) => {
+ FulfillmentErrorCode::Project(MismatchedProjectionTypes { err: TypeError::Mismatch })
+ }
+ PredicateKind::Subtype(pred) => {
+ let (a, b) = infcx.enter_forall_and_leak_universe(
+ obligation.predicate.kind().rebind((pred.a, pred.b)),
+ );
+ let expected_found = ExpectedFound::new(a, b);
+ FulfillmentErrorCode::Subtype(expected_found, TypeError::Sorts(expected_found))
+ }
+ PredicateKind::Coerce(pred) => {
+ let (a, b) = infcx.enter_forall_and_leak_universe(
+ obligation.predicate.kind().rebind((pred.a, pred.b)),
+ );
+ let expected_found = ExpectedFound::new(b, a);
+ FulfillmentErrorCode::Subtype(expected_found, TypeError::Sorts(expected_found))
+ }
+ PredicateKind::Clause(_) | PredicateKind::DynCompatible(_) | PredicateKind::Ambiguous => {
+ FulfillmentErrorCode::Select(SelectionError::Unimplemented)
+ }
+ PredicateKind::ConstEquate(..) => {
+ panic!("unexpected goal: {obligation:?}")
+ }
+ };
+
+ FulfillmentError { obligation, code, root_obligation }
+}
+
+pub(super) fn fulfillment_error_for_stalled<'db>(
+ infcx: &InferCtxt<'db>,
+ root_obligation: PredicateObligation<'db>,
+) -> FulfillmentError<'db> {
+ let (code, refine_obligation) = infcx.probe(|_| {
+ match <&SolverContext<'db>>::from(infcx).evaluate_root_goal(
+ root_obligation.as_goal(),
+ Span::dummy(),
+ None,
+ ) {
+ Ok(GoalEvaluation {
+ certainty: Certainty::Maybe { cause: MaybeCause::Ambiguity, .. },
+ ..
+ }) => (FulfillmentErrorCode::Ambiguity { overflow: None }, true),
+ Ok(GoalEvaluation {
+ certainty:
+ Certainty::Maybe {
+ cause:
+ MaybeCause::Overflow { suggest_increasing_limit, keep_constraints: _ },
+ ..
+ },
+ ..
+ }) => (
+ FulfillmentErrorCode::Ambiguity { overflow: Some(suggest_increasing_limit) },
+ // Don't look into overflows because we treat overflows weirdly anyways.
+ // We discard the inference constraints from overflowing goals, so
+ // recomputing the goal again during `find_best_leaf_obligation` may apply
+ // inference guidance that makes other goals go from ambig -> pass, for example.
+ //
+ // FIXME: We should probably just look into overflows here.
+ false,
+ ),
+ Ok(GoalEvaluation { certainty: Certainty::Yes, .. }) => {
+ panic!(
+ "did not expect successful goal when collecting ambiguity errors for `{:?}`",
+ infcx.resolve_vars_if_possible(root_obligation.predicate),
+ )
+ }
+ Err(_) => {
+ panic!(
+ "did not expect selection error when collecting ambiguity errors for `{:?}`",
+ infcx.resolve_vars_if_possible(root_obligation.predicate),
+ )
+ }
+ }
+ });
+
+ FulfillmentError {
+ obligation: if refine_obligation {
+ find_best_leaf_obligation(infcx, &root_obligation, true)
+ } else {
+ root_obligation.clone()
+ },
+ code,
+ root_obligation,
+ }
+}
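+
+// In short (a summary of the logic above, not text from rustc): the root goal is re-evaluated
+// inside a probe; a plain ambiguity result is refined down to the best leaf obligation, whereas
+// an overflow keeps the root obligation, because the inference constraints of overflowing goals
+// are discarded and refining could otherwise guide inference and change other goals' results.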
+
+pub(super) fn fulfillment_error_for_overflow<'db>(
+ infcx: &InferCtxt<'db>,
+ root_obligation: PredicateObligation<'db>,
+) -> FulfillmentError<'db> {
+ FulfillmentError {
+ obligation: find_best_leaf_obligation(infcx, &root_obligation, true),
+ code: FulfillmentErrorCode::Ambiguity { overflow: Some(true) },
+ root_obligation,
+ }
+}
+
+#[instrument(level = "debug", skip(infcx), ret)]
+fn find_best_leaf_obligation<'db>(
+ infcx: &InferCtxt<'db>,
+ obligation: &PredicateObligation<'db>,
+ consider_ambiguities: bool,
+) -> PredicateObligation<'db> {
+ let obligation = infcx.resolve_vars_if_possible(obligation.clone());
+ // FIXME: we use a probe here as the `BestObligation` visitor does not
+ // check whether it uses candidates which get shadowed by where-bounds.
+ //
+ // We should probably fix the visitor to not do so instead, as this also
+ // means the leaf obligation may be incorrect.
+ let obligation = infcx
+ .fudge_inference_if_ok(|| {
+ infcx
+ .visit_proof_tree(
+ obligation.as_goal(),
+ &mut BestObligation { obligation: obligation.clone(), consider_ambiguities },
+ )
+ .break_value()
+ .ok_or(())
+ })
+ .unwrap_or(obligation);
+ deeply_normalize_for_diagnostics(infcx, obligation.param_env, obligation)
+}
+
+struct BestObligation<'db> {
+ obligation: PredicateObligation<'db>,
+ consider_ambiguities: bool,
+}
+
+impl<'db> BestObligation<'db> {
+ fn with_derived_obligation(
+ &mut self,
+ derived_obligation: PredicateObligation<'db>,
+ and_then: impl FnOnce(&mut Self) -> <Self as ProofTreeVisitor<'db>>::Result,
+ ) -> <Self as ProofTreeVisitor<'db>>::Result {
+ let old_obligation = std::mem::replace(&mut self.obligation, derived_obligation);
+ let res = and_then(self);
+ self.obligation = old_obligation;
+ res
+ }
+
+ /// Filter out the candidates that aren't interesting to visit for the
+ /// purposes of reporting errors. For ambiguities, we only consider
+ /// candidates that may hold. For errors, we only consider candidates that
+ /// *don't* hold and which have impl-where clauses that also don't hold.
+ fn non_trivial_candidates<'a>(
+ &self,
+ goal: &'a inspect::InspectGoal<'a, 'db>,
+ ) -> Vec<inspect::InspectCandidate<'a, 'db>> {
+ let mut candidates = goal.candidates();
+ match self.consider_ambiguities {
+ true => {
+ // If we have an ambiguous obligation, we must consider *all* candidates
+ // that hold, or else we may guide inference causing other goals to go
+ // from ambig -> pass/fail.
+ candidates.retain(|candidate| candidate.result().is_ok());
+ }
+ false => {
+ // We always handle rigid alias candidates separately as we may not add them for
+ // aliases whose trait bound doesn't hold.
+ candidates.retain(|c| !matches!(c.kind(), inspect::ProbeKind::RigidAlias { .. }));
+ // If we have >1 candidate, one may still be due to "boring" reasons, like
+ // an alias-relate that failed to hold when deeply evaluated. We really
+ // don't care about reasons like this.
+ if candidates.len() > 1 {
+ candidates.retain(|candidate| {
+ goal.infcx().probe(|_| {
+ candidate.instantiate_nested_goals().iter().any(|nested_goal| {
+ matches!(
+ nested_goal.source(),
+ GoalSource::ImplWhereBound
+ | GoalSource::AliasBoundConstCondition
+ | GoalSource::InstantiateHigherRanked
+ | GoalSource::AliasWellFormed
+ ) && nested_goal.result().is_err()
+ })
+ })
+ });
+ }
+ }
+ }
+
+ candidates
+ }
+
+ /// HACK: We walk the nested obligations for a well-formed arg manually,
+ /// since there's nontrivial logic in `wf.rs` to set up an obligation cause.
+ /// Ideally we'd be able to track this better.
+ fn visit_well_formed_goal(
+ &mut self,
+ candidate: &inspect::InspectCandidate<'_, 'db>,
+ term: Term<'db>,
+ ) -> ControlFlow<PredicateObligation<'db>> {
+ let infcx = candidate.goal().infcx();
+ let param_env = candidate.goal().goal().param_env;
+
+ for obligation in wf::unnormalized_obligations(infcx, param_env, term).into_iter().flatten()
+ {
+ let nested_goal = candidate
+ .instantiate_proof_tree_for_nested_goal(GoalSource::Misc, obligation.as_goal());
+ // Skip nested goals that aren't the *reason* for our goal's failure.
+ match (self.consider_ambiguities, nested_goal.result()) {
+ (true, Ok(Certainty::Maybe { cause: MaybeCause::Ambiguity, .. }))
+ | (false, Err(_)) => {}
+ _ => continue,
+ }
+
+ self.with_derived_obligation(obligation, |this| nested_goal.visit_with(this))?;
+ }
+
+ ControlFlow::Break(self.obligation.clone())
+ }
+
+ /// If a normalization of an associated item or a trait goal fails without trying any
+ /// candidates, it's likely that normalizing its self type failed. We manually detect
+ /// such cases here.
+ fn detect_error_in_self_ty_normalization(
+ &mut self,
+ goal: &inspect::InspectGoal<'_, 'db>,
+ self_ty: Ty<'db>,
+ ) -> ControlFlow<PredicateObligation<'db>> {
+ assert!(!self.consider_ambiguities);
+ let interner = goal.infcx().interner;
+ if let TyKind::Alias(..) = self_ty.kind() {
+ let infer_term = goal.infcx().next_ty_var();
+ let pred = PredicateKind::AliasRelate(
+ self_ty.into(),
+ infer_term.into(),
+ AliasRelationDirection::Equate,
+ );
+ let obligation = Obligation::new(
+ interner,
+ self.obligation.cause.clone(),
+ goal.goal().param_env,
+ pred,
+ );
+ self.with_derived_obligation(obligation, |this| {
+ goal.infcx().visit_proof_tree_at_depth(
+ goal.goal().with(interner, pred),
+ goal.depth() + 1,
+ this,
+ )
+ })
+ } else {
+ ControlFlow::Continue(())
+ }
+ }
+
+ /// When a higher-ranked projection goal fails, check that the corresponding
+ /// higher-ranked trait goal holds or not. This is because the process of
+ /// instantiating and then re-canonicalizing the binder of the projection goal
+ /// forces us to be unable to see that the leak check failed in the nested
+ /// `NormalizesTo` goal, so we don't fall back to the rigid projection check
+ /// that should catch when a projection goal fails due to an unsatisfied trait
+ /// goal.
+ fn detect_trait_error_in_higher_ranked_projection(
+ &mut self,
+ goal: &inspect::InspectGoal<'_, 'db>,
+ ) -> ControlFlow<PredicateObligation<'db>> {
+ let interner = goal.infcx().interner;
+ if let Some(projection_clause) = goal.goal().predicate.as_projection_clause()
+ && !projection_clause.bound_vars().is_empty()
+ {
+ let pred = projection_clause.map_bound(|proj| proj.projection_term.trait_ref(interner));
+ let obligation = Obligation::new(
+ interner,
+ self.obligation.cause.clone(),
+ goal.goal().param_env,
+ deeply_normalize_for_diagnostics(goal.infcx(), goal.goal().param_env, pred),
+ );
+ self.with_derived_obligation(obligation, |this| {
+ goal.infcx().visit_proof_tree_at_depth(
+ goal.goal().with(interner, pred),
+ goal.depth() + 1,
+ this,
+ )
+ })
+ } else {
+ ControlFlow::Continue(())
+ }
+ }
+
+ /// It is likely that `NormalizesTo` failed without any applicable candidates
+ /// because the alias is not well-formed.
+ ///
+ /// As we only enter `RigidAlias` candidates if the trait bound of the associated type
+ /// holds, we discard these candidates in `non_trivial_candidates` and always manually
+ /// check this here.
+ fn detect_non_well_formed_assoc_item(
+ &mut self,
+ goal: &inspect::InspectGoal<'_, 'db>,
+ alias: AliasTerm<'db>,
+ ) -> ControlFlow<PredicateObligation<'db>> {
+ let interner = goal.infcx().interner;
+ let obligation = Obligation::new(
+ interner,
+ self.obligation.cause.clone(),
+ goal.goal().param_env,
+ alias.trait_ref(interner),
+ );
+ self.with_derived_obligation(obligation, |this| {
+ goal.infcx().visit_proof_tree_at_depth(
+ goal.goal().with(interner, alias.trait_ref(interner)),
+ goal.depth() + 1,
+ this,
+ )
+ })
+ }
+
+ /// If we have no candidates, then it's likely that there is a
+ /// non-well-formed alias in the goal.
+ fn detect_error_from_empty_candidates(
+ &mut self,
+ goal: &inspect::InspectGoal<'_, 'db>,
+ ) -> ControlFlow<PredicateObligation<'db>> {
+ let interner = goal.infcx().interner;
+ let pred_kind = goal.goal().predicate.kind();
+
+ match pred_kind.no_bound_vars() {
+ Some(PredicateKind::Clause(ClauseKind::Trait(pred))) => {
+ self.detect_error_in_self_ty_normalization(goal, pred.self_ty())?;
+ }
+ Some(PredicateKind::NormalizesTo(pred)) => {
+ if let AliasTermKind::ProjectionTy | AliasTermKind::ProjectionConst =
+ pred.alias.kind(interner)
+ {
+ self.detect_error_in_self_ty_normalization(goal, pred.alias.self_ty())?;
+ self.detect_non_well_formed_assoc_item(goal, pred.alias)?;
+ }
+ }
+ Some(_) | None => {}
+ }
+
+ ControlFlow::Break(self.obligation.clone())
+ }
+}
+
+impl<'db> ProofTreeVisitor<'db> for BestObligation<'db> {
+ type Result = ControlFlow<PredicateObligation<'db>>;
+
+ #[instrument(level = "trace", skip(self, goal), fields(goal = ?goal.goal()))]
+ fn visit_goal(&mut self, goal: &inspect::InspectGoal<'_, 'db>) -> Self::Result {
+ let interner = goal.infcx().interner;
+ // Skip goals that aren't the *reason* for our goal's failure.
+ match (self.consider_ambiguities, goal.result()) {
+ (true, Ok(Certainty::Maybe { cause: MaybeCause::Ambiguity, .. })) | (false, Err(_)) => {
+ }
+ _ => return ControlFlow::Continue(()),
+ }
+
+ let pred = goal.goal().predicate;
+
+ let candidates = self.non_trivial_candidates(goal);
+ let candidate = match candidates.as_slice() {
+ [candidate] => candidate,
+ [] => return self.detect_error_from_empty_candidates(goal),
+ _ => return ControlFlow::Break(self.obligation.clone()),
+ };
+
+ // // Don't walk into impls that have `do_not_recommend`.
+ // if let inspect::ProbeKind::TraitCandidate {
+ // source: CandidateSource::Impl(impl_def_id),
+ // result: _,
+ // } = candidate.kind()
+ // && interner.do_not_recommend_impl(impl_def_id)
+ // {
+ // trace!("#[do_not_recommend] -> exit");
+ // return ControlFlow::Break(self.obligation.clone());
+ // }
+
+ // FIXME: Also, what about considering >1 layer up the stack? May be necessary
+ // for normalizes-to.
+ let child_mode = match pred.kind().skip_binder() {
+ PredicateKind::Clause(ClauseKind::Trait(trait_pred)) => {
+ ChildMode::Trait(pred.kind().rebind(trait_pred))
+ }
+ PredicateKind::Clause(ClauseKind::HostEffect(host_pred)) => {
+ ChildMode::Host(pred.kind().rebind(host_pred))
+ }
+ PredicateKind::NormalizesTo(normalizes_to)
+ if matches!(
+ normalizes_to.alias.kind(interner),
+ AliasTermKind::ProjectionTy | AliasTermKind::ProjectionConst
+ ) =>
+ {
+ ChildMode::Trait(pred.kind().rebind(TraitPredicate {
+ trait_ref: normalizes_to.alias.trait_ref(interner),
+ polarity: PredicatePolarity::Positive,
+ }))
+ }
+ PredicateKind::Clause(ClauseKind::WellFormed(term)) => {
+ return self.visit_well_formed_goal(candidate, term);
+ }
+ _ => ChildMode::PassThrough,
+ };
+
+ let nested_goals = candidate.instantiate_nested_goals();
+
+ // If the candidate requires some `T: FnPtr` bound which does not hold, it should not be treated as
+ // an actual candidate; instead we should treat it as if the impl was never considered to
+ // have potentially applied. As if `impl<A, R> Trait for for<..> fn(..A) -> R` was written
+ // instead of `impl<T: FnPtr> Trait for T`.
+ //
+ // We do this as a separate loop so that we do not choose to tell the user about some nested
+ // goal before we encounter a `T: FnPtr` nested goal.
+ for nested_goal in &nested_goals {
+ if let Some(poly_trait_pred) = nested_goal.goal().predicate.as_trait_clause()
+ && interner
+ .is_trait_lang_item(poly_trait_pred.def_id(), SolverTraitLangItem::FnPtrTrait)
+ && let Err(NoSolution) = nested_goal.result()
+ {
+ return ControlFlow::Break(self.obligation.clone());
+ }
+ }
+
+ let mut impl_where_bound_count = 0;
+ for nested_goal in nested_goals {
+ trace!(nested_goal = ?(nested_goal.goal(), nested_goal.source(), nested_goal.result()));
+
+ let nested_pred = nested_goal.goal().predicate;
+
+ let make_obligation = || Obligation {
+ cause: ObligationCause::dummy(),
+ param_env: nested_goal.goal().param_env,
+ predicate: nested_pred,
+ recursion_depth: self.obligation.recursion_depth + 1,
+ };
+
+ let obligation;
+ match (child_mode, nested_goal.source()) {
+ (
+ ChildMode::Trait(_) | ChildMode::Host(_),
+ GoalSource::Misc | GoalSource::TypeRelating | GoalSource::NormalizeGoal(_),
+ ) => {
+ continue;
+ }
+ (ChildMode::Trait(parent_trait_pred), GoalSource::ImplWhereBound) => {
+ obligation = make_obligation();
+ impl_where_bound_count += 1;
+ }
+ (
+ ChildMode::Host(parent_host_pred),
+ GoalSource::ImplWhereBound | GoalSource::AliasBoundConstCondition,
+ ) => {
+ obligation = make_obligation();
+ impl_where_bound_count += 1;
+ }
+ // Skip over a higher-ranked predicate.
+ (_, GoalSource::InstantiateHigherRanked) => {
+ obligation = self.obligation.clone();
+ }
+ (ChildMode::PassThrough, _)
+ | (_, GoalSource::AliasWellFormed | GoalSource::AliasBoundConstCondition) => {
+ obligation = make_obligation();
+ }
+ }
+
+ self.with_derived_obligation(obligation, |this| nested_goal.visit_with(this))?;
+ }
+
+ // alias-relate may fail because the lhs or rhs can't be normalized,
+ // and therefore is treated as rigid.
+ if let Some(PredicateKind::AliasRelate(lhs, rhs, _)) = pred.kind().no_bound_vars() {
+ goal.infcx().visit_proof_tree_at_depth(
+ goal.goal().with(interner, ClauseKind::WellFormed(lhs)),
+ goal.depth() + 1,
+ self,
+ )?;
+ goal.infcx().visit_proof_tree_at_depth(
+ goal.goal().with(interner, ClauseKind::WellFormed(rhs)),
+ goal.depth() + 1,
+ self,
+ )?;
+ }
+
+ self.detect_trait_error_in_higher_ranked_projection(goal)?;
+
+ ControlFlow::Break(self.obligation.clone())
+ }
+}
+
+#[derive(Debug, Copy, Clone)]
+enum ChildMode<'db> {
+ // Try to derive an `ObligationCause::{ImplDerived,BuiltinDerived}`,
+ // and skip all `GoalSource::Misc`, which represent useless obligations
+ // such as alias-eq which may not hold.
+ Trait(PolyTraitPredicate<'db>),
+ // Try to derive an `ObligationCause::{ImplDerived,BuiltinDerived}`,
+ // and skip all `GoalSource::Misc`, which represent useless obligations
+ // such as alias-eq which may not hold.
+ Host(Binder<'db, HostEffectPredicate<DbInterner<'db>>>),
+ // Skip trying to derive an `ObligationCause` from this obligation, and
+ // report *all* sub-obligations as if they came directly from the parent
+ // obligation.
+ PassThrough,
+}
+
+impl<'db> NextSolverError<'db> {
+ pub fn to_debuggable_error(&self, infcx: &InferCtxt<'db>) -> FulfillmentError<'db> {
+ match self {
+ NextSolverError::TrueError(obligation) => {
+ fulfillment_error_for_no_solution(infcx, obligation.clone())
+ }
+ NextSolverError::Ambiguity(obligation) => {
+ fulfillment_error_for_stalled(infcx, obligation.clone())
+ }
+ NextSolverError::Overflow(obligation) => {
+ fulfillment_error_for_overflow(infcx, obligation.clone())
+ }
+ }
+ }
+}
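+
+// Illustrative debugging use (a sketch, not something this PR wires up anywhere; `fulfill_cx` and
+// `infcx` are hypothetical locals): errors returned by `FulfillmentCtxt::select_where_possible`
+// can be expanded into the richer `FulfillmentError` for logging, e.g.
+//
+//     for e in fulfill_cx.select_where_possible(&infcx) {
+//         tracing::debug!("{:?}", e.to_debuggable_error(&infcx));
+//     }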
+
+mod wf {
+ use std::iter;
+
+ use hir_def::ItemContainerId;
+ use rustc_type_ir::inherent::{
+ AdtDef, BoundExistentialPredicates, GenericArg, GenericArgs as _, IntoKind, SliceLike,
+ Term as _, Ty as _,
+ };
+ use rustc_type_ir::lang_items::SolverTraitLangItem;
+ use rustc_type_ir::{
+ Interner, PredicatePolarity, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
+ TypeVisitor,
+ };
+ use tracing::{debug, instrument, trace};
+
+ use crate::next_solver::infer::InferCtxt;
+ use crate::next_solver::infer::traits::{
+ Obligation, ObligationCause, PredicateObligation, PredicateObligations,
+ };
+ use crate::next_solver::{
+ AliasTerm, Binder, ClauseKind, Const, ConstKind, Ctor, DbInterner, ExistentialPredicate,
+ GenericArgs, ParamEnv, Predicate, PredicateKind, Region, SolverDefId, Term, TraitPredicate,
+ TraitRef, Ty, TyKind,
+ };
+
+ /// Compute the predicates that are required for a type to be well-formed.
+ ///
+ /// This is only intended to be used in the new solver, since it does not
+ /// take into account recursion depth or proper error-reporting spans.
+ pub fn unnormalized_obligations<'db>(
+ infcx: &InferCtxt<'db>,
+ param_env: ParamEnv<'db>,
+ term: Term<'db>,
+ ) -> Option<PredicateObligations<'db>> {
+ debug_assert_eq!(term, infcx.resolve_vars_if_possible(term));
+
+ // If `term` is an unresolved inference variable, return `None`,
+ // because we are not able to make any progress at all. This is to prevent
+ // cycles where we say "?0 is WF if ?0 is WF".
+ if term.is_infer() {
+ return None;
+ }
+
+ let mut wf =
+ WfPredicates { infcx, param_env, out: PredicateObligations::new(), recursion_depth: 0 };
+ wf.add_wf_preds_for_term(term);
+ Some(wf.out)
+ }
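+
+ // For instance (a rough summary of the visitor below, not documentation from rustc): calling
+ // `unnormalized_obligations` on `term = &'a T` pushes a `T: 'a` outlives obligation; on an ADT
+ // it pushes the ADT's instantiated predicates; and on `dyn Trait + 'r` it pushes the
+ // object-region outlives obligations built in `add_wf_preds_for_dyn_ty`.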
+
+ struct WfPredicates<'a, 'db> {
+ infcx: &'a InferCtxt<'db>,
+ param_env: ParamEnv<'db>,
+ out: PredicateObligations<'db>,
+ recursion_depth: usize,
+ }
+
+ /// Controls whether we "elaborate" supertraits and so forth on the WF
+ /// predicates. This is a kind of hack to address #43784. The
+ /// underlying problem in that issue was a trait structure like:
+ ///
+ /// ```ignore (illustrative)
+ /// trait Foo: Copy { }
+ /// trait Bar: Foo { }
+ /// impl<T: Bar> Foo for T { }
+ /// impl<T> Bar for T { }
+ /// ```
+ ///
+ /// Here, in the `Foo` impl, we will check that `T: Copy` holds -- but
+ /// we decide that this is true because `T: Bar` is in the
+ /// where-clauses (and we can elaborate that to include `T:
+ /// Copy`). This wouldn't be a problem, except that when we check the
+ /// `Bar` impl, we decide that `T: Foo` must hold because of the `Foo`
+ /// impl. And so nowhere did we check that `T: Copy` holds!
+ ///
+ /// To resolve this, we elaborate the WF requirements that must be
+ /// proven when checking impls. This means that (e.g.) the `impl Bar
+ /// for T` will be forced to prove not only that `T: Foo` but also `T:
+ /// Copy` (which it won't be able to do, because there is no `Copy`
+ /// impl for `T`).
+ #[derive(Debug, PartialEq, Eq, Copy, Clone)]
+ enum Elaborate {
+ All,
+ None,
+ }
+
+ impl<'a, 'db> WfPredicates<'a, 'db> {
+ fn interner(&self) -> DbInterner<'db> {
+ self.infcx.interner
+ }
+
+ /// Pushes the obligations required for `trait_ref` to be WF into `self.out`.
+ fn add_wf_preds_for_trait_pred(
+ &mut self,
+ trait_pred: TraitPredicate<'db>,
+ elaborate: Elaborate,
+ ) {
+ let tcx = self.interner();
+ let trait_ref = trait_pred.trait_ref;
+
+ // Negative trait predicates don't require supertraits to hold, just
+ // that their args are WF.
+ if trait_pred.polarity == PredicatePolarity::Negative {
+ self.add_wf_preds_for_negative_trait_pred(trait_ref);
+ return;
+ }
+
+ // If the trait predicate is not const, the wf obligations should not be const either.
+ let obligations = self.nominal_obligations(trait_ref.def_id.0.into(), trait_ref.args);
+
+ debug!("compute_trait_pred obligations {:?}", obligations);
+ let param_env = self.param_env;
+ let depth = self.recursion_depth;
+
+ let extend = |PredicateObligation { predicate, mut cause, .. }| {
+ Obligation::with_depth(tcx, cause, depth, param_env, predicate)
+ };
+
+ if let Elaborate::All = elaborate {
+ let implied_obligations = rustc_type_ir::elaborate::elaborate(tcx, obligations);
+ let implied_obligations = implied_obligations.map(extend);
+ self.out.extend(implied_obligations);
+ } else {
+ self.out.extend(obligations);
+ }
+
+ self.out.extend(
+ trait_ref
+ .args
+ .iter()
+ .enumerate()
+ .filter_map(|(i, arg)| arg.as_term().map(|t| (i, t)))
+ .filter(|(_, term)| !term.has_escaping_bound_vars())
+ .map(|(i, term)| {
+ let mut cause = ObligationCause::misc();
+ // The first arg is the self ty - use the correct span for it.
+ Obligation::with_depth(
+ tcx,
+ cause,
+ depth,
+ param_env,
+ ClauseKind::WellFormed(term),
+ )
+ }),
+ );
+ }
+
+ // Compute the obligations that are required for `trait_ref` to be WF,
+ // given that it is a *negative* trait predicate.
+ fn add_wf_preds_for_negative_trait_pred(&mut self, trait_ref: TraitRef<'db>) {
+ for arg in trait_ref.args {
+ if let Some(term) = arg.as_term() {
+ self.add_wf_preds_for_term(term);
+ }
+ }
+ }
+
+ /// Pushes the obligations required for an alias (except inherent) to be WF
+ /// into `self.out`.
+ fn add_wf_preds_for_alias_term(&mut self, data: AliasTerm<'db>) {
+ // A projection is well-formed if
+ //
+ // (a) its predicates hold (*)
+ // (b) its args are wf
+ //
+ // (*) The predicates of an associated type include the predicates of
+ // the trait that it's contained in. For example, given
+ //
+ // trait A<T>: Clone {
+ // type X where T: Copy;
+ // }
+ //
+ // The predicates of `<() as A<i32>>::X` are:
+ // [
+ // `(): Sized`
+ // `(): Clone`
+ // `(): A<i32>`
+ // `i32: Sized`
+ // `i32: Clone`
+ // `i32: Copy`
+ // ]
+ let obligations = self.nominal_obligations(data.def_id, data.args);
+ self.out.extend(obligations);
+
+ self.add_wf_preds_for_projection_args(data.args);
+ }
+
+ fn add_wf_preds_for_projection_args(&mut self, args: GenericArgs<'db>) {
+ let tcx = self.interner();
+ let cause = ObligationCause::new();
+ let param_env = self.param_env;
+ let depth = self.recursion_depth;
+
+ self.out.extend(
+ args.iter()
+ .filter_map(|arg| arg.as_term())
+ .filter(|term| !term.has_escaping_bound_vars())
+ .map(|term| {
+ Obligation::with_depth(
+ tcx,
+ cause.clone(),
+ depth,
+ param_env,
+ ClauseKind::WellFormed(term),
+ )
+ }),
+ );
+ }
+
+ fn require_sized(&mut self, subty: Ty<'db>) {
+ if !subty.has_escaping_bound_vars() {
+ let cause = ObligationCause::new();
+ let trait_ref = TraitRef::new(
+ self.interner(),
+ self.interner().require_trait_lang_item(SolverTraitLangItem::Sized),
+ [subty],
+ );
+ self.out.push(Obligation::with_depth(
+ self.interner(),
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ Binder::dummy(trait_ref),
+ ));
+ }
+ }
+
+ /// Pushes all the predicates needed to validate that `term` is WF into `out`.
+ #[instrument(level = "debug", skip(self))]
+ fn add_wf_preds_for_term(&mut self, term: Term<'db>) {
+ term.visit_with(self);
+ debug!(?self.out);
+ }
+
+ #[instrument(level = "debug", skip(self))]
+ fn nominal_obligations(
+ &mut self,
+ def_id: SolverDefId,
+ args: GenericArgs<'db>,
+ ) -> PredicateObligations<'db> {
+ // PERF: `Sized`'s predicates include `MetaSized`, but both are compiler implemented marker
+ // traits, so `MetaSized` will always be WF if `Sized` is WF and vice-versa. Determining
+ // the nominal obligations of `Sized` would in effect just elaborate `MetaSized` and make
+ // the compiler do a bunch of work needlessly.
+ if let SolverDefId::TraitId(def_id) = def_id
+ && self.interner().is_trait_lang_item(def_id.into(), SolverTraitLangItem::Sized)
+ {
+ return Default::default();
+ }
+
+ self.interner()
+ .predicates_of(def_id)
+ .iter_instantiated(self.interner(), args)
+ .map(|pred| {
+ let cause = ObligationCause::new();
+ Obligation::with_depth(
+ self.interner(),
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ pred,
+ )
+ })
+ .filter(|pred| !pred.has_escaping_bound_vars())
+ .collect()
+ }
+
+ fn add_wf_preds_for_dyn_ty(
+ &mut self,
+ ty: Ty<'db>,
+ data: &[Binder<'db, ExistentialPredicate<'db>>],
+ region: Region<'db>,
+ ) {
+ // Imagine a type like this:
+ //
+ // trait Foo { }
+ // trait Bar<'c> : 'c { }
+ //
+ // &'b (Foo+'c+Bar<'d>)
+ // ^
+ //
+ // In this case, the following relationships must hold:
+ //
+ // 'b <= 'c
+ // 'd <= 'c
+ //
+ // The first condition is due to the normal region pointer
+ // rules, which say that a reference cannot outlive its
+ // referent.
+ //
+ // The final condition may be a bit surprising. In particular,
+ // you may expect that it would have been `'c <= 'd`, since
+ // usually lifetimes of outer things are conservative
+ // approximations for inner things. However, it works somewhat
+ // differently with trait objects: here the idea is that if the
+ // user specifies a region bound (`'c`, in this case) it is the
+ // "master bound" that *implies* that bounds from other traits are
+ // all met. (Remember that *all bounds* in a type like
+ // `Foo+Bar+Zed` must be met, not just one, hence if we write
+ // `Foo<'x>+Bar<'y>`, we know that the type outlives *both* 'x and
+ // 'y.)
+ //
+ // Note: in fact we only permit builtin traits, not `Bar<'d>`, I
+ // am looking forward to the future here.
+ if !data.has_escaping_bound_vars() && !region.has_escaping_bound_vars() {
+ let implicit_bounds = object_region_bounds(self.interner(), data);
+
+ let explicit_bound = region;
+
+ self.out.reserve(implicit_bounds.len());
+ for implicit_bound in implicit_bounds {
+ let cause = ObligationCause::new();
+ let outlives = Binder::dummy(rustc_type_ir::OutlivesPredicate(
+ explicit_bound,
+ implicit_bound,
+ ));
+ self.out.push(Obligation::with_depth(
+ self.interner(),
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ outlives,
+ ));
+ }
+
+ // We don't add any wf predicates corresponding to the trait ref's generic arguments
+ // which allows code like this to compile:
+ // ```rust
+ // trait Trait<T: Sized> {}
+ // fn foo(_: &dyn Trait<[u32]>) {}
+ // ```
+ }
+ }
+ }
+
+ impl<'a, 'db> TypeVisitor<DbInterner<'db>> for WfPredicates<'a, 'db> {
+ type Result = ();
+
+ fn visit_ty(&mut self, t: Ty<'db>) -> Self::Result {
+ debug!("wf bounds for t={:?} t.kind={:#?}", t, t.kind());
+
+ let tcx = self.interner();
+
+ match t.kind() {
+ TyKind::Bool
+ | TyKind::Char
+ | TyKind::Int(..)
+ | TyKind::Uint(..)
+ | TyKind::Float(..)
+ | TyKind::Error(_)
+ | TyKind::Str
+ | TyKind::CoroutineWitness(..)
+ | TyKind::Never
+ | TyKind::Param(_)
+ | TyKind::Bound(..)
+ | TyKind::Placeholder(..)
+ | TyKind::Foreign(..) => {
+ // WfScalar, WfParameter, etc
+ }
+
+ // Can only infer to `TyKind::Int(_) | TyKind::Uint(_)`.
+ TyKind::Infer(rustc_type_ir::IntVar(_)) => {}
+
+ // Can only infer to `TyKind::Float(_)`.
+ TyKind::Infer(rustc_type_ir::FloatVar(_)) => {}
+
+ TyKind::Slice(subty) => {
+ self.require_sized(subty);
+ }
+
+ TyKind::Array(subty, len) => {
+ self.require_sized(subty);
+ // Note that the len being WF is implicitly checked while visiting.
+ // Here we just check that it's of type usize.
+ let cause = ObligationCause::new();
+ self.out.push(Obligation::with_depth(
+ tcx,
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ Binder::dummy(PredicateKind::Clause(ClauseKind::ConstArgHasType(
+ len,
+ Ty::new_unit(self.interner()),
+ ))),
+ ));
+ }
+
+ TyKind::Pat(base_ty, pat) => {
+ self.require_sized(base_ty);
+ }
+
+ TyKind::Tuple(tys) => {
+ if let Some((_last, rest)) = tys.split_last() {
+ for &elem in rest {
+ self.require_sized(elem);
+ }
+ }
+ }
+
+ TyKind::RawPtr(_, _) => {
+ // Simple cases that are WF if their type args are WF.
+ }
+
+ TyKind::Alias(
+ rustc_type_ir::Projection | rustc_type_ir::Opaque | rustc_type_ir::Free,
+ data,
+ ) => {
+ let obligations = self.nominal_obligations(data.def_id, data.args);
+ self.out.extend(obligations);
+ }
+ TyKind::Alias(rustc_type_ir::Inherent, data) => {
+ return;
+ }
+
+ TyKind::Adt(def, args) => {
+ // WfNominalType
+ let obligations = self.nominal_obligations(def.def_id().0.into(), args);
+ self.out.extend(obligations);
+ }
+
+ TyKind::FnDef(did, args) => {
+ // HACK: Check the return type of function definitions for
+ // well-formedness to mostly fix #84533. This is still not
+ // perfect and there may be ways to abuse the fact that we
+ // ignore requirements with escaping bound vars. That's a
+ // more general issue however.
+ let fn_sig = tcx.fn_sig(did).instantiate(tcx, args);
+ fn_sig.output().skip_binder().visit_with(self);
+
+ let did = match did.0 {
+ hir_def::CallableDefId::FunctionId(id) => id.into(),
+ hir_def::CallableDefId::StructId(id) => SolverDefId::Ctor(Ctor::Struct(id)),
+ hir_def::CallableDefId::EnumVariantId(id) => {
+ SolverDefId::Ctor(Ctor::Enum(id))
+ }
+ };
+ let obligations = self.nominal_obligations(did, args);
+ self.out.extend(obligations);
+ }
+
+ TyKind::Ref(r, rty, _) => {
+ // WfReference
+ if !r.has_escaping_bound_vars() && !rty.has_escaping_bound_vars() {
+ let cause = ObligationCause::new();
+ self.out.push(Obligation::with_depth(
+ tcx,
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ Binder::dummy(PredicateKind::Clause(ClauseKind::TypeOutlives(
+ rustc_type_ir::OutlivesPredicate(rty, r),
+ ))),
+ ));
+ }
+ }
+
+ TyKind::Coroutine(did, args, ..) => {
+ // Walk ALL the types in the coroutine: this will
+ // include the upvar types as well as the yield
+ // type. Note that this is mildly distinct from
+ // the closure case, where we have to be careful
+ // about the signature of the closure. We don't
+ // have the problem of implied bounds here since
+ // coroutines don't take arguments.
+ let obligations = self.nominal_obligations(did.0.into(), args);
+ self.out.extend(obligations);
+ }
+
+ TyKind::Closure(did, args) => {
+                // Note that we cannot skip the generic types.
+                // Normally, within the fn
+ // body where they are created, the generics will
+ // always be WF, and outside of that fn body we
+ // are not directly inspecting closure types
+ // anyway, except via auto trait matching (which
+ // only inspects the upvar types).
+ // But when a closure is part of a type-alias-impl-trait
+ // then the function that created the defining site may
+ // have had more bounds available than the type alias
+ // specifies. This may cause us to have a closure in the
+ // hidden type that is not actually well formed and
+ // can cause compiler crashes when the user abuses unsafe
+ // code to procure such a closure.
+ // See tests/ui/type-alias-impl-trait/wf_check_closures.rs
+ let obligations = self.nominal_obligations(did.0.into(), args);
+ self.out.extend(obligations);
+ // Only check the upvar types for WF, not the rest
+ // of the types within. This is needed because we
+ // capture the signature and it may not be WF
+ // without the implied bounds. Consider a closure
+ // like `|x: &'a T|` -- it may be that `T: 'a` is
+ // not known to hold in the creator's context (and
+ // indeed the closure may not be invoked by its
+ // creator, but rather turned to someone who *can*
+ // verify that).
+ //
+ // The special treatment of closures here really
+ // ought not to be necessary either; the problem
+ // is related to #25860 -- there is no way for us
+ // to express a fn type complete with the implied
+ // bounds that it is assuming. I think in reality
+ // the WF rules around fn are a bit messed up, and
+                // that is the root problem: `fn(&'a T)` should
+ // probably always be WF, because it should be
+ // shorthand for something like `where(T: 'a) {
+ // fn(&'a T) }`, as discussed in #25860.
+ let upvars = args.as_closure().tupled_upvars_ty();
+ return upvars.visit_with(self);
+ }
+
+ TyKind::CoroutineClosure(did, args) => {
+                // See the above comments. The same applies to coroutine-closures.
+ let obligations = self.nominal_obligations(did.0.into(), args);
+ self.out.extend(obligations);
+ let upvars = args.as_coroutine_closure().tupled_upvars_ty();
+ return upvars.visit_with(self);
+ }
+
+ TyKind::FnPtr(..) => {
+ // Let the visitor iterate into the argument/return
+ // types appearing in the fn signature.
+ }
+            TyKind::UnsafeBinder(_ty) => {}
+
+ TyKind::Dynamic(data, r) => {
+ // WfObject
+ //
+ // Here, we defer WF checking due to higher-ranked
+ // regions. This is perhaps not ideal.
+ self.add_wf_preds_for_dyn_ty(t, data.as_slice(), r);
+
+ // FIXME(#27579) RFC also considers adding trait
+ // obligations that don't refer to Self and
+ // checking those
+ if let Some(principal) = data.principal_def_id() {
+ self.out.push(Obligation::with_depth(
+ tcx,
+ ObligationCause::new(),
+ self.recursion_depth,
+ self.param_env,
+ Binder::dummy(PredicateKind::DynCompatible(principal)),
+ ));
+ }
+ }
+
+ // Inference variables are the complicated case, since we don't
+ // know what type they are. We do two things:
+ //
+ // 1. Check if they have been resolved, and if so proceed with
+ // THAT type.
+ // 2. If not, we've at least simplified things (e.g., we went
+            //    from `Vec<?0>: WF` to `?0: WF`), so we can
+ // register a pending obligation and keep
+ // moving. (Goal is that an "inductive hypothesis"
+ // is satisfied to ensure termination.)
+ // See also the comment on `fn obligations`, describing cycle
+ // prevention, which happens before this can be reached.
+ TyKind::Infer(_) => {
+ let cause = ObligationCause::new();
+ self.out.push(Obligation::with_depth(
+ tcx,
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ Binder::dummy(PredicateKind::Clause(ClauseKind::WellFormed(t.into()))),
+ ));
+ }
+ }
+
+ t.super_visit_with(self)
+ }
+
+ fn visit_const(&mut self, c: Const<'db>) -> Self::Result {
+ let tcx = self.interner();
+
+ match c.kind() {
+ ConstKind::Unevaluated(uv) => {
+ if !c.has_escaping_bound_vars() {
+ let predicate =
+ Binder::dummy(PredicateKind::Clause(ClauseKind::ConstEvaluatable(c)));
+ let cause = ObligationCause::new();
+ self.out.push(Obligation::with_depth(
+ tcx,
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ predicate,
+ ));
+
+ if let SolverDefId::ConstId(uv_def) = uv.def
+ && let ItemContainerId::ImplId(impl_) =
+ uv_def.loc(self.interner().db).container
+ && self.interner().db.impl_signature(impl_).target_trait.is_none()
+ {
+ return; // Subtree is handled by above function
+ } else {
+ let obligations = self.nominal_obligations(uv.def, uv.args);
+ self.out.extend(obligations);
+ }
+ }
+ }
+ ConstKind::Infer(_) => {
+ let cause = ObligationCause::new();
+
+ self.out.push(Obligation::with_depth(
+ tcx,
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ Binder::dummy(PredicateKind::Clause(ClauseKind::WellFormed(c.into()))),
+ ));
+ }
+ ConstKind::Expr(_) => {
+ // FIXME(generic_const_exprs): this doesn't verify that given `Expr(N + 1)` the
+ // trait bound `typeof(N): Add<typeof(1)>` holds. This is currently unnecessary
+ // as `ConstKind::Expr` is only produced via normalization of `ConstKind::Unevaluated`
+ // which means that the `DefId` would have been typeck'd elsewhere. However in
+ // the future we may allow directly lowering to `ConstKind::Expr` in which case
+ // we would not be proving bounds we should.
+
+ let predicate =
+ Binder::dummy(PredicateKind::Clause(ClauseKind::ConstEvaluatable(c)));
+ let cause = ObligationCause::new();
+ self.out.push(Obligation::with_depth(
+ tcx,
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ predicate,
+ ));
+ }
+
+ ConstKind::Error(_)
+ | ConstKind::Param(_)
+ | ConstKind::Bound(..)
+ | ConstKind::Placeholder(..) => {
+ // These variants are trivially WF, so nothing to do here.
+ }
+ ConstKind::Value(..) => {
+ // FIXME: Enforce that values are structurally-matchable.
+ }
+ }
+
+ c.super_visit_with(self)
+ }
+
+ fn visit_predicate(&mut self, _p: Predicate<'db>) -> Self::Result {
+ panic!("predicate should not be checked for well-formedness");
+ }
+ }
+
+ /// Given an object type like `SomeTrait + Send`, computes the lifetime
+ /// bounds that must hold on the elided self type. These are derived
+ /// from the declarations of `SomeTrait`, `Send`, and friends -- if
+ /// they declare `trait SomeTrait : 'static`, for example, then
+ /// `'static` would appear in the list.
+ ///
+ /// N.B., in some cases, particularly around higher-ranked bounds,
+ /// this function returns a kind of conservative approximation.
+ /// That is, all regions returned by this function are definitely
+ /// required, but there may be other region bounds that are not
+ /// returned, as well as requirements like `for<'a> T: 'a`.
+ ///
+ /// Requires that trait definitions have been processed so that we can
+ /// elaborate predicates and walk supertraits.
+ pub fn object_region_bounds<'db>(
+ interner: DbInterner<'db>,
+ existential_predicates: &[Binder<'db, ExistentialPredicate<'db>>],
+ ) -> Vec<Region<'db>> {
+ let erased_self_ty = Ty::new_unit(interner);
+
+ let predicates = existential_predicates
+ .iter()
+ .map(|predicate| predicate.with_self_ty(interner, erased_self_ty));
+
+ rustc_type_ir::elaborate::elaborate(interner, predicates)
+ .filter_map(|pred| {
+ debug!(?pred);
+ match pred.kind().skip_binder() {
+ ClauseKind::TypeOutlives(rustc_type_ir::OutlivesPredicate(ref t, ref r)) => {
+ // Search for a bound of the form `erased_self_ty
+ // : 'a`, but be wary of something like `for<'a>
+ // erased_self_ty : 'a` (we interpret a
+ // higher-ranked bound like that as 'static,
+ // though at present the code in `fulfill.rs`
+ // considers such bounds to be unsatisfiable, so
+ // it's kind of a moot point since you could never
+ // construct such an object, but this seems
+ // correct even if that code changes).
+ if t == &erased_self_ty && !r.has_escaping_bound_vars() {
+ Some(*r)
+ } else {
+ None
+ }
+ }
+ ClauseKind::Trait(_)
+ | ClauseKind::HostEffect(..)
+ | ClauseKind::RegionOutlives(_)
+ | ClauseKind::Projection(_)
+ | ClauseKind::ConstArgHasType(_, _)
+ | ClauseKind::WellFormed(_)
+ | ClauseKind::UnstableFeature(_)
+ | ClauseKind::ConstEvaluatable(_) => None,
+ }
+ })
+ .collect()
+ }
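+
+    // Illustration of the doc comment above (the trait names are hypothetical,
+    // not part of this change): given `trait Message: 'static {}`, elaborating
+    // the supertraits of `dyn Message` against the erased self type yields
+    // `Self: 'static`, so the returned list contains `'static`, while a plain
+    // `trait Draw {}` contributes no region bounds.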
+}
diff --git a/crates/hir-ty/src/next_solver/generic_arg.rs b/crates/hir-ty/src/next_solver/generic_arg.rs
index 4e124d0..097bb85 100644
--- a/crates/hir-ty/src/next_solver/generic_arg.rs
+++ b/crates/hir-ty/src/next_solver/generic_arg.rs
@@ -6,12 +6,15 @@
ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSig, FnSigTys,
GenericArgKind, IntTy, Interner, TermKind, TyKind, TyVid, TypeFoldable, TypeVisitable,
Variance,
- inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _},
+ inherent::{
+ GenericArg as _, GenericArgs as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _,
+ },
relate::{Relate, VarianceDiagInfo},
};
use smallvec::SmallVec;
use crate::db::HirDatabase;
+use crate::next_solver::{Binder, PolyFnSig};
use super::{
Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty, Tys,
@@ -240,6 +243,34 @@
args.push(kind);
}
}
+
+ pub fn closure_sig_untupled(self) -> PolyFnSig<'db> {
+ let TyKind::FnPtr(inputs_and_output, hdr) =
+ self.split_closure_args_untupled().closure_sig_as_fn_ptr_ty.kind()
+ else {
+ unreachable!("not a function pointer")
+ };
+ inputs_and_output.with(hdr)
+ }
+
+ /// A "sensible" `.split_closure_args()`, where the arguments are not in a tuple.
+ pub fn split_closure_args_untupled(self) -> rustc_type_ir::ClosureArgsParts<DbInterner<'db>> {
+ // FIXME: should use `ClosureSubst` when possible
+ match self.inner().as_slice() {
+ [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => {
+ let interner = DbInterner::conjure();
+ rustc_type_ir::ClosureArgsParts {
+ parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()),
+ closure_sig_as_fn_ptr_ty: sig_ty.expect_ty(),
+ closure_kind_ty: closure_kind_ty.expect_ty(),
+ tupled_upvars_ty: tupled_upvars_ty.expect_ty(),
+ }
+ }
+ _ => {
+ unreachable!("unexpected closure sig");
+ }
+ }
+ }
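+
+    // Sketch of the difference (the closure type is illustrative, not part of
+    // this change): for a closure `|x: u32, y: u32| -> bool`,
+    // `split_closure_args()` below exposes the signature with a single tupled
+    // input `(u32, u32)`, as the next solver expects, while
+    // `closure_sig_untupled()` yields the plain `fn(u32, u32) -> bool` as a
+    // `PolyFnSig`.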
}
impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for GenericArgs<'db> {
@@ -329,7 +360,7 @@
fn split_closure_args(self) -> rustc_type_ir::ClosureArgsParts<DbInterner<'db>> {
// FIXME: should use `ClosureSubst` when possible
match self.inner().as_slice() {
- [parent_args @ .., sig_ty] => {
+ [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => {
let interner = DbInterner::conjure();
// This is stupid, but the next solver expects the first input to actually be a tuple
let sig_ty = match sig_ty.expect_ty().kind() {
@@ -354,8 +385,8 @@
rustc_type_ir::ClosureArgsParts {
parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()),
closure_sig_as_fn_ptr_ty: sig_ty,
- closure_kind_ty: Ty::new(interner, TyKind::Int(IntTy::I8)),
- tupled_upvars_ty: Ty::new_unit(interner),
+ closure_kind_ty: closure_kind_ty.expect_ty(),
+ tupled_upvars_ty: tupled_upvars_ty.expect_ty(),
}
}
_ => {
@@ -392,14 +423,14 @@
fn split_coroutine_args(self) -> rustc_type_ir::CoroutineArgsParts<DbInterner<'db>> {
let interner = DbInterner::conjure();
match self.inner().as_slice() {
- [parent_args @ .., resume_ty, yield_ty, return_ty] => {
+ [parent_args @ .., kind_ty, resume_ty, yield_ty, return_ty, tupled_upvars_ty] => {
rustc_type_ir::CoroutineArgsParts {
parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()),
- kind_ty: Ty::new_unit(interner),
+ kind_ty: kind_ty.expect_ty(),
resume_ty: resume_ty.expect_ty(),
yield_ty: yield_ty.expect_ty(),
return_ty: return_ty.expect_ty(),
- tupled_upvars_ty: Ty::new_unit(interner),
+ tupled_upvars_ty: tupled_upvars_ty.expect_ty(),
}
}
_ => panic!("GenericArgs were likely not for a Coroutine."),
diff --git a/crates/hir-ty/src/next_solver/infer/at.rs b/crates/hir-ty/src/next_solver/infer/at.rs
index d64c7ed..8dfffe0 100644
--- a/crates/hir-ty/src/next_solver/infer/at.rs
+++ b/crates/hir-ty/src/next_solver/infer/at.rs
@@ -26,7 +26,7 @@
//! things. (That system should probably be refactored.)
use rustc_type_ir::{
- FnSig, GenericArgKind, TypingMode, Variance,
+ FnSig, GenericArgKind, TypeFoldable, TypingMode, Variance,
error::ExpectedFound,
inherent::{IntoKind, Span as _},
relate::{Relate, TypeRelation, solver_relating::RelateExt},
@@ -36,6 +36,8 @@
AliasTerm, AliasTy, Binder, Const, DbInterner, GenericArg, Goal, ParamEnv,
PolyExistentialProjection, PolyExistentialTraitRef, PolyFnSig, Predicate, Region, Span, Term,
TraitRef, Ty,
+ fulfill::{FulfillmentCtxt, NextSolverError},
+ infer::relate::lattice::{LatticeOp, LatticeOpKind},
};
use super::{
@@ -210,6 +212,34 @@
}
}
+    /// Deeply normalizes `value`, replacing all aliases which can be normalized in
+ /// the current environment. This errors in case normalization fails or is ambiguous.
+ pub fn deeply_normalize<T>(self, value: T) -> Result<T, Vec<NextSolverError<'db>>>
+ where
+ T: TypeFoldable<DbInterner<'db>>,
+ {
+ crate::next_solver::normalize::deeply_normalize(self, value)
+ }
+
+ /// Computes the least-upper-bound, or mutual supertype, of two
+ /// values. The order of the arguments doesn't matter, but since
+ /// this can result in an error (e.g., if asked to compute LUB of
+ /// u32 and i32), it is meaningful to call one of them the
+ /// "expected type".
+ pub fn lub<T>(self, expected: T, actual: T) -> InferResult<'db, T>
+ where
+ T: ToTrace<'db>,
+ {
+ let mut op = LatticeOp::new(
+ self.infcx,
+ ToTrace::to_trace(self.cause, expected, actual),
+ self.param_env,
+ LatticeOpKind::Lub,
+ );
+ let value = op.relate(expected, actual)?;
+ Ok(InferOk { value, obligations: op.into_obligations() })
+ }
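+
+    // Hedged usage sketch of `lub` (the names `cause`, `param_env`, `a` and `b`
+    // are assumed, not part of this change):
+    //
+    //     let InferOk { value: common_supertype, obligations } =
+    //         infcx.at(&cause, param_env).lub(a, b)?;
+    //
+    // The returned obligations still have to be registered with a fulfillment
+    // context before the result can be trusted.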
+
fn goals_to_obligations(&self, goals: Vec<Goal<'db, Predicate<'db>>>) -> InferOk<'db, ()> {
InferOk {
value: (),
diff --git a/crates/hir-ty/src/next_solver/infer/context.rs b/crates/hir-ty/src/next_solver/infer/context.rs
index 45ce7e6..5aa5ad1 100644
--- a/crates/hir-ty/src/next_solver/infer/context.rs
+++ b/crates/hir-ty/src/next_solver/infer/context.rs
@@ -321,4 +321,13 @@
fn sub_unify_ty_vids_raw(&self, a: rustc_type_ir::TyVid, b: rustc_type_ir::TyVid) {
self.sub_unify_ty_vids_raw(a, b);
}
+
+ fn opaques_with_sub_unified_hidden_type(
+ &self,
+ _ty: TyVid,
+ ) -> Vec<rustc_type_ir::AliasTy<Self::Interner>> {
+        // FIXME: I guess we are okay without this for now since currently r-a lacks
+        // detailed checks over opaque types. Might need to implement this in the future.
+ vec![]
+ }
}
diff --git a/crates/hir-ty/src/next_solver/infer/mod.rs b/crates/hir-ty/src/next_solver/infer/mod.rs
index ce6c941..8e922ab 100644
--- a/crates/hir-ty/src/next_solver/infer/mod.rs
+++ b/crates/hir-ty/src/next_solver/infer/mod.rs
@@ -2,6 +2,7 @@
use std::cell::{Cell, RefCell};
use std::fmt;
+use std::ops::Range;
use std::sync::Arc;
pub use BoundRegionConversionTime::*;
@@ -55,6 +56,7 @@
pub mod region_constraints;
pub mod relate;
pub mod resolve;
+pub(crate) mod select;
pub(crate) mod snapshot;
pub(crate) mod traits;
mod type_variable;
@@ -81,6 +83,10 @@
ut::InPlace<T, &'a mut ut::UnificationStorage<T>, &'a mut InferCtxtUndoLogs<'db>>,
>;
+fn iter_idx_range<T: From<u32> + Into<u32>>(range: Range<T>) -> impl Iterator<Item = T> {
+ (range.start.into()..range.end.into()).map(Into::into)
+}
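+
+// e.g. `iter_idx_range(TyVid::from(3u32)..TyVid::from(6u32))` yields the vids
+// 3, 4 and 5; a plain `Range` over these index newtypes is not iterable since
+// they do not implement `Step`.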
+
/// This type contains all the things within `InferCtxt` that sit within a
/// `RefCell` and are involved with taking/rolling back snapshots. Snapshot
/// operations are hot enough that we want only one call to `borrow_mut` per
diff --git a/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs b/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs
index 5054969..7f15a46 100644
--- a/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs
+++ b/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs
@@ -22,7 +22,7 @@
AliasTy, Binder, DbInterner, OpaqueTypeKey, ParamTy, PlaceholderTy, Region, Ty,
};
-#[derive(Clone, Default)]
+#[derive(Debug, Clone, Default)]
pub struct RegionConstraintStorage<'db> {
/// For each `RegionVid`, the corresponding `RegionVariableOrigin`.
pub(super) var_infos: IndexVec<RegionVid, RegionVariableInfo>,
@@ -239,7 +239,7 @@
pub bound: Region<'db>,
}
-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct TwoRegions<'db> {
a: Region<'db>,
b: Region<'db>,
@@ -458,6 +458,44 @@
}
}
+ pub(super) fn lub_regions(
+ &mut self,
+ db: DbInterner<'db>,
+ a: Region<'db>,
+ b: Region<'db>,
+ ) -> Region<'db> {
+ // cannot add constraints once regions are resolved
+ debug!("RegionConstraintCollector: lub_regions({:?}, {:?})", a, b);
+ #[expect(clippy::if_same_then_else)]
+ if a.is_static() || b.is_static() {
+ a // nothing lives longer than static
+ } else if a == b {
+ a // LUB(a,a) = a
+ } else {
+ self.combine_vars(db, Lub, a, b)
+ }
+ }
+
+ pub(super) fn glb_regions(
+ &mut self,
+ db: DbInterner<'db>,
+ a: Region<'db>,
+ b: Region<'db>,
+ ) -> Region<'db> {
+ // cannot add constraints once regions are resolved
+ debug!("RegionConstraintCollector: glb_regions({:?}, {:?})", a, b);
+ #[expect(clippy::if_same_then_else)]
+ if a.is_static() {
+ b // static lives longer than everything else
+ } else if b.is_static() {
+ a // static lives longer than everything else
+ } else if a == b {
+ a // GLB(a,a) = a
+ } else {
+ self.combine_vars(db, Glb, a, b)
+ }
+ }
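+
+    // Expected results, making the branch comments above concrete (illustrative):
+    //
+    //     lub_regions('static, 'a) == 'static   // nothing lives longer than 'static
+    //     glb_regions('static, 'a) == 'a        // 'static outlives everything else
+    //     lub_regions('a, 'a)      == 'a        // LUB(a, a) = a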
+
/// Resolves a region var to its value in the unification table, if it exists.
/// Otherwise, it is resolved to the root `ReVar` in the table.
pub fn opportunistic_resolve_var(
@@ -531,6 +569,17 @@
}
}
+ pub fn vars_since_snapshot(&self, value_count: usize) -> Range<RegionVid> {
+ RegionVid::from(value_count)..RegionVid::from(self.storage.unification_table.len())
+ }
+
+ /// See `InferCtxt::region_constraints_added_in_snapshot`.
+ pub fn region_constraints_added_in_snapshot(&self, mark: &Snapshot) -> bool {
+ self.undo_log
+ .region_constraints_in_snapshot(mark)
+ .any(|elt| matches!(elt, AddConstraint(_)))
+ }
+
#[inline]
fn unification_table_mut(&mut self) -> super::UnificationTable<'_, 'db, RegionVidKey<'db>> {
ut::UnificationTable::with_log(&mut self.storage.unification_table, self.undo_log)
diff --git a/crates/hir-ty/src/next_solver/infer/relate/generalize.rs b/crates/hir-ty/src/next_solver/infer/relate/generalize.rs
index de336c6..7e2735d 100644
--- a/crates/hir-ty/src/next_solver/infer/relate/generalize.rs
+++ b/crates/hir-ty/src/next_solver/infer/relate/generalize.rs
@@ -350,7 +350,7 @@
// with inference variables can cause incorrect ambiguity.
//
// cc trait-system-refactor-initiative#110
- if self.infcx.next_trait_solver() && !alias.has_escaping_bound_vars() && !self.in_alias {
+ if !alias.has_escaping_bound_vars() && !self.in_alias {
return Ok(self.next_ty_var_for_alias());
}
diff --git a/crates/hir-ty/src/next_solver/infer/relate/lattice.rs b/crates/hir-ty/src/next_solver/infer/relate/lattice.rs
new file mode 100644
index 0000000..c7f771f
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/infer/relate/lattice.rs
@@ -0,0 +1,269 @@
+//! # Lattice variables
+//!
+//! Generic code for operating on [lattices] of inference variables
+//! that are characterized by an upper- and lower-bound.
+//!
+//! The code is defined quite generically so that it can be
+//! applied both to type variables, which represent types being inferred,
+//! and fn variables, which represent function types being inferred.
+//! (It may eventually be applied to their types as well.)
+//! In some cases, the functions are also generic with respect to the
+//! operation on the lattice (GLB vs LUB).
+//!
+//! ## Note
+//!
+//! Although all the functions are generic, for simplicity, comments in the source code
+//! generally refer to type variables and the LUB operation.
+//!
+//! [lattices]: https://en.wikipedia.org/wiki/Lattice_(order)
+
+use rustc_type_ir::{
+ AliasRelationDirection, TypeVisitableExt, Upcast, Variance,
+ inherent::{IntoKind, Span as _},
+ relate::{
+ Relate, StructurallyRelateAliases, TypeRelation, VarianceDiagInfo,
+ combine::{PredicateEmittingRelation, super_combine_consts, super_combine_tys},
+ },
+};
+
+use crate::next_solver::{
+ AliasTy, Binder, Const, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Region, Span, Ty,
+ TyKind,
+ infer::{
+ DefineOpaqueTypes, InferCtxt, TypeTrace,
+ relate::RelateResult,
+ traits::{Obligation, PredicateObligations},
+ },
+};
+
+#[derive(Clone, Copy)]
+pub(crate) enum LatticeOpKind {
+ Glb,
+ Lub,
+}
+
+impl LatticeOpKind {
+ fn invert(self) -> Self {
+ match self {
+ LatticeOpKind::Glb => LatticeOpKind::Lub,
+ LatticeOpKind::Lub => LatticeOpKind::Glb,
+ }
+ }
+}
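+
+// `invert` is used by `relate_with_variance` below: relating a contravariant
+// position flips the lattice direction, e.g. a LUB of two fn pointer types
+// needs the GLB of their argument types.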
+
+/// A greatest lower bound (common subtype) or least upper bound (common supertype).
+pub(crate) struct LatticeOp<'infcx, 'db> {
+ infcx: &'infcx InferCtxt<'db>,
+ // Immutable fields
+ trace: TypeTrace<'db>,
+ param_env: ParamEnv<'db>,
+ // Mutable fields
+ kind: LatticeOpKind,
+ obligations: PredicateObligations<'db>,
+}
+
+impl<'infcx, 'db> LatticeOp<'infcx, 'db> {
+ pub(crate) fn new(
+ infcx: &'infcx InferCtxt<'db>,
+ trace: TypeTrace<'db>,
+ param_env: ParamEnv<'db>,
+ kind: LatticeOpKind,
+ ) -> LatticeOp<'infcx, 'db> {
+ LatticeOp { infcx, trace, param_env, kind, obligations: PredicateObligations::new() }
+ }
+
+ pub(crate) fn into_obligations(self) -> PredicateObligations<'db> {
+ self.obligations
+ }
+}
+
+impl<'db> TypeRelation<DbInterner<'db>> for LatticeOp<'_, 'db> {
+ fn cx(&self) -> DbInterner<'db> {
+ self.infcx.interner
+ }
+
+ fn relate_with_variance<T: Relate<DbInterner<'db>>>(
+ &mut self,
+ variance: Variance,
+ _info: VarianceDiagInfo<DbInterner<'db>>,
+ a: T,
+ b: T,
+ ) -> RelateResult<'db, T> {
+ match variance {
+ Variance::Invariant => {
+ self.obligations.extend(
+ self.infcx
+ .at(&self.trace.cause, self.param_env)
+ .eq_trace(DefineOpaqueTypes::Yes, self.trace.clone(), a, b)?
+ .into_obligations(),
+ );
+ Ok(a)
+ }
+ Variance::Covariant => self.relate(a, b),
+ // FIXME(#41044) -- not correct, need test
+ Variance::Bivariant => Ok(a),
+ Variance::Contravariant => {
+ self.kind = self.kind.invert();
+ let res = self.relate(a, b);
+ self.kind = self.kind.invert();
+ res
+ }
+ }
+ }
+
+ /// Relates two types using a given lattice.
+ fn tys(&mut self, a: Ty<'db>, b: Ty<'db>) -> RelateResult<'db, Ty<'db>> {
+ if a == b {
+ return Ok(a);
+ }
+
+ let infcx = self.infcx;
+
+ let a = infcx.shallow_resolve(a);
+ let b = infcx.shallow_resolve(b);
+
+ match (a.kind(), b.kind()) {
+ // If one side is known to be a variable and one is not,
+ // create a variable (`v`) to represent the LUB. Make sure to
+ // relate `v` to the non-type-variable first (by passing it
+ // first to `relate_bound`). Otherwise, we would produce a
+ // subtype obligation that must then be processed.
+ //
+ // Example: if the LHS is a type variable, and RHS is
+            // `Box<i32>`, then we currently compare `v` to the RHS first,
+ // which will instantiate `v` with `Box<i32>`. Then when `v`
+ // is compared to the LHS, we instantiate LHS with `Box<i32>`.
+ // But if we did in reverse order, we would create a `v <:
+ // LHS` (or vice versa) constraint and then instantiate
+            // `v`. This would require further processing to achieve the same
+ // end-result; in particular, this screws up some of the logic
+ // in coercion, which expects LUB to figure out that the LHS
+ // is (e.g.) `Box<i32>`. A more obvious solution might be to
+ // iterate on the subtype obligations that are returned, but I
+ // think this suffices. -nmatsakis
+ (TyKind::Infer(rustc_type_ir::TyVar(..)), _) => {
+ let v = infcx.next_ty_var();
+ self.relate_bound(v, b, a)?;
+ Ok(v)
+ }
+ (_, TyKind::Infer(rustc_type_ir::TyVar(..))) => {
+ let v = infcx.next_ty_var();
+ self.relate_bound(v, a, b)?;
+ Ok(v)
+ }
+
+ (
+ TyKind::Alias(rustc_type_ir::Opaque, AliasTy { def_id: a_def_id, .. }),
+ TyKind::Alias(rustc_type_ir::Opaque, AliasTy { def_id: b_def_id, .. }),
+ ) if a_def_id == b_def_id => super_combine_tys(infcx, self, a, b),
+
+ _ => super_combine_tys(infcx, self, a, b),
+ }
+ }
+
+ fn regions(&mut self, a: Region<'db>, b: Region<'db>) -> RelateResult<'db, Region<'db>> {
+ let mut inner = self.infcx.inner.borrow_mut();
+ let mut constraints = inner.unwrap_region_constraints();
+ Ok(match self.kind {
+ // GLB(&'static u8, &'a u8) == &RegionLUB('static, 'a) u8 == &'static u8
+ LatticeOpKind::Glb => constraints.lub_regions(self.cx(), a, b),
+
+ // LUB(&'static u8, &'a u8) == &RegionGLB('static, 'a) u8 == &'a u8
+ LatticeOpKind::Lub => constraints.glb_regions(self.cx(), a, b),
+ })
+ }
+
+ fn consts(&mut self, a: Const<'db>, b: Const<'db>) -> RelateResult<'db, Const<'db>> {
+ super_combine_consts(self.infcx, self, a, b)
+ }
+
+ fn binders<T>(
+ &mut self,
+ a: Binder<'db, T>,
+ b: Binder<'db, T>,
+ ) -> RelateResult<'db, Binder<'db, T>>
+ where
+ T: Relate<DbInterner<'db>>,
+ {
+ // GLB/LUB of a binder and itself is just itself
+ if a == b {
+ return Ok(a);
+ }
+
+ if a.skip_binder().has_escaping_bound_vars() || b.skip_binder().has_escaping_bound_vars() {
+ // When higher-ranked types are involved, computing the GLB/LUB is
+ // very challenging, switch to invariance. This is obviously
+ // overly conservative but works ok in practice.
+ self.relate_with_variance(Variance::Invariant, VarianceDiagInfo::default(), a, b)?;
+ Ok(a)
+ } else {
+ Ok(Binder::dummy(self.relate(a.skip_binder(), b.skip_binder())?))
+ }
+ }
+}
+
+impl<'infcx, 'db> LatticeOp<'infcx, 'db> {
+ // Relates the type `v` to `a` and `b` such that `v` represents
+ // the LUB/GLB of `a` and `b` as appropriate.
+ //
+ // Subtle hack: ordering *may* be significant here. This method
+ // relates `v` to `a` first, which may help us to avoid unnecessary
+ // type variable obligations. See caller for details.
+ fn relate_bound(&mut self, v: Ty<'db>, a: Ty<'db>, b: Ty<'db>) -> RelateResult<'db, ()> {
+ let at = self.infcx.at(&self.trace.cause, self.param_env);
+ match self.kind {
+ LatticeOpKind::Glb => {
+ self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, v, a)?.into_obligations());
+ self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, v, b)?.into_obligations());
+ }
+ LatticeOpKind::Lub => {
+ self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, a, v)?.into_obligations());
+ self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, b, v)?.into_obligations());
+ }
+ }
+ Ok(())
+ }
+}
+
+impl<'db> PredicateEmittingRelation<InferCtxt<'db>> for LatticeOp<'_, 'db> {
+ fn span(&self) -> Span {
+ Span::dummy()
+ }
+
+ fn structurally_relate_aliases(&self) -> StructurallyRelateAliases {
+ StructurallyRelateAliases::No
+ }
+
+ fn param_env(&self) -> ParamEnv<'db> {
+ self.param_env
+ }
+
+ fn register_predicates(
+ &mut self,
+ preds: impl IntoIterator<Item: Upcast<DbInterner<'db>, Predicate<'db>>>,
+ ) {
+ self.obligations.extend(preds.into_iter().map(|pred| {
+ Obligation::new(self.infcx.interner, self.trace.cause.clone(), self.param_env, pred)
+ }))
+ }
+
+ fn register_goals(&mut self, goals: impl IntoIterator<Item = Goal<'db, Predicate<'db>>>) {
+ self.obligations.extend(goals.into_iter().map(|goal| {
+ Obligation::new(
+ self.infcx.interner,
+ self.trace.cause.clone(),
+ goal.param_env,
+ goal.predicate,
+ )
+ }))
+ }
+
+ fn register_alias_relate_predicate(&mut self, a: Ty<'db>, b: Ty<'db>) {
+ self.register_predicates([Binder::dummy(PredicateKind::AliasRelate(
+ a.into(),
+ b.into(),
+ // FIXME(deferred_projection_equality): This isn't right, I think?
+ AliasRelationDirection::Equate,
+ ))]);
+ }
+}
diff --git a/crates/hir-ty/src/next_solver/infer/relate/mod.rs b/crates/hir-ty/src/next_solver/infer/relate/mod.rs
index 836ae39..0cc1cf7 100644
--- a/crates/hir-ty/src/next_solver/infer/relate/mod.rs
+++ b/crates/hir-ty/src/next_solver/infer/relate/mod.rs
@@ -9,5 +9,6 @@
mod generalize;
mod higher_ranked;
+pub(crate) mod lattice;
pub type RelateResult<'db, T> = rustc_type_ir::relate::RelateResult<DbInterner<'db>, T>;
diff --git a/crates/hir-ty/src/next_solver/infer/select.rs b/crates/hir-ty/src/next_solver/infer/select.rs
new file mode 100644
index 0000000..4f111fa
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/infer/select.rs
@@ -0,0 +1,334 @@
+use std::ops::ControlFlow;
+
+use hir_def::{ImplId, TraitId};
+use rustc_type_ir::{
+ Interner,
+ solve::{BuiltinImplSource, CandidateSource, Certainty, inspect::ProbeKind},
+};
+
+use crate::{
+ db::InternedOpaqueTyId,
+ next_solver::{
+ Const, ErrorGuaranteed, GenericArgs, Goal, TraitRef, Ty, TypeError,
+ infer::{
+ InferCtxt,
+ traits::{Obligation, ObligationCause, PredicateObligation, TraitObligation},
+ },
+ inspect::{InspectCandidate, InspectGoal, ProofTreeVisitor},
+ },
+};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum SelectionError<'db> {
+ /// The trait is not implemented.
+ Unimplemented,
+    /// After a closure impl has been selected, its "outputs" were evaluated
+ /// (which for closures includes the "input" type params) and they
+ /// didn't resolve. See `confirm_poly_trait_refs` for more.
+ SignatureMismatch(Box<SignatureMismatchData<'db>>),
+    /// The trait identified by the `TraitId` is dyn-incompatible.
+ TraitDynIncompatible(TraitId),
+ /// A given constant couldn't be evaluated.
+ NotConstEvaluatable(NotConstEvaluatable),
+ /// Exceeded the recursion depth during type projection.
+ Overflow(OverflowError),
+ /// Computing an opaque type's hidden type caused an error (e.g. a cycle error).
+ /// We can thus not know whether the hidden type implements an auto trait, so
+ /// we should not presume anything about it.
+ OpaqueTypeAutoTraitLeakageUnknown(InternedOpaqueTyId),
+ /// Error for a `ConstArgHasType` goal
+ ConstArgHasWrongType { ct: Const<'db>, ct_ty: Ty<'db>, expected_ty: Ty<'db> },
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum NotConstEvaluatable {
+ Error(ErrorGuaranteed),
+ MentionsInfer,
+ MentionsParam,
+}
+
+/// Indicates that trait evaluation caused overflow and in which pass.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum OverflowError {
+ Error(ErrorGuaranteed),
+ Canonical,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct SignatureMismatchData<'db> {
+ pub found_trait_ref: TraitRef<'db>,
+ pub expected_trait_ref: TraitRef<'db>,
+ pub terr: TypeError<'db>,
+}
+
+/// When performing resolution, it is typically the case that there
+/// can be one of three outcomes:
+///
+/// - `Ok(Some(r))`: success occurred with result `r`
+/// - `Ok(None)`: could not definitely determine anything, usually due
+/// to inconclusive type inference.
+/// - `Err(e)`: error `e` occurred
+pub type SelectionResult<'db, T> = Result<Option<T>, SelectionError<'db>>;
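+
+// Hedged sketch of consuming a `SelectionResult` from `InferCtxt::select`
+// defined later in this file (the handling in each arm is illustrative):
+//
+//     match infcx.select(&obligation) {
+//         Ok(Some(impl_source)) => { /* e.g. walk `impl_source.nested_obligations()` */ }
+//         Ok(None) => { /* ambiguous: inference has not pinned the types down yet */ }
+//         Err(SelectionError::Unimplemented) => { /* the bound definitely does not hold */ }
+//         Err(_) => { /* other selection failures */ }
+//     }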
+
+/// Given the successful resolution of an obligation, the `ImplSource`
+/// indicates where the impl comes from.
+///
+/// For example, the obligation may be satisfied by a specific impl (case A),
+/// or it may be relative to some bound that is in scope (case B).
+///
+/// ```ignore (illustrative)
+/// impl<T:Clone> Clone<T> for Option<T> { ... } // Impl_1
+/// impl<T:Clone> Clone<T> for Box<T> { ... } // Impl_2
+/// impl Clone for i32 { ... } // Impl_3
+///
+/// fn foo<T: Clone>(concrete: Option<Box<i32>>, param: T, mixed: Option<T>) {
+/// // Case A: ImplSource points at a specific impl. Only possible when
+/// // type is concretely known. If the impl itself has bounded
+/// // type parameters, ImplSource will carry resolutions for those as well:
+/// concrete.clone(); // ImplSource(Impl_1, [ImplSource(Impl_2, [ImplSource(Impl_3)])])
+///
+/// // Case B: ImplSource must be provided by caller. This applies when
+/// // type is a type parameter.
+/// param.clone(); // ImplSource::Param
+///
+/// // Case C: A mix of cases A and B.
+/// mixed.clone(); // ImplSource(Impl_1, [ImplSource::Param])
+/// }
+/// ```
+///
+/// ### The type parameter `N`
+///
+/// See explanation on `ImplSourceUserDefinedData`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ImplSource<'db, N> {
+ /// ImplSource identifying a particular impl.
+ UserDefined(ImplSourceUserDefinedData<'db, N>),
+
+ /// Successful resolution to an obligation provided by the caller
+ /// for some type parameter. The `Vec<N>` represents the
+ /// obligations incurred from normalizing the where-clause (if
+ /// any).
+ Param(Vec<N>),
+
+ /// Successful resolution for a builtin impl.
+ Builtin(BuiltinImplSource, Vec<N>),
+}
+
+impl<'db, N> ImplSource<'db, N> {
+ pub fn nested_obligations(self) -> Vec<N> {
+ match self {
+ ImplSource::UserDefined(i) => i.nested,
+ ImplSource::Param(n) | ImplSource::Builtin(_, n) => n,
+ }
+ }
+
+ pub fn borrow_nested_obligations(&self) -> &[N] {
+ match self {
+ ImplSource::UserDefined(i) => &i.nested,
+ ImplSource::Param(n) | ImplSource::Builtin(_, n) => n,
+ }
+ }
+
+ pub fn borrow_nested_obligations_mut(&mut self) -> &mut [N] {
+ match self {
+ ImplSource::UserDefined(i) => &mut i.nested,
+ ImplSource::Param(n) | ImplSource::Builtin(_, n) => n,
+ }
+ }
+
+ pub fn map<M, F>(self, f: F) -> ImplSource<'db, M>
+ where
+ F: FnMut(N) -> M,
+ {
+ match self {
+ ImplSource::UserDefined(i) => ImplSource::UserDefined(ImplSourceUserDefinedData {
+ impl_def_id: i.impl_def_id,
+ args: i.args,
+ nested: i.nested.into_iter().map(f).collect(),
+ }),
+ ImplSource::Param(n) => ImplSource::Param(n.into_iter().map(f).collect()),
+ ImplSource::Builtin(source, n) => {
+ ImplSource::Builtin(source, n.into_iter().map(f).collect())
+ }
+ }
+ }
+}
+
+/// Identifies a particular impl in the source, along with a set of
+/// generic parameters from the impl's type/lifetime parameters. The
+/// `nested` vector corresponds to the nested obligations attached to
+/// the impl's type parameters.
+///
+/// The type parameter `N` indicates the type used for "nested
+/// obligations" that are required by the impl. During type-check, this
+/// is `Obligation`, as one might expect. During codegen, however, this
+/// is `()`, because codegen only requires a shallow resolution of an
+/// impl, and nested obligations are satisfied later.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ImplSourceUserDefinedData<'db, N> {
+ pub impl_def_id: ImplId,
+ pub args: GenericArgs<'db>,
+ pub nested: Vec<N>,
+}
+
+pub type Selection<'db> = ImplSource<'db, PredicateObligation<'db>>;
+
+impl<'db> InferCtxt<'db> {
+ pub(crate) fn select(
+ &self,
+ obligation: &TraitObligation<'db>,
+ ) -> SelectionResult<'db, Selection<'db>> {
+ self.visit_proof_tree(
+ Goal::new(self.interner, obligation.param_env, obligation.predicate),
+ &mut Select {},
+ )
+ .break_value()
+ .unwrap()
+ }
+}
+
+struct Select {}
+
+impl<'db> ProofTreeVisitor<'db> for Select {
+ type Result = ControlFlow<SelectionResult<'db, Selection<'db>>>;
+
+ fn visit_goal(&mut self, goal: &InspectGoal<'_, 'db>) -> Self::Result {
+ let mut candidates = goal.candidates();
+ candidates.retain(|cand| cand.result().is_ok());
+
+ // No candidates -- not implemented.
+ if candidates.is_empty() {
+ return ControlFlow::Break(Err(SelectionError::Unimplemented));
+ }
+
+ // One candidate, no need to winnow.
+ if candidates.len() == 1 {
+ return ControlFlow::Break(Ok(to_selection(candidates.into_iter().next().unwrap())));
+ }
+
+ // Don't winnow until `Certainty::Yes` -- we don't need to winnow until
+ // codegen, and only on the good path.
+ if matches!(goal.result().unwrap(), Certainty::Maybe { .. }) {
+ return ControlFlow::Break(Ok(None));
+ }
+
+ // We need to winnow. See comments on `candidate_should_be_dropped_in_favor_of`.
+ let mut i = 0;
+ while i < candidates.len() {
+ let should_drop_i = (0..candidates.len())
+ .filter(|&j| i != j)
+ .any(|j| candidate_should_be_dropped_in_favor_of(&candidates[i], &candidates[j]));
+ if should_drop_i {
+ candidates.swap_remove(i);
+ } else {
+ i += 1;
+ if i > 1 {
+ return ControlFlow::Break(Ok(None));
+ }
+ }
+ }
+
+ ControlFlow::Break(Ok(to_selection(candidates.into_iter().next().unwrap())))
+ }
+}
+
+/// This is a lot more limited than the old solver's equivalent method. This may lead to more `Ok(None)`
+/// results when selecting traits in polymorphic contexts, but we should never rely on the lack of ambiguity,
+/// and should always just gracefully fail here. We shouldn't rely on this incompleteness.
+fn candidate_should_be_dropped_in_favor_of<'db>(
+ victim: &InspectCandidate<'_, 'db>,
+ other: &InspectCandidate<'_, 'db>,
+) -> bool {
+ // Don't winnow until `Certainty::Yes` -- we don't need to winnow until
+ // codegen, and only on the good path.
+ if matches!(other.result().unwrap(), Certainty::Maybe { .. }) {
+ return false;
+ }
+
+ let ProbeKind::TraitCandidate { source: victim_source, result: _ } = victim.kind() else {
+ return false;
+ };
+ let ProbeKind::TraitCandidate { source: other_source, result: _ } = other.kind() else {
+ return false;
+ };
+
+ match (victim_source, other_source) {
+ (_, CandidateSource::CoherenceUnknowable) | (CandidateSource::CoherenceUnknowable, _) => {
+ panic!("should not have assembled a CoherenceUnknowable candidate")
+ }
+
+ // In the old trait solver, we arbitrarily choose lower vtable candidates
+ // over higher ones.
+ (
+ CandidateSource::BuiltinImpl(BuiltinImplSource::Object(a)),
+ CandidateSource::BuiltinImpl(BuiltinImplSource::Object(b)),
+ ) => a >= b,
+ (
+ CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting(a)),
+ CandidateSource::BuiltinImpl(BuiltinImplSource::TraitUpcasting(b)),
+ ) => a >= b,
+ // Prefer dyn candidates over non-dyn candidates. This is necessary to
+ // handle the unsoundness between `impl<T: ?Sized> Any for T` and `dyn Any: Any`.
+ (
+ CandidateSource::Impl(_) | CandidateSource::ParamEnv(_) | CandidateSource::AliasBound,
+ CandidateSource::BuiltinImpl(BuiltinImplSource::Object { .. }),
+ ) => true,
+
+ // Prefer specializing candidates over specialized candidates.
+ (CandidateSource::Impl(victim_def_id), CandidateSource::Impl(other_def_id)) => {
+ victim.goal().infcx().interner.impl_specializes(other_def_id, victim_def_id)
+ }
+
+ _ => false,
+ }
+}
+
+fn to_selection<'db>(cand: InspectCandidate<'_, 'db>) -> Option<Selection<'db>> {
+ if let Certainty::Maybe { .. } = cand.shallow_certainty() {
+ return None;
+ }
+
+ let nested = match cand.result().expect("expected positive result") {
+ Certainty::Yes => Vec::new(),
+ Certainty::Maybe { .. } => cand
+ .instantiate_nested_goals()
+ .into_iter()
+ .map(|nested| {
+ Obligation::new(
+ nested.infcx().interner,
+ ObligationCause::dummy(),
+ nested.goal().param_env,
+ nested.goal().predicate,
+ )
+ })
+ .collect(),
+ };
+
+ Some(match cand.kind() {
+ ProbeKind::TraitCandidate { source, result: _ } => match source {
+ CandidateSource::Impl(impl_def_id) => {
+ // FIXME: Remove this in favor of storing this in the tree
+ // For impl candidates, we do the rematch manually to compute the args.
+ ImplSource::UserDefined(ImplSourceUserDefinedData {
+ impl_def_id: impl_def_id.0,
+ args: cand.instantiate_impl_args(),
+ nested,
+ })
+ }
+ CandidateSource::BuiltinImpl(builtin) => ImplSource::Builtin(builtin, nested),
+ CandidateSource::ParamEnv(_) | CandidateSource::AliasBound => ImplSource::Param(nested),
+ CandidateSource::CoherenceUnknowable => {
+ panic!("didn't expect to select an unknowable candidate")
+ }
+ },
+ ProbeKind::NormalizedSelfTyAssembly
+ | ProbeKind::UnsizeAssembly
+ | ProbeKind::ProjectionCompatibility
+ | ProbeKind::OpaqueTypeStorageLookup { result: _ }
+ | ProbeKind::Root { result: _ }
+ | ProbeKind::ShadowedEnvProbing
+ | ProbeKind::RigidAlias { result: _ } => {
+ panic!("didn't expect to assemble trait candidate from {:#?}", cand.kind())
+ }
+ })
+}
diff --git a/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs b/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs
new file mode 100644
index 0000000..7435357
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs
@@ -0,0 +1,263 @@
+use std::ops::Range;
+
+use ena::{
+ snapshot_vec as sv,
+ unify::{self as ut, UnifyKey},
+};
+use rustc_type_ir::{
+ ConstVid, FloatVid, IntVid, RegionKind, RegionVid, TyVid, TypeFoldable, TypeFolder,
+ TypeSuperFoldable, TypeVisitableExt, inherent::IntoKind,
+};
+
+use crate::next_solver::{
+ Const, ConstKind, DbInterner, Region, Ty, TyKind,
+ infer::{
+ InferCtxt, UnificationTable, iter_idx_range,
+ snapshot::VariableLengths,
+ type_variable::TypeVariableOrigin,
+ unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey},
+ },
+};
+
+fn vars_since_snapshot<'db, T>(
+ table: &UnificationTable<'_, 'db, T>,
+ snapshot_var_len: usize,
+) -> Range<T>
+where
+ T: UnifyKey,
+ super::UndoLog<'db>: From<sv::UndoLog<ut::Delegate<T>>>,
+{
+ T::from_index(snapshot_var_len as u32)..T::from_index(table.len() as u32)
+}
+
+fn const_vars_since_snapshot<'db>(
+ table: &mut UnificationTable<'_, 'db, ConstVidKey<'db>>,
+ snapshot_var_len: usize,
+) -> (Range<ConstVid>, Vec<ConstVariableOrigin>) {
+ let range = vars_since_snapshot(table, snapshot_var_len);
+ let range = range.start.vid..range.end.vid;
+
+ (
+ range.clone(),
+ iter_idx_range(range)
+ .map(|index| match table.probe_value(index) {
+ ConstVariableValue::Known { value: _ } => {
+ ConstVariableOrigin { param_def_id: None }
+ }
+ ConstVariableValue::Unknown { origin, universe: _ } => origin,
+ })
+ .collect(),
+ )
+}
+
+impl<'db> InferCtxt<'db> {
+ /// This rather funky routine is used while processing expected
+ /// types. What happens here is that we want to propagate a
+ /// coercion through the return type of a fn to its
+ /// argument. Consider the type of `Option::Some`, which is
+ /// basically `for<T> fn(T) -> Option<T>`. So if we have an
+ /// expression `Some(&[1, 2, 3])`, and that has the expected type
+ /// `Option<&[u32]>`, we would like to type check `&[1, 2, 3]`
+ /// with the expectation of `&[u32]`. This will cause us to coerce
+    /// from `&[u32; 3]` to `&[u32]` and make the user's life more
+ /// pleasant.
+ ///
+ /// The way we do this is using `fudge_inference_if_ok`. What the
+ /// routine actually does is to start a snapshot and execute the
+ /// closure `f`. In our example above, what this closure will do
+ /// is to unify the expectation (`Option<&[u32]>`) with the actual
+ /// return type (`Option<?T>`, where `?T` represents the variable
+ /// instantiated for `T`). This will cause `?T` to be unified
+ /// with `&?a [u32]`, where `?a` is a fresh lifetime variable. The
+ /// input type (`?T`) is then returned by `f()`.
+ ///
+ /// At this point, `fudge_inference_if_ok` will normalize all type
+ /// variables, converting `?T` to `&?a [u32]` and end the
+ /// snapshot. The problem is that we can't just return this type
+ /// out, because it references the region variable `?a`, and that
+ /// region variable was popped when we popped the snapshot.
+ ///
+ /// So what we do is to keep a list (`region_vars`, in the code below)
+ /// of region variables created during the snapshot (here, `?a`). We
+ /// fold the return value and replace any such regions with a *new*
+ /// region variable (e.g., `?b`) and return the result (`&?b [u32]`).
+ /// This can then be used as the expectation for the fn argument.
+ ///
+ /// The important point here is that, for soundness purposes, the
+ /// regions in question are not particularly important. We will
+ /// use the expected types to guide coercions, but we will still
+ /// type-check the resulting types from those coercions against
+ /// the actual types (`?T`, `Option<?T>`) -- and remember that
+ /// after the snapshot is popped, the variable `?T` is no longer
+ /// unified.
+ pub fn fudge_inference_if_ok<T, E, F>(&self, f: F) -> Result<T, E>
+ where
+ F: FnOnce() -> Result<T, E>,
+ T: TypeFoldable<DbInterner<'db>>,
+ {
+ let variable_lengths = self.variable_lengths();
+ let (snapshot_vars, value) = self.probe(|_| {
+ let value = f()?;
+ // At this point, `value` could in principle refer
+ // to inference variables that have been created during
+ // the snapshot. Once we exit `probe()`, those are
+ // going to be popped, so we will have to
+ // eliminate any references to them.
+ let snapshot_vars = SnapshotVarData::new(self, variable_lengths);
+ Ok((snapshot_vars, self.resolve_vars_if_possible(value)))
+ })?;
+
+ // At this point, we need to replace any of the now-popped
+ // type/region variables that appear in `value` with a fresh
+ // variable of the appropriate kind. We can't do this during
+ // the probe because they would just get popped then too. =)
+ Ok(self.fudge_inference(snapshot_vars, value))
+ }
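+
+    // Hedged sketch of the pattern described above (`expected_ret`, `formal_ret`
+    // and `formal_args` are assumed names, and `unify` stands for whatever
+    // relation the caller performs; none of these come from this change):
+    //
+    //     let expect_args = infcx.fudge_inference_if_ok(|| {
+    //         // trial unification; variables created here die with the probe
+    //         unify(expected_ret, formal_ret)?;
+    //         Ok(formal_args)
+    //     })?;
+    //
+    // Any snapshot-local variables in `formal_args` are replaced with fresh ones,
+    // so the result remains usable after the probe has been rolled back.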
+
+ fn fudge_inference<T: TypeFoldable<DbInterner<'db>>>(
+ &self,
+ snapshot_vars: SnapshotVarData,
+ value: T,
+ ) -> T {
+ // Micro-optimization: if no variables have been created, then
+ // `value` can't refer to any of them. =) So we can just return it.
+ if snapshot_vars.is_empty() {
+ value
+ } else {
+ value.fold_with(&mut InferenceFudger { infcx: self, snapshot_vars })
+ }
+ }
+}
+
+struct SnapshotVarData {
+ region_vars: Range<RegionVid>,
+ type_vars: (Range<TyVid>, Vec<TypeVariableOrigin>),
+ int_vars: Range<IntVid>,
+ float_vars: Range<FloatVid>,
+ const_vars: (Range<ConstVid>, Vec<ConstVariableOrigin>),
+}
+
+impl SnapshotVarData {
+ fn new(infcx: &InferCtxt<'_>, vars_pre_snapshot: VariableLengths) -> SnapshotVarData {
+ let mut inner = infcx.inner.borrow_mut();
+ let region_vars = inner
+ .unwrap_region_constraints()
+ .vars_since_snapshot(vars_pre_snapshot.region_constraints_len);
+ let type_vars = inner.type_variables().vars_since_snapshot(vars_pre_snapshot.type_var_len);
+ let int_vars =
+ vars_since_snapshot(&inner.int_unification_table(), vars_pre_snapshot.int_var_len);
+ let float_vars =
+ vars_since_snapshot(&inner.float_unification_table(), vars_pre_snapshot.float_var_len);
+
+ let const_vars = const_vars_since_snapshot(
+ &mut inner.const_unification_table(),
+ vars_pre_snapshot.const_var_len,
+ );
+ SnapshotVarData { region_vars, type_vars, int_vars, float_vars, const_vars }
+ }
+
+ fn is_empty(&self) -> bool {
+ let SnapshotVarData { region_vars, type_vars, int_vars, float_vars, const_vars } = self;
+ region_vars.is_empty()
+ && type_vars.0.is_empty()
+ && int_vars.is_empty()
+ && float_vars.is_empty()
+ && const_vars.0.is_empty()
+ }
+}
+
+struct InferenceFudger<'a, 'db> {
+ infcx: &'a InferCtxt<'db>,
+ snapshot_vars: SnapshotVarData,
+}
+
+impl<'a, 'db> TypeFolder<DbInterner<'db>> for InferenceFudger<'a, 'db> {
+ fn cx(&self) -> DbInterner<'db> {
+ self.infcx.interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
+ if let TyKind::Infer(infer_ty) = ty.kind() {
+ match infer_ty {
+ rustc_type_ir::TyVar(vid) => {
+ if self.snapshot_vars.type_vars.0.contains(&vid) {
+ // This variable was created during the fudging.
+ // Recreate it with a fresh variable here.
+ let idx = vid.as_usize() - self.snapshot_vars.type_vars.0.start.as_usize();
+ let origin = self.snapshot_vars.type_vars.1[idx];
+ self.infcx.next_ty_var_with_origin(origin)
+ } else {
+ // This variable was created before the
+ // "fudging". Since we refresh all type
+ // variables to their binding anyhow, we know
+ // that it is unbound, so we can just return
+ // it.
+ debug_assert!(
+ self.infcx.inner.borrow_mut().type_variables().probe(vid).is_unknown()
+ );
+ ty
+ }
+ }
+ rustc_type_ir::IntVar(vid) => {
+ if self.snapshot_vars.int_vars.contains(&vid) {
+ self.infcx.next_int_var()
+ } else {
+ ty
+ }
+ }
+ rustc_type_ir::FloatVar(vid) => {
+ if self.snapshot_vars.float_vars.contains(&vid) {
+ self.infcx.next_float_var()
+ } else {
+ ty
+ }
+ }
+ rustc_type_ir::FreshTy(_)
+ | rustc_type_ir::FreshIntTy(_)
+ | rustc_type_ir::FreshFloatTy(_) => {
+ unreachable!("unexpected fresh infcx var")
+ }
+ }
+ } else if ty.has_infer() {
+ ty.super_fold_with(self)
+ } else {
+ ty
+ }
+ }
+
+ fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
+ if let RegionKind::ReVar(vid) = r.kind() {
+ if self.snapshot_vars.region_vars.contains(&vid) {
+ self.infcx.next_region_var()
+ } else {
+ r
+ }
+ } else {
+ r
+ }
+ }
+
+ fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> {
+ if let ConstKind::Infer(infer_ct) = ct.kind() {
+ match infer_ct {
+ rustc_type_ir::InferConst::Var(vid) => {
+ if self.snapshot_vars.const_vars.0.contains(&vid) {
+ let idx = vid.index() - self.snapshot_vars.const_vars.0.start.index();
+ let origin = self.snapshot_vars.const_vars.1[idx];
+ self.infcx.next_const_var_with_origin(origin)
+ } else {
+ ct
+ }
+ }
+ rustc_type_ir::InferConst::Fresh(_) => {
+ unreachable!("unexpected fresh infcx var")
+ }
+ }
+ } else if ct.has_infer() {
+ ct.super_fold_with(self)
+ } else {
+ ct
+ }
+ }
+}
diff --git a/crates/hir-ty/src/next_solver/infer/snapshot/mod.rs b/crates/hir-ty/src/next_solver/infer/snapshot/mod.rs
index 8c7dfb2..7b9ca96 100644
--- a/crates/hir-ty/src/next_solver/infer/snapshot/mod.rs
+++ b/crates/hir-ty/src/next_solver/infer/snapshot/mod.rs
@@ -7,6 +7,7 @@
use super::InferCtxt;
use super::region_constraints::RegionSnapshot;
+mod fudge;
pub(crate) mod undo_log;
use undo_log::{Snapshot, UndoLog};
diff --git a/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs b/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs
index 28ae56f..05a1013 100644
--- a/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs
+++ b/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs
@@ -178,6 +178,10 @@
})
}
+ pub(crate) fn opaque_types_in_snapshot(&self, s: &Snapshot) -> bool {
+ self.logs[s.undo_len..].iter().any(|log| matches!(log, UndoLog::OpaqueTypes(..)))
+ }
+
fn assert_open_snapshot(&self, snapshot: &Snapshot) {
// Failures here may indicate a failure to follow a stack discipline.
assert!(self.logs.len() >= snapshot.undo_len);
diff --git a/crates/hir-ty/src/next_solver/infer/traits.rs b/crates/hir-ty/src/next_solver/infer/traits.rs
index f1df806..68aa12d 100644
--- a/crates/hir-ty/src/next_solver/infer/traits.rs
+++ b/crates/hir-ty/src/next_solver/infer/traits.rs
@@ -7,14 +7,16 @@
hash::{Hash, Hasher},
};
+use rustc_type_ir::elaborate::Elaboratable;
use rustc_type_ir::{
PredicatePolarity, Upcast,
solve::{Certainty, NoSolution},
};
+use rustc_type_ir::{TypeFoldable, TypeVisitable};
use crate::next_solver::{
- Binder, DbInterner, Goal, ParamEnv, PolyTraitPredicate, Predicate, SolverDefId, TraitPredicate,
- Ty,
+ Binder, Clause, DbInterner, Goal, ParamEnv, PolyTraitPredicate, Predicate, SolverDefId, Span,
+ TraitPredicate, Ty,
};
use super::InferCtxt;
@@ -29,24 +31,29 @@
/// only live for a short period of time.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ObligationCause {
- /// The ID of the fn body that triggered this obligation. This is
- /// used for region obligations to determine the precise
- /// environment in which the region obligation should be evaluated
- /// (in particular, closures can add new assumptions). See the
- /// field `region_obligations` of the `FulfillmentContext` for more
- /// information.
- pub body_id: Option<SolverDefId>,
+ // FIXME: This should contain an `ExprId`/`PatId` etc., and a cause code. But for now we
+ // don't report trait solving diagnostics, so this is irrelevant.
+ _private: (),
}
impl ObligationCause {
+ #[expect(
+ clippy::new_without_default,
+ reason = "`new` is temporary, eventually we will provide span etc. here"
+ )]
#[inline]
- pub fn new(body_id: SolverDefId) -> ObligationCause {
- ObligationCause { body_id: Some(body_id) }
+ pub fn new() -> ObligationCause {
+ ObligationCause { _private: () }
}
- #[inline(always)]
+ #[inline]
pub fn dummy() -> ObligationCause {
- ObligationCause { body_id: None }
+ ObligationCause::new()
+ }
+
+ #[inline]
+ pub fn misc() -> ObligationCause {
+ ObligationCause::new()
}
}
@@ -75,6 +82,72 @@
pub recursion_depth: usize,
}
+/// For [`Obligation`], a sub-obligation is combined with the current obligation's
+/// param-env and cause code.
+impl<'db> Elaboratable<DbInterner<'db>> for PredicateObligation<'db> {
+ fn predicate(&self) -> Predicate<'db> {
+ self.predicate
+ }
+
+ fn child(&self, clause: Clause<'db>) -> Self {
+ Obligation {
+ cause: self.cause.clone(),
+ param_env: self.param_env,
+ recursion_depth: 0,
+ predicate: clause.as_predicate(),
+ }
+ }
+
+ fn child_with_derived_cause(
+ &self,
+ clause: Clause<'db>,
+        _span: Span,
+        _parent_trait_pred: PolyTraitPredicate<'db>,
+        _index: usize,
+ ) -> Self {
+ let cause = ObligationCause::new();
+ Obligation {
+ cause,
+ param_env: self.param_env,
+ recursion_depth: 0,
+ predicate: clause.as_predicate(),
+ }
+ }
+}
+
+impl<'db, T: TypeVisitable<DbInterner<'db>>> TypeVisitable<DbInterner<'db>> for Obligation<'db, T> {
+ fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
+ &self,
+ visitor: &mut V,
+ ) -> V::Result {
+ rustc_ast_ir::try_visit!(self.param_env.visit_with(visitor));
+ self.predicate.visit_with(visitor)
+ }
+}
+
+impl<'db, T: TypeFoldable<DbInterner<'db>>> TypeFoldable<DbInterner<'db>> for Obligation<'db, T> {
+ fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
+ self,
+ folder: &mut F,
+ ) -> Result<Self, F::Error> {
+ Ok(Obligation {
+ cause: self.cause.clone(),
+ param_env: self.param_env.try_fold_with(folder)?,
+ predicate: self.predicate.try_fold_with(folder)?,
+ recursion_depth: self.recursion_depth,
+ })
+ }
+
+ fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
+ Obligation {
+ cause: self.cause.clone(),
+ param_env: self.param_env.fold_with(folder),
+ predicate: self.predicate.fold_with(folder),
+ recursion_depth: self.recursion_depth,
+ }
+ }
+}
+
impl<'db, T: Copy> Obligation<'db, T> {
pub fn as_goal(&self) -> Goal<'db, T> {
Goal { param_env: self.param_env, predicate: self.predicate }
@@ -156,15 +229,6 @@
Obligation { cause, param_env, recursion_depth, predicate }
}
- pub fn misc(
- tcx: DbInterner<'db>,
- body_id: SolverDefId,
- param_env: ParamEnv<'db>,
- trait_ref: impl Upcast<DbInterner<'db>, O>,
- ) -> Obligation<'db, O> {
- Obligation::new(tcx, ObligationCause::new(body_id), param_env, trait_ref)
- }
-
pub fn with<P>(
&self,
tcx: DbInterner<'db>,
diff --git a/crates/hir-ty/src/next_solver/infer/type_variable.rs b/crates/hir-ty/src/next_solver/infer/type_variable.rs
index b640039..29e7b88 100644
--- a/crates/hir-ty/src/next_solver/infer/type_variable.rs
+++ b/crates/hir-ty/src/next_solver/infer/type_variable.rs
@@ -15,7 +15,7 @@
use crate::next_solver::SolverDefId;
use crate::next_solver::Ty;
-use crate::next_solver::infer::InferCtxtUndoLogs;
+use crate::next_solver::infer::{InferCtxtUndoLogs, iter_idx_range};
/// Represents a single undo-able action that affects a type inference variable.
#[derive(Clone)]
@@ -59,7 +59,7 @@
}
}
-#[derive(Clone, Default)]
+#[derive(Debug, Clone, Default)]
pub(crate) struct TypeVariableStorage<'db> {
/// The origins of each type variable.
values: IndexVec<TyVid, TypeVariableData>,
@@ -102,7 +102,7 @@
pub param_def_id: Option<SolverDefId>,
}
-#[derive(Clone)]
+#[derive(Debug, Clone)]
pub(crate) struct TypeVariableData {
origin: TypeVariableOrigin,
}
@@ -267,6 +267,15 @@
self.storage.sub_unification_table.with_log(self.undo_log)
}
+ /// Returns a range of the type variables created during the snapshot.
+ pub(crate) fn vars_since_snapshot(
+ &mut self,
+ value_count: usize,
+ ) -> (Range<TyVid>, Vec<TypeVariableOrigin>) {
+ let range = TyVid::from_usize(value_count)..TyVid::from_usize(self.num_vars());
+ (range.clone(), iter_idx_range(range).map(|index| self.var_origin(index)).collect())
+ }
+
/// Returns indices of all variables that are not yet
/// instantiated.
pub(crate) fn unresolved_variables(&mut self) -> Vec<TyVid> {
diff --git a/crates/hir-ty/src/next_solver/inspect.rs b/crates/hir-ty/src/next_solver/inspect.rs
new file mode 100644
index 0000000..bc19d51
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/inspect.rs
@@ -0,0 +1,499 @@
+pub use rustc_next_trait_solver::solve::inspect::*;
+
+use rustc_ast_ir::try_visit;
+use rustc_next_trait_solver::{
+ canonical::instantiate_canonical_state,
+ resolve::eager_resolve_vars,
+ solve::{SolverDelegateEvalExt, inspect},
+};
+use rustc_type_ir::{
+ VisitorResult,
+ inherent::{IntoKind, Span as _},
+ solve::{Certainty, GoalSource, MaybeCause, NoSolution},
+};
+
+use crate::next_solver::{
+ DbInterner, GenericArg, GenericArgs, Goal, NormalizesTo, ParamEnv, Predicate, PredicateKind,
+ QueryResult, SolverContext, Span, Term,
+ fulfill::NextSolverError,
+ infer::{
+ InferCtxt,
+ traits::{Obligation, ObligationCause},
+ },
+ obligation_ctxt::ObligationCtxt,
+};
+
+pub struct InspectConfig {
+ pub max_depth: usize,
+}
+
+pub struct InspectGoal<'a, 'db> {
+ infcx: &'a SolverContext<'db>,
+ depth: usize,
+ orig_values: Vec<GenericArg<'db>>,
+ goal: Goal<'db, Predicate<'db>>,
+ result: Result<Certainty, NoSolution>,
+ final_revision: inspect::Probe<DbInterner<'db>>,
+ normalizes_to_term_hack: Option<NormalizesToTermHack<'db>>,
+ source: GoalSource,
+}
+
+impl<'a, 'db> std::fmt::Debug for InspectGoal<'a, 'db> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("InspectGoal")
+ .field("depth", &self.depth)
+ .field("orig_values", &self.orig_values)
+ .field("goal", &self.goal)
+ .field("result", &self.result)
+ .field("final_revision", &self.final_revision)
+ .field("normalizes_to_term_hack", &self.normalizes_to_term_hack)
+ .field("source", &self.source)
+ .finish()
+ }
+}
+
+impl<'a, 'db> std::fmt::Debug for InspectCandidate<'a, 'db> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("InspectCandidate")
+ .field("kind", &self.kind)
+ .field("steps", &self.steps)
+ .field("final_state", &self.final_state)
+ .field("result", &self.result)
+ .field("shallow_certainty", &self.shallow_certainty)
+ .finish()
+ }
+}
+
+/// The expected term of a `NormalizesTo` goal gets replaced
+/// with an unconstrained inference variable when computing
+/// `NormalizesTo` goals and we return the nested goals to the
+/// caller, who also equates the actual term with the expected.
+///
+/// This is an implementation detail of the trait solver and
+/// not something we want to leak to users. We therefore
+/// treat `NormalizesTo` goals as if they apply the expected
+/// type at the end of each candidate.
+#[derive(Debug, Copy, Clone)]
+struct NormalizesToTermHack<'db> {
+ term: Term<'db>,
+ unconstrained_term: Term<'db>,
+}
+
+impl<'db> NormalizesToTermHack<'db> {
+ /// Relate the `term` with the new `unconstrained_term` created
+ /// when computing the proof tree for this `NormalizesTo` goal.
+ /// This handles nested obligations.
+ fn constrain_and(
+ &self,
+ infcx: &InferCtxt<'db>,
+ param_env: ParamEnv<'db>,
+ f: impl FnOnce(&mut ObligationCtxt<'_, 'db>),
+ ) -> Result<Certainty, NoSolution> {
+ let mut ocx = ObligationCtxt::new(infcx);
+ ocx.eq(&ObligationCause::dummy(), param_env, self.term, self.unconstrained_term)?;
+ f(&mut ocx);
+ let errors = ocx.select_all_or_error();
+ if errors.is_empty() {
+ Ok(Certainty::Yes)
+ } else if errors.iter().all(|e| !matches!(e, NextSolverError::TrueError(_))) {
+ Ok(Certainty::AMBIGUOUS)
+ } else {
+ Err(NoSolution)
+ }
+ }
+}
+
+pub struct InspectCandidate<'a, 'db> {
+ goal: &'a InspectGoal<'a, 'db>,
+ kind: inspect::ProbeKind<DbInterner<'db>>,
+ steps: Vec<&'a inspect::ProbeStep<DbInterner<'db>>>,
+ final_state: inspect::CanonicalState<DbInterner<'db>, ()>,
+ result: QueryResult<'db>,
+ shallow_certainty: Certainty,
+}
+
+impl<'a, 'db> InspectCandidate<'a, 'db> {
+ pub fn kind(&self) -> inspect::ProbeKind<DbInterner<'db>> {
+ self.kind
+ }
+
+ pub fn result(&self) -> Result<Certainty, NoSolution> {
+ self.result.map(|c| c.value.certainty)
+ }
+
+ pub fn goal(&self) -> &'a InspectGoal<'a, 'db> {
+ self.goal
+ }
+
+ /// Certainty passed into `evaluate_added_goals_and_make_canonical_response`.
+ ///
+ /// If this certainty is `Yes`, then we must be confident that the candidate
+ /// must hold iff its nested goals hold. This is not true if the certainty is
+ /// `Maybe(..)`, which suggests we forced ambiguity instead.
+ ///
+ /// This is *not* the certainty of the candidate's full nested evaluation, which
+ /// can be accessed with [`Self::result`] instead.
+ pub fn shallow_certainty(&self) -> Certainty {
+ self.shallow_certainty
+ }
+
+ /// Visit all nested goals of this candidate without rolling
+ /// back their inference constraints. This function modifies
+ /// the state of the `infcx`.
+ pub fn visit_nested_no_probe<V: ProofTreeVisitor<'db>>(&self, visitor: &mut V) -> V::Result {
+ for goal in self.instantiate_nested_goals() {
+ try_visit!(goal.visit_with(visitor));
+ }
+
+ V::Result::output()
+ }
+
+ /// Instantiate the nested goals for the candidate without rolling back their
+ /// inference constraints. This function modifies the state of the `infcx`.
+ ///
+ /// See [`Self::instantiate_impl_args`] if you need the impl args too.
+ pub fn instantiate_nested_goals(&self) -> Vec<InspectGoal<'a, 'db>> {
+ let infcx = self.goal.infcx;
+ let param_env = self.goal.goal.param_env;
+ let mut orig_values = self.goal.orig_values.to_vec();
+
+ let mut instantiated_goals = vec![];
+ for step in &self.steps {
+ match **step {
+ inspect::ProbeStep::AddGoal(source, goal) => instantiated_goals.push((
+ source,
+ instantiate_canonical_state(
+ infcx,
+ Span::dummy(),
+ param_env,
+ &mut orig_values,
+ goal,
+ ),
+ )),
+ inspect::ProbeStep::RecordImplArgs { .. } => {}
+ inspect::ProbeStep::MakeCanonicalResponse { .. }
+ | inspect::ProbeStep::NestedProbe(_) => unreachable!(),
+ }
+ }
+
+ let () = instantiate_canonical_state(
+ infcx,
+ Span::dummy(),
+ param_env,
+ &mut orig_values,
+ self.final_state,
+ );
+
+ if let Some(term_hack) = &self.goal.normalizes_to_term_hack {
+ // FIXME: We ignore the expected term of `NormalizesTo` goals
+ // when computing the result of their candidates. This is
+ // scuffed.
+ let _ = term_hack.constrain_and(infcx, param_env, |_| {});
+ }
+
+ instantiated_goals
+ .into_iter()
+ .map(|(source, goal)| self.instantiate_proof_tree_for_nested_goal(source, goal))
+ .collect()
+ }
+
+ /// Instantiate the args of an impl if this candidate came from a
+ /// `CandidateSource::Impl`. This function modifies the state of the
+ /// `infcx`.
+ pub fn instantiate_impl_args(&self) -> GenericArgs<'db> {
+ let infcx = self.goal.infcx;
+ let param_env = self.goal.goal.param_env;
+ let mut orig_values = self.goal.orig_values.to_vec();
+
+ for step in &self.steps {
+ match **step {
+ inspect::ProbeStep::RecordImplArgs { impl_args } => {
+ let impl_args = instantiate_canonical_state(
+ infcx,
+ Span::dummy(),
+ param_env,
+ &mut orig_values,
+ impl_args,
+ );
+
+ let () = instantiate_canonical_state(
+ infcx,
+ Span::dummy(),
+ param_env,
+ &mut orig_values,
+ self.final_state,
+ );
+
+ // No reason we couldn't support this, but we don't need to for select.
+ assert!(
+ self.goal.normalizes_to_term_hack.is_none(),
+ "cannot use `instantiate_impl_args` with a `NormalizesTo` goal"
+ );
+
+ return eager_resolve_vars(infcx, impl_args);
+ }
+ inspect::ProbeStep::AddGoal(..) => {}
+ inspect::ProbeStep::MakeCanonicalResponse { .. }
+ | inspect::ProbeStep::NestedProbe(_) => unreachable!(),
+ }
+ }
+
+ panic!("expected impl args probe step for `instantiate_impl_args`");
+ }
+
+ pub fn instantiate_proof_tree_for_nested_goal(
+ &self,
+ source: GoalSource,
+ goal: Goal<'db, Predicate<'db>>,
+ ) -> InspectGoal<'a, 'db> {
+ let infcx = self.goal.infcx;
+ match goal.predicate.kind().no_bound_vars() {
+ Some(PredicateKind::NormalizesTo(NormalizesTo { alias, term })) => {
+ let unconstrained_term = infcx.next_term_var_of_kind(term);
+ let goal =
+ goal.with(infcx.interner, NormalizesTo { alias, term: unconstrained_term });
+ // We have to use a `probe` here as evaluating a `NormalizesTo` can constrain the
+ // expected term. This means that candidates which only fail due to nested goals
+ // and which normalize to a different term than the final result could ICE: when
+ // building their proof tree, the expected term was unconstrained, but when
+ // instantiating the candidate it is already constrained to the result of another
+ // candidate.
+ let normalizes_to_term_hack = NormalizesToTermHack { term, unconstrained_term };
+ let (proof_tree, nested_goals_result) = infcx.probe(|_| {
+ // Here, if we have any nested goals, then we make sure to apply them
+ // considering the constrained RHS, and pass the resulting certainty to
+ // `InspectGoal::new` so that the goal has the right result (and maintains
+ // the impression that we don't do this normalizes-to infer hack at all).
+ let (nested, proof_tree) =
+ infcx.evaluate_root_goal_for_proof_tree(goal, Span::dummy());
+ let nested_goals_result = nested.and_then(|nested| {
+ normalizes_to_term_hack.constrain_and(
+ infcx,
+ proof_tree.uncanonicalized_goal.param_env,
+ |ocx| {
+ ocx.register_obligations(nested.0.into_iter().map(|(_, goal)| {
+ Obligation::new(
+ infcx.interner,
+ ObligationCause::dummy(),
+ goal.param_env,
+ goal.predicate,
+ )
+ }));
+ },
+ )
+ });
+ (proof_tree, nested_goals_result)
+ });
+ InspectGoal::new(
+ infcx,
+ self.goal.depth + 1,
+ proof_tree,
+ Some((normalizes_to_term_hack, nested_goals_result)),
+ source,
+ )
+ }
+ _ => {
+ // We're using a probe here as evaluating a goal could constrain
+ // inference variables by choosing one candidate. If we then recurse
+ // into another candidate that ends up with different inference
+ // constraints, we get an ICE if we already applied the constraints
+ // from the chosen candidate.
+ let proof_tree =
+ infcx.probe(|_| infcx.evaluate_root_goal_for_proof_tree(goal, Span::dummy()).1);
+ InspectGoal::new(infcx, self.goal.depth + 1, proof_tree, None, source)
+ }
+ }
+ }
+
+ /// Visit all nested goals of this candidate, rolling back
+ /// all inference constraints.
+ pub fn visit_nested_in_probe<V: ProofTreeVisitor<'db>>(&self, visitor: &mut V) -> V::Result {
+ self.goal.infcx.probe(|_| self.visit_nested_no_probe(visitor))
+ }
+}
+
+impl<'a, 'db> InspectGoal<'a, 'db> {
+ pub fn infcx(&self) -> &'a InferCtxt<'db> {
+ self.infcx
+ }
+
+ pub fn goal(&self) -> Goal<'db, Predicate<'db>> {
+ self.goal
+ }
+
+ pub fn result(&self) -> Result<Certainty, NoSolution> {
+ self.result
+ }
+
+ pub fn source(&self) -> GoalSource {
+ self.source
+ }
+
+ pub fn depth(&self) -> usize {
+ self.depth
+ }
+
+ fn candidates_recur(
+ &'a self,
+ candidates: &mut Vec<InspectCandidate<'a, 'db>>,
+ steps: &mut Vec<&'a inspect::ProbeStep<DbInterner<'db>>>,
+ probe: &'a inspect::Probe<DbInterner<'db>>,
+ ) {
+ let mut shallow_certainty = None;
+ for step in &probe.steps {
+ match *step {
+ inspect::ProbeStep::AddGoal(..) | inspect::ProbeStep::RecordImplArgs { .. } => {
+ steps.push(step)
+ }
+ inspect::ProbeStep::MakeCanonicalResponse { shallow_certainty: c } => {
+ assert!(matches!(
+ shallow_certainty.replace(c),
+ None | Some(Certainty::Maybe { cause: MaybeCause::Ambiguity, .. })
+ ));
+ }
+ inspect::ProbeStep::NestedProbe(ref probe) => {
+ match probe.kind {
+ // These never assemble candidates for the goal we're trying to solve.
+ inspect::ProbeKind::ProjectionCompatibility
+ | inspect::ProbeKind::ShadowedEnvProbing => continue,
+
+ inspect::ProbeKind::NormalizedSelfTyAssembly
+ | inspect::ProbeKind::UnsizeAssembly
+ | inspect::ProbeKind::Root { .. }
+ | inspect::ProbeKind::TraitCandidate { .. }
+ | inspect::ProbeKind::OpaqueTypeStorageLookup { .. }
+ | inspect::ProbeKind::RigidAlias { .. } => {
+ // Nested probes have to prove goals added in their parent
+ // but do not leak them, so we truncate the added goals
+ // afterwards.
+ let num_steps = steps.len();
+ self.candidates_recur(candidates, steps, probe);
+ steps.truncate(num_steps);
+ }
+ }
+ }
+ }
+ }
+
+ match probe.kind {
+ inspect::ProbeKind::ProjectionCompatibility
+ | inspect::ProbeKind::ShadowedEnvProbing => {
+ panic!()
+ }
+
+ inspect::ProbeKind::NormalizedSelfTyAssembly | inspect::ProbeKind::UnsizeAssembly => {}
+
+ // We add a candidate even for the root evaluation if there
+ // is only one way to prove a given goal, e.g. for `WellFormed`.
+ inspect::ProbeKind::Root { result }
+ | inspect::ProbeKind::TraitCandidate { source: _, result }
+ | inspect::ProbeKind::OpaqueTypeStorageLookup { result }
+ | inspect::ProbeKind::RigidAlias { result } => {
+ // We only add a candidate if `shallow_certainty` was set, which means
+ // that we ended up calling `evaluate_added_goals_and_make_canonical_response`.
+ if let Some(shallow_certainty) = shallow_certainty {
+ candidates.push(InspectCandidate {
+ goal: self,
+ kind: probe.kind,
+ steps: steps.clone(),
+ final_state: probe.final_state,
+ shallow_certainty,
+ result,
+ });
+ }
+ }
+ }
+ }
+
+ pub fn candidates(&'a self) -> Vec<InspectCandidate<'a, 'db>> {
+ let mut candidates = vec![];
+ let mut nested_goals = vec![];
+ self.candidates_recur(&mut candidates, &mut nested_goals, &self.final_revision);
+ candidates
+ }
+
+ /// Returns the single candidate applicable for the current goal, if it exists.
+ ///
+ /// Returns `None` if there are either no or multiple applicable candidates.
+ pub fn unique_applicable_candidate(&'a self) -> Option<InspectCandidate<'a, 'db>> {
+ // FIXME(-Znext-solver): This does not handle impl candidates
+ // hidden by env candidates.
+ let mut candidates = self.candidates();
+ candidates.retain(|c| c.result().is_ok());
+ candidates.pop().filter(|_| candidates.is_empty())
+ }
+
+ fn new(
+ infcx: &'a InferCtxt<'db>,
+ depth: usize,
+ root: inspect::GoalEvaluation<DbInterner<'db>>,
+ term_hack_and_nested_certainty: Option<(
+ NormalizesToTermHack<'db>,
+ Result<Certainty, NoSolution>,
+ )>,
+ source: GoalSource,
+ ) -> Self {
+ let infcx = <&SolverContext<'db>>::from(infcx);
+
+ let inspect::GoalEvaluation { uncanonicalized_goal, orig_values, final_revision, result } =
+ root;
+ // If there's a normalizes-to goal, AND the evaluation result with the result of
+ // constraining the normalizes-to RHS and computing the nested goals.
+ let result = result.and_then(|ok| {
+ let nested_goals_certainty =
+ term_hack_and_nested_certainty.map_or(Ok(Certainty::Yes), |(_, c)| c)?;
+ Ok(ok.value.certainty.and(nested_goals_certainty))
+ });
+
+ InspectGoal {
+ infcx,
+ depth,
+ orig_values,
+ goal: eager_resolve_vars(infcx, uncanonicalized_goal),
+ result,
+ final_revision,
+ normalizes_to_term_hack: term_hack_and_nested_certainty.map(|(n, _)| n),
+ source,
+ }
+ }
+
+ pub(crate) fn visit_with<V: ProofTreeVisitor<'db>>(&self, visitor: &mut V) -> V::Result {
+ if self.depth < visitor.config().max_depth {
+ try_visit!(visitor.visit_goal(self));
+ }
+
+ V::Result::output()
+ }
+}
+
+/// The public API to interact with proof trees.
+pub trait ProofTreeVisitor<'db> {
+ type Result: VisitorResult;
+
+ fn config(&self) -> InspectConfig {
+ InspectConfig { max_depth: 10 }
+ }
+
+ fn visit_goal(&mut self, goal: &InspectGoal<'_, 'db>) -> Self::Result;
+}
+
+impl<'db> InferCtxt<'db> {
+ pub(crate) fn visit_proof_tree<V: ProofTreeVisitor<'db>>(
+ &self,
+ goal: Goal<'db, Predicate<'db>>,
+ visitor: &mut V,
+ ) -> V::Result {
+ self.visit_proof_tree_at_depth(goal, 0, visitor)
+ }
+
+ pub(crate) fn visit_proof_tree_at_depth<V: ProofTreeVisitor<'db>>(
+ &self,
+ goal: Goal<'db, Predicate<'db>>,
+ depth: usize,
+ visitor: &mut V,
+ ) -> V::Result {
+ let (_, proof_tree) = <&SolverContext<'db>>::from(self)
+ .evaluate_root_goal_for_proof_tree(goal, Span::dummy());
+ visitor.visit_goal(&InspectGoal::new(self, depth, proof_tree, None, GoalSource::Misc))
+ }
+}
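
`ProofTreeVisitor` is the public entry point of this module. As a rough illustration of how it is meant to be driven (the visitor type and its policy below are invented for the sketch; only the trait, `InspectGoal`, and `visit_proof_tree` come from the diff, and the module's imports are assumed to be in scope):

    use std::ops::ControlFlow;

    /// Hypothetical visitor that stops at the first goal which is not
    /// certainly proven, recursing through unique applicable candidates.
    struct FindNonTrivialGoal;

    impl<'db> ProofTreeVisitor<'db> for FindNonTrivialGoal {
        type Result = ControlFlow<()>;

        fn visit_goal(&mut self, goal: &InspectGoal<'_, 'db>) -> Self::Result {
            if !matches!(goal.result(), Ok(Certainty::Yes)) {
                return ControlFlow::Break(());
            }
            match goal.unique_applicable_candidate() {
                // Recurse into the candidate's nested goals, rolling the
                // inference constraints back afterwards.
                Some(candidate) => candidate.visit_nested_in_probe(self),
                None => ControlFlow::Continue(()),
            }
        }
    }

    // Assumed call site: `infcx: &InferCtxt<'db>`, `goal: Goal<'db, Predicate<'db>>`.
    // let found = infcx.visit_proof_tree(goal, &mut FindNonTrivialGoal).is_break();
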
diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs
index 76d10f7..9cf56be 100644
--- a/crates/hir-ty/src/next_solver/interner.rs
+++ b/crates/hir-ty/src/next_solver/interner.rs
@@ -48,8 +48,8 @@
use crate::next_solver::util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls};
use crate::next_solver::{
AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
- CoroutineIdWrapper, FxIndexMap, ImplIdWrapper, InternedWrapperNoDebug, RegionAssumptions,
- SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper,
+ CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, InternedWrapperNoDebug,
+ RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper,
};
use crate::{ConstScalar, FnAbi, Interner, db::HirDatabase};
@@ -1055,60 +1055,62 @@
}
fn variances_of(self, def_id: Self::DefId) -> Self::VariancesOf {
- match def_id {
- SolverDefId::FunctionId(def_id) => VariancesOf::new_from_iter(
- self,
- self.db()
- .variances_of(hir_def::GenericDefId::FunctionId(def_id))
- .as_deref()
- .unwrap_or_default()
- .iter()
- .map(|v| v.to_nextsolver(self)),
- ),
- SolverDefId::AdtId(def_id) => VariancesOf::new_from_iter(
- self,
- self.db()
- .variances_of(hir_def::GenericDefId::AdtId(def_id))
- .as_deref()
- .unwrap_or_default()
- .iter()
- .map(|v| v.to_nextsolver(self)),
- ),
+ let generic_def = match def_id {
+ SolverDefId::FunctionId(def_id) => def_id.into(),
+ SolverDefId::AdtId(def_id) => def_id.into(),
+ SolverDefId::Ctor(Ctor::Struct(def_id)) => def_id.into(),
+ SolverDefId::Ctor(Ctor::Enum(def_id)) => def_id.loc(self.db).parent.into(),
SolverDefId::InternedOpaqueTyId(_def_id) => {
// FIXME(next-solver): track variances
//
// We compute them based on the only `Ty` level info in rustc,
// move `variances_of_opaque` into `rustc_next_trait_solver` for reuse.
- VariancesOf::new_from_iter(
+ return VariancesOf::new_from_iter(
self,
(0..self.generics_of(def_id).count()).map(|_| Variance::Invariant),
- )
+ );
}
- _ => VariancesOf::new_from_iter(self, []),
- }
+ _ => return VariancesOf::new_from_iter(self, []),
+ };
+ VariancesOf::new_from_iter(
+ self,
+ self.db()
+ .variances_of(generic_def)
+ .as_deref()
+ .unwrap_or_default()
+ .iter()
+ .map(|v| v.to_nextsolver(self)),
+ )
}
fn type_of(self, def_id: Self::DefId) -> EarlyBinder<Self, Self::Ty> {
- let def_id = match def_id {
+ match def_id {
SolverDefId::TypeAliasId(id) => {
use hir_def::Lookup;
match id.lookup(self.db()).container {
ItemContainerId::ImplId(it) => it,
_ => panic!("assoc ty value should be in impl"),
};
- crate::TyDefId::TypeAliasId(id)
+ self.db().ty_ns(id.into())
}
- SolverDefId::AdtId(id) => crate::TyDefId::AdtId(id),
+ SolverDefId::AdtId(id) => self.db().ty_ns(id.into()),
// FIXME(next-solver): This uses the types of `query mir_borrowck` in rustc.
//
// We currently always use the type from HIR typeck which ignores regions. This
// should be fine.
- SolverDefId::InternedOpaqueTyId(_) => {
- return self.type_of_opaque_hir_typeck(def_id);
+ SolverDefId::InternedOpaqueTyId(_) => self.type_of_opaque_hir_typeck(def_id),
+ SolverDefId::FunctionId(id) => self.db.value_ty_ns(id.into()).unwrap(),
+ SolverDefId::Ctor(id) => {
+ let id = match id {
+ Ctor::Struct(id) => id.into(),
+ Ctor::Enum(id) => id.into(),
+ };
+ self.db
+ .value_ty_ns(id)
+ .expect("`SolverDefId::Ctor` should have a function-like ctor")
}
_ => panic!("Unexpected def_id `{def_id:?}` provided for `type_of`"),
- };
- self.db().ty_ns(def_id)
+ }
}
fn adt_def(self, def_id: Self::AdtId) -> Self::AdtDef {
@@ -1672,6 +1674,20 @@
}
}
+ fn for_each_blanket_impl(self, trait_def_id: Self::TraitId, mut f: impl FnMut(Self::ImplId)) {
+ let Some(krate) = self.krate else { return };
+
+ for impls in self.db.trait_impls_in_deps(krate).iter() {
+ for impl_id in impls.for_trait(trait_def_id.0) {
+ let impl_data = self.db.impl_signature(impl_id);
+ let self_ty_ref = &impl_data.store[impl_data.self_ty];
+ if matches!(self_ty_ref, hir_def::type_ref::TypeRef::TypeParam(_)) {
+ f(impl_id.into());
+ }
+ }
+ }
+ }
+
fn has_item_definition(self, def_id: Self::DefId) -> bool {
// FIXME(next-solver): should check if the associated item has a value.
true
@@ -2012,6 +2028,28 @@
) -> T {
self.replace_escaping_bound_vars_uncached(value.skip_binder(), delegate)
}
+
+ pub fn mk_fn_sig<I>(
+ self,
+ inputs: I,
+ output: Ty<'db>,
+ c_variadic: bool,
+ safety: Safety,
+ abi: FnAbi,
+ ) -> FnSig<'db>
+ where
+ I: IntoIterator<Item = Ty<'db>>,
+ {
+ FnSig {
+ inputs_and_output: Tys::new_from_iter(
+ self,
+ inputs.into_iter().chain(std::iter::once(output)),
+ ),
+ c_variadic,
+ safety,
+ abi,
+ }
+ }
}
macro_rules! TrivialTypeTraversalImpls {
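
`mk_fn_sig` mirrors the rustc helper of the same name: the output type is appended after the inputs into a single `Tys` list. A hedged sketch of building a signature, assuming `interner: DbInterner<'db>` and some already-obtained `Ty<'db>` values; the `FnAbi::Rust` variant name is an assumption:

    // Hedged sketch: `arg_ty` and `ret_ty` stand in for types the caller already has.
    let sig = interner.mk_fn_sig(
        [arg_ty],     // inputs
        ret_ty,       // output, stored as the last element of `inputs_and_output`
        false,        // c_variadic
        Safety::Safe,
        FnAbi::Rust,  // assumed variant name
    );
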
diff --git a/crates/hir-ty/src/next_solver/mapping.rs b/crates/hir-ty/src/next_solver/mapping.rs
index eb2cc69..b24b996 100644
--- a/crates/hir-ty/src/next_solver/mapping.rs
+++ b/crates/hir-ty/src/next_solver/mapping.rs
@@ -147,6 +147,24 @@
fn to_chalk(self, interner: DbInterner<'db>) -> Out;
}
+impl NextSolverToChalk<'_, chalk_ir::Mutability> for rustc_ast_ir::Mutability {
+ fn to_chalk(self, interner: DbInterner<'_>) -> chalk_ir::Mutability {
+ match self {
+ rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not,
+ rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut,
+ }
+ }
+}
+
+impl NextSolverToChalk<'_, chalk_ir::Safety> for crate::next_solver::abi::Safety {
+ fn to_chalk(self, interner: DbInterner<'_>) -> chalk_ir::Safety {
+ match self {
+ crate::next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe,
+ crate::next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe,
+ }
+ }
+}
+
impl<'db> ChalkToNextSolver<'db, Ty<'db>> for chalk_ir::Ty<Interner> {
fn to_nextsolver(&self, interner: DbInterner<'db>) -> Ty<'db> {
Ty::new(
@@ -352,8 +370,7 @@
}),
);
let region = dyn_ty.lifetime.to_nextsolver(interner);
- let kind = rustc_type_ir::DynKind::Dyn;
- rustc_type_ir::TyKind::Dynamic(bounds, region, kind)
+ rustc_type_ir::TyKind::Dynamic(bounds, region)
}
chalk_ir::TyKind::Alias(alias_ty) => match alias_ty {
chalk_ir::AliasTy::Projection(projection_ty) => {
@@ -617,6 +634,15 @@
}
}
+impl<'db> NextSolverToChalk<'db, crate::Substitution> for Tys<'db> {
+ fn to_chalk(self, interner: DbInterner<'db>) -> crate::Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.inner().iter().map(|ty| ty.to_chalk(interner).cast(Interner)),
+ )
+ }
+}
+
impl<'db> ChalkToNextSolver<'db, rustc_type_ir::DebruijnIndex> for chalk_ir::DebruijnIndex {
fn to_nextsolver(&self, _interner: DbInterner<'db>) -> rustc_type_ir::DebruijnIndex {
rustc_type_ir::DebruijnIndex::from_u32(self.depth())
@@ -859,6 +885,16 @@
}
}
+impl<'db> NextSolverToChalk<'db, crate::ProjectionTy> for crate::next_solver::AliasTy<'db> {
+ fn to_chalk(self, interner: DbInterner<'db>) -> crate::ProjectionTy {
+ let SolverDefId::TypeAliasId(assoc_id) = self.def_id else { unreachable!() };
+ crate::ProjectionTy {
+ associated_ty_id: to_assoc_type_id(assoc_id),
+ substitution: self.args.to_chalk(interner),
+ }
+ }
+}
+
impl<'db> ChalkToNextSolver<'db, ParamEnv<'db>> for chalk_ir::Environment<Interner> {
fn to_nextsolver(&self, interner: DbInterner<'db>) -> ParamEnv<'db> {
let clauses = Clauses::new_from_iter(
@@ -1408,8 +1444,7 @@
TyKind::Function(fnptr)
}
- rustc_type_ir::TyKind::Dynamic(preds, region, dyn_kind) => {
- assert!(matches!(dyn_kind, rustc_type_ir::DynKind::Dyn));
+ rustc_type_ir::TyKind::Dynamic(preds, region) => {
let self_ty = Ty::new_bound(
interner,
DebruijnIndex::from_u32(1),
diff --git a/crates/hir-ty/src/next_solver/project/solve_normalize.rs b/crates/hir-ty/src/next_solver/normalize.rs
similarity index 86%
rename from crates/hir-ty/src/next_solver/project/solve_normalize.rs
rename to crates/hir-ty/src/next_solver/normalize.rs
index 42c238f..41cb488 100644
--- a/crates/hir-ty/src/next_solver/project/solve_normalize.rs
+++ b/crates/hir-ty/src/next_solver/normalize.rs
@@ -1,35 +1,23 @@
-//! Normalization within a next-solver infer context.
-
-use std::fmt::Debug;
-
use rustc_next_trait_solver::placeholder::BoundVarReplacer;
use rustc_type_ir::{
AliasRelationDirection, FallibleTypeFolder, Flags, Interner, TermKind, TypeFoldable,
TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
- inherent::{IntoKind, Span as _, Term as _},
+ inherent::{IntoKind, Term as _},
};
+use crate::next_solver::SolverDefId;
use crate::next_solver::{
- Binder, Const, ConstKind, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Span, Term, Ty,
- TyKind, TypingMode,
+ Binder, Const, ConstKind, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Term, Ty,
+ TyKind,
fulfill::{FulfillmentCtxt, NextSolverError},
infer::{
- DbInternerInferExt, InferCtxt,
+ InferCtxt,
at::At,
traits::{Obligation, ObligationCause},
},
util::PlaceholderReplacer,
};
-pub fn normalize<'db, T>(interner: DbInterner<'db>, param_env: ParamEnv<'db>, value: T) -> T
-where
- T: TypeFoldable<DbInterner<'db>>,
-{
- let infer_ctxt = interner.infer_ctxt().build(TypingMode::non_body_analysis());
- let cause = ObligationCause::dummy();
- deeply_normalize(infer_ctxt.at(&cause, param_env), value.clone()).unwrap_or(value)
-}
-
/// Deeply normalize all aliases in `value`. This does not handle inference and expects
/// its input to be already fully resolved.
pub fn deeply_normalize<'db, T>(at: At<'_, 'db>, value: T) -> Result<T, Vec<NextSolverError<'db>>>
@@ -81,10 +69,16 @@
T: TypeFoldable<DbInterner<'db>>,
{
let fulfill_cx = FulfillmentCtxt::new(at.infcx);
- let mut folder = NormalizationFolder { at, fulfill_cx, depth: 0, universes };
+ let mut folder = NormalizationFolder {
+ at,
+ fulfill_cx,
+ depth: 0,
+ universes,
+ stalled_coroutine_goals: vec![],
+ };
let value = value.try_fold_with(&mut folder)?;
let errors = folder.fulfill_cx.select_all_or_error(at.infcx);
- if errors.is_empty() { Ok((value, vec![])) } else { Err(errors) }
+ if errors.is_empty() { Ok((value, folder.stalled_coroutine_goals)) } else { Err(errors) }
}
struct NormalizationFolder<'me, 'db> {
@@ -92,6 +86,7 @@
fulfill_cx: FulfillmentCtxt<'db>,
depth: usize,
universes: Vec<Option<UniverseIndex>>,
+ stalled_coroutine_goals: Vec<Goal<'db, Predicate<'db>>>,
}
impl<'db> NormalizationFolder<'_, 'db> {
@@ -100,22 +95,30 @@
alias_term: Term<'db>,
) -> Result<Term<'db>, Vec<NextSolverError<'db>>> {
let infcx = self.at.infcx;
- let tcx = infcx.interner;
- let recursion_limit = tcx.recursion_limit();
- if self.depth > recursion_limit {
- return Err(vec![]);
- }
+ let interner = infcx.interner;
+ let recursion_limit = interner.recursion_limit();
self.depth += 1;
let infer_term = infcx.next_term_var_of_kind(alias_term);
let obligation = Obligation::new(
- tcx,
+ interner,
self.at.cause.clone(),
self.at.param_env,
PredicateKind::AliasRelate(alias_term, infer_term, AliasRelationDirection::Equate),
);
+ if self.depth > recursion_limit {
+ // let term = alias_term.to_alias_term().unwrap();
+ // self.at.infcx.err_ctxt().report_overflow_error(
+ // OverflowCause::DeeplyNormalize(term),
+ // self.at.cause.span,
+ // true,
+ // |_| {},
+ // );
+ return Err(vec![NextSolverError::Overflow(obligation)]);
+ }
+
self.fulfill_cx.register_predicate_obligation(infcx, obligation);
self.select_all_and_stall_coroutine_predicates()?;
@@ -140,6 +143,13 @@
return Err(errors);
}
+ self.stalled_coroutine_goals.extend(
+ self.fulfill_cx
+ .drain_stalled_obligations_for_coroutines(self.at.infcx)
+ .into_iter()
+ .map(|obl| obl.as_goal()),
+ );
+
let errors = self.fulfill_cx.collect_remaining_errors(self.at.infcx);
if !errors.is_empty() {
return Err(errors);
@@ -166,7 +176,6 @@
Ok(t)
}
- #[tracing::instrument(level = "trace", skip(self), ret)]
fn try_fold_ty(&mut self, ty: Ty<'db>) -> Result<Ty<'db>, Self::Error> {
let infcx = self.at.infcx;
debug_assert_eq!(ty, infcx.shallow_resolve(ty));
@@ -193,7 +202,6 @@
}
}
- #[tracing::instrument(level = "trace", skip(self), ret)]
fn try_fold_const(&mut self, ct: Const<'db>) -> Result<Const<'db>, Self::Error> {
let infcx = self.at.infcx;
debug_assert_eq!(ct, infcx.shallow_resolve_const(ct));
@@ -232,8 +240,8 @@
})
}
-struct DeeplyNormalizeForDiagnosticsFolder<'a, 'db> {
- at: At<'a, 'db>,
+struct DeeplyNormalizeForDiagnosticsFolder<'a, 'tcx> {
+ at: At<'a, 'tcx>,
}
impl<'db> TypeFolder<DbInterner<'db>> for DeeplyNormalizeForDiagnosticsFolder<'_, 'db> {
diff --git a/crates/hir-ty/src/next_solver/obligation_ctxt.rs b/crates/hir-ty/src/next_solver/obligation_ctxt.rs
new file mode 100644
index 0000000..8e2dc0d
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/obligation_ctxt.rs
@@ -0,0 +1,203 @@
+use hir_def::TraitId;
+use rustc_type_ir::relate::Relate;
+use rustc_type_ir::{TypeFoldable, Upcast, Variance};
+
+use crate::next_solver::fulfill::{FulfillmentCtxt, NextSolverError};
+use crate::next_solver::infer::at::ToTrace;
+use crate::next_solver::infer::traits::{
+ Obligation, ObligationCause, PredicateObligation, PredicateObligations,
+};
+use crate::next_solver::infer::{DefineOpaqueTypes, InferCtxt, InferOk, TypeTrace};
+use crate::next_solver::{Const, DbInterner, ParamEnv, Term, TraitRef, Ty, TypeError};
+
+/// Used if you want to have a pleasant experience when dealing
+/// with obligations outside of hir or mir typeck.
+pub struct ObligationCtxt<'a, 'db> {
+ pub infcx: &'a InferCtxt<'db>,
+ engine: FulfillmentCtxt<'db>,
+}
+
+impl<'a, 'db> ObligationCtxt<'a, 'db> {
+ pub fn new(infcx: &'a InferCtxt<'db>) -> Self {
+ Self { infcx, engine: FulfillmentCtxt::new(infcx) }
+ }
+}
+
+impl<'a, 'db> ObligationCtxt<'a, 'db> {
+ pub fn register_obligation(&mut self, obligation: PredicateObligation<'db>) {
+ self.engine.register_predicate_obligation(self.infcx, obligation);
+ }
+
+ pub fn register_obligations(
+ &mut self,
+ obligations: impl IntoIterator<Item = PredicateObligation<'db>>,
+ ) {
+ self.engine.register_predicate_obligations(self.infcx, obligations);
+ }
+
+ pub fn register_infer_ok_obligations<T>(&mut self, infer_ok: InferOk<'db, T>) -> T {
+ let InferOk { value, obligations } = infer_ok;
+ self.register_obligations(obligations);
+ value
+ }
+
+ /// Requires that `ty` implement the trait with `def_id` in
+ /// the given environment. This trait must not have any type
+ /// parameters (except for `Self`).
+ pub fn register_bound(
+ &mut self,
+ cause: ObligationCause,
+ param_env: ParamEnv<'db>,
+ ty: Ty<'db>,
+ def_id: TraitId,
+ ) {
+ let trait_ref = TraitRef::new(self.infcx.interner, def_id.into(), [ty]);
+ self.register_obligation(Obligation {
+ cause,
+ recursion_depth: 0,
+ param_env,
+ predicate: trait_ref.upcast(self.infcx.interner),
+ });
+ }
+
+ pub fn eq<T: ToTrace<'db>>(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ expected: T,
+ actual: T,
+ ) -> Result<(), TypeError<'db>> {
+ self.infcx
+ .at(cause, param_env)
+ .eq(DefineOpaqueTypes::Yes, expected, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ pub fn eq_trace<T: Relate<DbInterner<'db>>>(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ trace: TypeTrace<'db>,
+ expected: T,
+ actual: T,
+ ) -> Result<(), TypeError<'db>> {
+ self.infcx
+ .at(cause, param_env)
+ .eq_trace(DefineOpaqueTypes::Yes, trace, expected, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ /// Checks whether `expected` is a subtype of `actual`: `expected <: actual`.
+ pub fn sub<T: ToTrace<'db>>(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ expected: T,
+ actual: T,
+ ) -> Result<(), TypeError<'db>> {
+ self.infcx
+ .at(cause, param_env)
+ .sub(DefineOpaqueTypes::Yes, expected, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ pub fn relate<T: ToTrace<'db>>(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ variance: Variance,
+ expected: T,
+ actual: T,
+ ) -> Result<(), TypeError<'db>> {
+ self.infcx
+ .at(cause, param_env)
+ .relate(DefineOpaqueTypes::Yes, expected, variance, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ /// Checks whether `expected` is a supertype of `actual`: `expected :> actual`.
+ pub fn sup<T: ToTrace<'db>>(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ expected: T,
+ actual: T,
+ ) -> Result<(), TypeError<'db>> {
+ self.infcx
+ .at(cause, param_env)
+ .sup(DefineOpaqueTypes::Yes, expected, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ /// Computes the least-upper-bound, or mutual supertype, of two values.
+ pub fn lub<T: ToTrace<'db>>(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ expected: T,
+ actual: T,
+ ) -> Result<T, TypeError<'db>> {
+ self.infcx
+ .at(cause, param_env)
+ .lub(expected, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ #[must_use]
+ pub fn select_where_possible(&mut self) -> Vec<NextSolverError<'db>> {
+ self.engine.select_where_possible(self.infcx)
+ }
+
+ #[must_use]
+ pub fn select_all_or_error(&mut self) -> Vec<NextSolverError<'db>> {
+ self.engine.select_all_or_error(self.infcx)
+ }
+
+ /// Returns the not-yet-processed and stalled obligations from the
+ /// `ObligationCtxt`.
+ ///
+ /// Takes ownership of the context as doing operations such as
+ /// [`ObligationCtxt::eq`] afterwards will result in other obligations
+ /// getting ignored. You can make a new `ObligationCtxt` if this
+ /// needs to be done in a loop, for example.
+ #[must_use]
+ pub fn into_pending_obligations(self) -> PredicateObligations<'db> {
+ self.engine.pending_obligations()
+ }
+
+ pub fn deeply_normalize<T: TypeFoldable<DbInterner<'db>>>(
+ &self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ value: T,
+ ) -> Result<T, Vec<NextSolverError<'db>>> {
+ self.infcx.at(cause, param_env).deeply_normalize(value)
+ }
+
+ pub fn structurally_normalize_ty(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ value: Ty<'db>,
+ ) -> Result<Ty<'db>, Vec<NextSolverError<'db>>> {
+ self.infcx.at(cause, param_env).structurally_normalize_ty(value, &mut self.engine)
+ }
+
+ pub fn structurally_normalize_const(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ value: Const<'db>,
+ ) -> Result<Const<'db>, Vec<NextSolverError<'db>>> {
+ self.infcx.at(cause, param_env).structurally_normalize_const(value, &mut self.engine)
+ }
+
+ pub fn structurally_normalize_term(
+ &mut self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ value: Term<'db>,
+ ) -> Result<Term<'db>, Vec<NextSolverError<'db>>> {
+ self.infcx.at(cause, param_env).structurally_normalize_term(value, &mut self.engine)
+ }
+}
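
The intended lifecycle is the same one `NormalizesToTermHack::constrain_and` uses in `inspect.rs`: build the context around an `InferCtxt`, register constraints, then drain the errors. A condensed sketch, with `infcx`, `param_env`, `expected_ty`, and `actual_ty` assumed to exist:

    // Hedged sketch of the register-then-select pattern.
    let mut ocx = ObligationCtxt::new(&infcx);
    if ocx.eq(&ObligationCause::dummy(), param_env, expected_ty, actual_ty).is_err() {
        // the two types cannot be equated at all
    }
    let errors = ocx.select_all_or_error();
    if !errors.is_empty() {
        // nested obligations failed or stayed ambiguous; report or propagate them
    }
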
diff --git a/crates/hir-ty/src/next_solver/predicate.rs b/crates/hir-ty/src/next_solver/predicate.rs
index c86d3a4..8654541 100644
--- a/crates/hir-ty/src/next_solver/predicate.rs
+++ b/crates/hir-ty/src/next_solver/predicate.rs
@@ -227,6 +227,8 @@
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
+ #[cfg(feature = "in-rust-tree")]
+ stable_hash: ena::fingerprint::Fingerprint::ZERO,
};
Predicate::new_(interner.db(), InternedWrapperNoDebug(cached))
}
@@ -262,28 +264,6 @@
Some(Predicate::new(DbInterner::conjure(), kind))
}
-
- pub fn as_trait_clause(self) -> Option<PolyTraitPredicate<'db>> {
- let predicate = self.kind();
- match predicate.skip_binder() {
- PredicateKind::Clause(ClauseKind::Trait(t)) => Some(predicate.rebind(t)),
- PredicateKind::Clause(ClauseKind::Projection(..))
- | PredicateKind::Clause(ClauseKind::HostEffect(..))
- | PredicateKind::Clause(ClauseKind::ConstArgHasType(..))
- | PredicateKind::Clause(ClauseKind::UnstableFeature(_))
- | PredicateKind::NormalizesTo(..)
- | PredicateKind::AliasRelate(..)
- | PredicateKind::Subtype(..)
- | PredicateKind::Coerce(..)
- | PredicateKind::Clause(ClauseKind::RegionOutlives(..))
- | PredicateKind::Clause(ClauseKind::WellFormed(..))
- | PredicateKind::DynCompatible(..)
- | PredicateKind::Clause(ClauseKind::TypeOutlives(..))
- | PredicateKind::Clause(ClauseKind::ConstEvaluatable(..))
- | PredicateKind::ConstEquate(..)
- | PredicateKind::Ambiguous => None,
- }
- }
}
// FIXME: should make a "header" in interned_vec
@@ -693,6 +673,12 @@
}
}
+impl<'db> UpcastFrom<DbInterner<'db>, PolyRegionOutlivesPredicate<'db>> for Predicate<'db> {
+ fn upcast_from(from: PolyRegionOutlivesPredicate<'db>, tcx: DbInterner<'db>) -> Self {
+ from.map_bound(|p| PredicateKind::Clause(ClauseKind::RegionOutlives(p))).upcast(tcx)
+ }
+}
+
impl<'db> rustc_type_ir::inherent::Predicate<DbInterner<'db>> for Predicate<'db> {
fn as_clause(self) -> Option<<DbInterner<'db> as rustc_type_ir::Interner>::Clause> {
match self.kind().skip_binder() {
@@ -730,6 +716,30 @@
}
impl<'db> Predicate<'db> {
+ pub fn as_trait_clause(self) -> Option<PolyTraitPredicate<'db>> {
+ let predicate = self.kind();
+ match predicate.skip_binder() {
+ PredicateKind::Clause(ClauseKind::Trait(t)) => Some(predicate.rebind(t)),
+ _ => None,
+ }
+ }
+
+ pub fn as_projection_clause(self) -> Option<PolyProjectionPredicate<'db>> {
+ let predicate = self.kind();
+ match predicate.skip_binder() {
+ PredicateKind::Clause(ClauseKind::Projection(t)) => Some(predicate.rebind(t)),
+ _ => None,
+ }
+ }
+
+ /// Matches a `PredicateKind::Clause` and turns it into a `Clause`, otherwise returns `None`.
+ pub fn as_clause(self) -> Option<Clause<'db>> {
+ match self.kind().skip_binder() {
+ PredicateKind::Clause(..) => Some(self.expect_clause()),
+ _ => None,
+ }
+ }
+
/// Assert that the predicate is a clause.
pub fn expect_clause(self) -> Clause<'db> {
match self.kind().skip_binder() {
diff --git a/crates/hir-ty/src/next_solver/project.rs b/crates/hir-ty/src/next_solver/project.rs
deleted file mode 100644
index e578087..0000000
--- a/crates/hir-ty/src/next_solver/project.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-//! Projection code for next-solver.
-
-pub(crate) mod solve_normalize;
diff --git a/crates/hir-ty/src/next_solver/solver.rs b/crates/hir-ty/src/next_solver/solver.rs
index c7591c0..946e57e 100644
--- a/crates/hir-ty/src/next_solver/solver.rs
+++ b/crates/hir-ty/src/next_solver/solver.rs
@@ -2,6 +2,7 @@
use hir_def::{AssocItemId, GeneralConstId, TypeAliasId};
use rustc_next_trait_solver::delegate::SolverDelegate;
+use rustc_type_ir::GenericArgKind;
use rustc_type_ir::lang_items::SolverTraitLangItem;
use rustc_type_ir::{
InferCtxtLike, Interner, PredicatePolarity, TypeFlags, TypeVisitableExt, UniverseIndex,
@@ -65,12 +66,12 @@
(SolverContext(infcx), value, vars)
}
- fn fresh_var_for_kind_with_span(
- &self,
- arg: <Self::Interner as rustc_type_ir::Interner>::GenericArg,
- span: <Self::Interner as rustc_type_ir::Interner>::Span,
- ) -> <Self::Interner as rustc_type_ir::Interner>::GenericArg {
- unimplemented!()
+ fn fresh_var_for_kind_with_span(&self, arg: GenericArg<'db>, span: Span) -> GenericArg<'db> {
+ match arg.kind() {
+ GenericArgKind::Lifetime(_) => self.next_region_var().into(),
+ GenericArgKind::Type(_) => self.next_ty_var().into(),
+ GenericArgKind::Const(_) => self.next_const_var().into(),
+ }
}
fn leak_check(
@@ -92,7 +93,8 @@
>,
>,
> {
- unimplemented!()
+ // FIXME(next-solver):
+ None
}
fn make_deduplicated_outlives_constraints(
diff --git a/crates/hir-ty/src/next_solver/structural_normalize.rs b/crates/hir-ty/src/next_solver/structural_normalize.rs
new file mode 100644
index 0000000..18859d8
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/structural_normalize.rs
@@ -0,0 +1,57 @@
+use rustc_type_ir::{AliasRelationDirection, inherent::Term as _};
+
+use crate::next_solver::{
+ Const, PredicateKind, Term, Ty,
+ fulfill::{FulfillmentCtxt, NextSolverError},
+ infer::{at::At, traits::Obligation},
+};
+
+impl<'db> At<'_, 'db> {
+ pub(crate) fn structurally_normalize_ty(
+ &self,
+ ty: Ty<'db>,
+ fulfill_cx: &mut FulfillmentCtxt<'db>,
+ ) -> Result<Ty<'db>, Vec<NextSolverError<'db>>> {
+ self.structurally_normalize_term(ty.into(), fulfill_cx).map(|term| term.expect_type())
+ }
+
+ pub(crate) fn structurally_normalize_const(
+ &self,
+ ct: Const<'db>,
+ fulfill_cx: &mut FulfillmentCtxt<'db>,
+ ) -> Result<Const<'db>, Vec<NextSolverError<'db>>> {
+ self.structurally_normalize_term(ct.into(), fulfill_cx).map(|term| term.expect_const())
+ }
+
+ pub(crate) fn structurally_normalize_term(
+ &self,
+ term: Term<'db>,
+ fulfill_cx: &mut FulfillmentCtxt<'db>,
+ ) -> Result<Term<'db>, Vec<NextSolverError<'db>>> {
+ assert!(!term.is_infer(), "should have resolved vars before calling");
+
+ if term.to_alias_term().is_none() {
+ return Ok(term);
+ }
+
+ let new_infer = self.infcx.next_term_var_of_kind(term);
+
+ // We simply emit an `alias-eq` goal here, since that will take care of
+ // normalizing the LHS of the projection until it is a rigid projection
+ // (or a not-yet-defined opaque in scope).
+ let obligation = Obligation::new(
+ self.infcx.interner,
+ self.cause.clone(),
+ self.param_env,
+ PredicateKind::AliasRelate(term, new_infer, AliasRelationDirection::Equate),
+ );
+
+ fulfill_cx.register_predicate_obligation(self.infcx, obligation);
+ let errors = fulfill_cx.select_where_possible(self.infcx);
+ if !errors.is_empty() {
+ return Err(errors);
+ }
+
+ Ok(self.infcx.resolve_vars_if_possible(new_infer))
+ }
+}
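
Unlike deep normalization, these helpers only peel the outermost alias by equating the term with a fresh inference variable through an `AliasRelate` goal. Via `ObligationCtxt` that looks roughly like this (a sketch; `ocx`, `cause`, `param_env`, and a `Ty<'db>` value `ty` are assumed to be in scope, with `ocx` mutable):

    // Hedged sketch: resolve inference variables first (the helper asserts that
    // the term is not an unresolved variable), then normalize the outer layer.
    let ty = ocx.infcx.resolve_vars_if_possible(ty);
    let normalized = ocx
        .structurally_normalize_ty(&cause, param_env, ty)
        .unwrap_or(ty); // fall back to the unnormalized type on fulfillment errors
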
diff --git a/crates/hir-ty/src/next_solver/ty.rs b/crates/hir-ty/src/next_solver/ty.rs
index e1c2916..c7a747a 100644
--- a/crates/hir-ty/src/next_solver/ty.rs
+++ b/crates/hir-ty/src/next_solver/ty.rs
@@ -1,17 +1,19 @@
//! Things related to tys in the next-trait-solver.
+use std::iter;
+use std::ops::ControlFlow;
+
use hir_def::{GenericDefId, TypeOrConstParamId, TypeParamId};
use intern::{Interned, Symbol, sym};
use rustc_abi::{Float, Integer, Size};
use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult};
-use rustc_type_ir::Interner;
use rustc_type_ir::{
- BoundVar, ClosureKind, FlagComputation, Flags, FloatTy, FloatVid, InferTy, IntTy, IntVid,
- TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, UintTy,
- WithCachedTypeInfo,
+ BoundVar, ClosureKind, CollectAndApply, FlagComputation, Flags, FloatTy, FloatVid, InferTy,
+ IntTy, IntVid, Interner, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable,
+ TypeVisitableExt, TypeVisitor, UintTy, WithCachedTypeInfo,
inherent::{
- AdtDef, BoundVarLike, GenericArgs as _, IntoKind, ParamLike, PlaceholderLike, SliceLike,
- Ty as _,
+ Abi, AdtDef, BoundVarLike, Const as _, GenericArgs as _, IntoKind, ParamLike,
+ PlaceholderLike, Safety as _, SliceLike, Ty as _,
},
relate::Relate,
solve::SizedTraitKind,
@@ -20,13 +22,16 @@
use salsa::plumbing::{AsId, FromId};
use smallvec::SmallVec;
-use crate::next_solver::{
- CallableIdWrapper, ClosureIdWrapper, CoroutineIdWrapper, GenericArg, TypeAliasIdWrapper,
-};
use crate::{
+ FnAbi,
db::HirDatabase,
interner::InternedWrapperNoDebug,
- next_solver::util::{CoroutineArgsExt, IntegerTypeExt},
+ next_solver::{
+ CallableIdWrapper, ClosureIdWrapper, Const, CoroutineIdWrapper, FnSig, GenericArg,
+ PolyFnSig, TypeAliasIdWrapper,
+ abi::Safety,
+ util::{CoroutineArgsExt, IntegerTypeExt},
+ },
};
use super::{
@@ -55,6 +60,8 @@
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
+ #[cfg(feature = "in-rust-tree")]
+ stable_hash: ena::fingerprint::Fingerprint::ZERO,
};
Ty::new_(interner.db(), InternedWrapperNoDebug(cached))
}
@@ -173,7 +180,7 @@
| TyKind::Never
| TyKind::Error(_) => true,
- TyKind::Str | TyKind::Slice(_) | TyKind::Dynamic(_, _, _) => match sizedness {
+ TyKind::Str | TyKind::Slice(_) | TyKind::Dynamic(_, _) => match sizedness {
SizedTraitKind::Sized => false,
SizedTraitKind::MetaSized => true,
},
@@ -310,6 +317,68 @@
| TyKind::Error(_) => false,
}
}
+
+ #[inline]
+ pub fn is_never(self) -> bool {
+ matches!(self.kind(), TyKind::Never)
+ }
+
+ #[inline]
+ pub fn is_infer(self) -> bool {
+ matches!(self.kind(), TyKind::Infer(..))
+ }
+
+ #[inline]
+ pub fn is_str(self) -> bool {
+ matches!(self.kind(), TyKind::Str)
+ }
+
+ #[inline]
+ pub fn is_unit(self) -> bool {
+ matches!(self.kind(), TyKind::Tuple(tys) if tys.inner().is_empty())
+ }
+
+ /// Given a `fn` type, returns an equivalent `unsafe fn` type;
+ /// that is, a `fn` type that is identical in every way except that it
+ /// is unsafe.
+ pub fn safe_to_unsafe_fn_ty(interner: DbInterner<'db>, sig: PolyFnSig<'db>) -> Ty<'db> {
+ assert!(sig.safety().is_safe());
+ Ty::new_fn_ptr(interner, sig.map_bound(|sig| FnSig { safety: Safety::Unsafe, ..sig }))
+ }
+
+ /// Returns the type of `*ty`.
+ ///
+ /// The parameter `explicit` indicates if this is an *explicit* dereference.
+ /// Some types -- notably raw ptrs -- can only be dereferenced explicitly.
+ pub fn builtin_deref(self, db: &dyn HirDatabase, explicit: bool) -> Option<Ty<'db>> {
+ match self.kind() {
+ TyKind::Adt(adt, substs) if crate::lang_items::is_box(db, adt.def_id().0) => {
+ Some(substs.as_slice()[0].expect_ty())
+ }
+ TyKind::Ref(_, ty, _) => Some(ty),
+ TyKind::RawPtr(ty, _) if explicit => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Whether the type contains some non-lifetime (i.e. type or const) error type.
+ pub fn references_non_lt_error(self) -> bool {
+ self.references_error() && self.visit_with(&mut ReferencesNonLifetimeError).is_break()
+ }
+}
+
+struct ReferencesNonLifetimeError;
+
+impl<'db> TypeVisitor<DbInterner<'db>> for ReferencesNonLifetimeError {
+ type Result = ControlFlow<()>;
+
+ fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result {
+ if ty.is_ty_error() { ControlFlow::Break(()) } else { ty.super_visit_with(self) }
+ }
+
+ fn visit_const(&mut self, c: Const<'db>) -> Self::Result {
+ if c.is_ct_error() { ControlFlow::Break(()) } else { c.super_visit_with(self) }
+ }
}
impl<'db> std::fmt::Debug for Ty<'db> {
@@ -354,7 +423,7 @@
}
TyKind::Slice(typ) => typ.visit_with(visitor),
TyKind::Adt(_, args) => args.visit_with(visitor),
- TyKind::Dynamic(ref trait_ty, ref reg, _) => {
+ TyKind::Dynamic(ref trait_ty, ref reg) => {
try_visit!(trait_ty.visit_with(visitor));
reg.visit_with(visitor)
}
@@ -419,11 +488,9 @@
}
TyKind::Slice(typ) => TyKind::Slice(typ.try_fold_with(folder)?),
TyKind::Adt(tid, args) => TyKind::Adt(tid, args.try_fold_with(folder)?),
- TyKind::Dynamic(trait_ty, region, representation) => TyKind::Dynamic(
- trait_ty.try_fold_with(folder)?,
- region.try_fold_with(folder)?,
- representation,
- ),
+ TyKind::Dynamic(trait_ty, region) => {
+ TyKind::Dynamic(trait_ty.try_fold_with(folder)?, region.try_fold_with(folder)?)
+ }
TyKind::Tuple(ts) => TyKind::Tuple(ts.try_fold_with(folder)?),
TyKind::FnDef(def_id, args) => TyKind::FnDef(def_id, args.try_fold_with(folder)?),
TyKind::FnPtr(sig_tys, hdr) => TyKind::FnPtr(sig_tys.try_fold_with(folder)?, hdr),
@@ -470,11 +537,9 @@
TyKind::Array(typ, sz) => TyKind::Array(typ.fold_with(folder), sz.fold_with(folder)),
TyKind::Slice(typ) => TyKind::Slice(typ.fold_with(folder)),
TyKind::Adt(tid, args) => TyKind::Adt(tid, args.fold_with(folder)),
- TyKind::Dynamic(trait_ty, region, representation) => TyKind::Dynamic(
- trait_ty.fold_with(folder),
- region.fold_with(folder),
- representation,
- ),
+ TyKind::Dynamic(trait_ty, region) => {
+ TyKind::Dynamic(trait_ty.fold_with(folder), region.fold_with(folder))
+ }
TyKind::Tuple(ts) => TyKind::Tuple(ts.fold_with(folder)),
TyKind::FnDef(def_id, args) => TyKind::FnDef(def_id, args.fold_with(folder)),
TyKind::FnPtr(sig_tys, hdr) => TyKind::FnPtr(sig_tys.fold_with(folder), hdr),
@@ -609,9 +674,8 @@
interner: DbInterner<'db>,
preds: <DbInterner<'db> as rustc_type_ir::Interner>::BoundExistentialPredicates,
region: <DbInterner<'db> as rustc_type_ir::Interner>::Region,
- kind: rustc_type_ir::DynKind,
) -> Self {
- Ty::new(interner, TyKind::Dynamic(preds, region, kind))
+ Ty::new(interner, TyKind::Dynamic(preds, region))
}
fn new_coroutine(
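
`builtin_deref` only covers the built-in pointer shapes; user-written `Deref` impls still go through trait solving. A small sketch of the distinction, assuming `db: &dyn HirDatabase` and a `Ty<'db>` value `ty`:

    // Hedged sketch: Box<T> and &T/&mut T always deref here; raw pointers only
    // because `explicit` is true, matching an explicit `*ptr` expression.
    let pointee = ty.builtin_deref(db, /* explicit */ true);
    // `Some(inner)` gives the pointee type; `None` means a user `Deref` impl
    // (if any) would have to be resolved through the trait solver instead.
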
diff --git a/crates/hir-ty/src/next_solver/util.rs b/crates/hir-ty/src/next_solver/util.rs
index 97d3ea7..a7f9817 100644
--- a/crates/hir-ty/src/next_solver/util.rs
+++ b/crates/hir-ty/src/next_solver/util.rs
@@ -409,8 +409,7 @@
// Note: Since we're using `impls_for_trait` and `impl_provided_for`,
// only impls where the trait can be resolved should ever reach Chalk.
// `impl_datum` relies on that and will panic if the trait can't be resolved.
- let in_deps = db.trait_impls_in_deps(krate);
- let in_self = db.trait_impls_in_crate(krate);
+ let in_self_and_deps = db.trait_impls_in_deps(krate);
let trait_module = trait_id.module(db);
let type_module = match self_ty_fp {
Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(db)),
@@ -435,8 +434,7 @@
});
})
.filter_map(|block_id| db.trait_impls_in_block(block_id));
- f(&in_self)?;
- for it in in_deps.iter().map(ops::Deref::deref) {
+ for it in in_self_and_deps.iter().map(ops::Deref::deref) {
f(it)?;
}
for it in block_impls {
@@ -463,7 +461,7 @@
| CoroutineWitness(..) | Never => None,
// these are never sized
- Str | Slice(..) | Dynamic(_, _, rustc_type_ir::DynKind::Dyn) => match sizedness {
+ Str | Slice(..) | Dynamic(_, _) => match sizedness {
// Never `Sized`
SizedTraitKind::Sized => Some(ty),
// Always `MetaSized`
diff --git a/crates/hir-ty/src/target_feature.rs b/crates/hir-ty/src/target_feature.rs
index 9d12387..0a8ed2c 100644
--- a/crates/hir-ty/src/target_feature.rs
+++ b/crates/hir-ty/src/target_feature.rs
@@ -7,7 +7,7 @@
use intern::{Symbol, sym};
use rustc_hash::{FxHashMap, FxHashSet};
-#[derive(Debug, Default)]
+#[derive(Debug, Default, Clone)]
pub struct TargetFeatures {
pub(crate) enabled: FxHashSet<Symbol>,
}
diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs
index 7ec5231..1735f55 100644
--- a/crates/hir-ty/src/tests/coercion.rs
+++ b/crates/hir-ty/src/tests/coercion.rs
@@ -177,21 +177,23 @@
#[test]
fn coerce_merge_one_by_one1() {
- cov_mark::check!(coerce_merge_fail_fallback);
-
check(
r"
fn test() {
let t = &mut 1;
let x = match 1 {
1 => t as *mut i32,
- //^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer)
+ //^ adjustments: Deref(None), Borrow(RawPtr(Mut))
+ _ => t as *const i32,
+ };
+ x;
+ // ^ type: *const i32
+ let x = match 1 {
+ 1 => t as *mut i32,
2 => t as &i32,
//^^^^^^^^^ expected *mut i32, got &'? i32
_ => t as *const i32,
};
- x;
- //^ type: *const i32
}
",
@@ -276,17 +278,19 @@
fn coerce_autoderef_implication_1() {
check_no_mismatches(
r"
-//- minicore: deref
-struct Foo<T>;
+//- minicore: deref, phantom_data
+use core::marker::PhantomData;
+
+struct Foo<T>(PhantomData<T>);
impl core::ops::Deref for Foo<u32> { type Target = (); }
fn takes_ref_foo<T>(x: &Foo<T>) {}
fn test() {
- let foo = Foo;
+ let foo = Foo(PhantomData);
//^^^ type: Foo<{unknown}>
takes_ref_foo(&foo);
- let foo = Foo;
+ let foo = Foo(PhantomData);
//^^^ type: Foo<u32>
let _: &() = &foo;
}",
@@ -297,16 +301,18 @@
fn coerce_autoderef_implication_2() {
check(
r"
-//- minicore: deref
-struct Foo<T>;
+//- minicore: deref, phantom_data
+use core::marker::PhantomData;
+
+struct Foo<T>(PhantomData<T>);
impl core::ops::Deref for Foo<u32> { type Target = (); }
fn takes_ref_foo<T>(x: &Foo<T>) {}
fn test() {
- let foo = Foo;
+ let foo = Foo(PhantomData);
//^^^ type: Foo<{unknown}>
- let _: &u32 = &Foo;
- //^^^^ expected &'? u32, got &'? Foo<{unknown}>
+ let _: &u32 = &Foo(PhantomData);
+ //^^^^^^^^^^^^^^^^^ expected &'? u32, got &'? Foo<{unknown}>
}",
);
}
@@ -409,8 +415,6 @@
#[test]
fn coerce_fn_items_in_match_arms() {
- cov_mark::check!(coerce_fn_reification);
-
check_no_mismatches(
r"
fn foo1(x: u32) -> isize { 1 }
@@ -683,9 +687,9 @@
check_no_mismatches(
r#"
//- minicore: coerce_unsized
-struct InFile<T>;
+struct InFile<T>(T);
impl<T> InFile<T> {
- fn with_value<U>(self, value: U) -> InFile<U> { InFile }
+ fn with_value<U>(self, value: U) -> InFile<U> { InFile(loop {}) }
}
struct RecordField;
trait AstNode {}
@@ -694,7 +698,7 @@
fn takes_dyn(it: InFile<&dyn AstNode>) {}
fn test() {
- let x: InFile<()> = InFile;
+ let x: InFile<()> = InFile(());
let n = &RecordField;
takes_dyn(x.with_value(n));
}
diff --git a/crates/hir-ty/src/tests/diagnostics.rs b/crates/hir-ty/src/tests/diagnostics.rs
index 8550341..f257aa1 100644
--- a/crates/hir-ty/src/tests/diagnostics.rs
+++ b/crates/hir-ty/src/tests/diagnostics.rs
@@ -89,7 +89,6 @@
//^^^^ expected (), got &'static str
}
match x { true => true, false => 0 }
- //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected (), got bool
//^ expected bool, got i32
()
}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index d79639b..c0b930e 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -48,7 +48,6 @@
"expr_scopes_shim",
"lang_item",
"crate_lang_items",
- "lang_item",
]
"#]],
);
@@ -138,7 +137,6 @@
"crate_lang_items",
"attrs_shim",
"attrs_shim",
- "lang_item",
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
@@ -588,8 +586,8 @@
"crate_lang_items",
"attrs_shim",
"attrs_shim",
- "return_type_impl_traits_shim",
"generic_predicates_ns_shim",
+ "return_type_impl_traits_shim",
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
@@ -602,6 +600,7 @@
"VariantFields::firewall_",
"VariantFields::query_",
"lang_item",
+ "lang_item",
"inherent_impls_in_crate_shim",
"impl_signature_shim",
"impl_signature_with_source_map_shim",
@@ -616,7 +615,6 @@
"generic_predicates_ns_shim",
"value_ty_shim",
"generic_predicates_shim",
- "lang_item",
]
"#]],
);
@@ -688,8 +686,8 @@
"attrs_shim",
"attrs_shim",
"attrs_shim",
- "return_type_impl_traits_shim",
"generic_predicates_ns_shim",
+ "return_type_impl_traits_shim",
"infer_shim",
"function_signature_with_source_map_shim",
"expr_scopes_shim",
diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs
index 5d088e4..25b938c 100644
--- a/crates/hir-ty/src/tests/macros.rs
+++ b/crates/hir-ty/src/tests/macros.rs
@@ -194,15 +194,15 @@
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
- 39..442 '{ ...!(); }': ()
+ 39..442 '{ ...!(); }': {unknown}
73..94 'spam!(...am!())': {unknown}
100..119 'for _ ...!() {}': fn into_iter<isize>(isize) -> <isize as IntoIterator>::IntoIter
- 100..119 'for _ ...!() {}': {unknown}
+ 100..119 'for _ ...!() {}': <isize as IntoIterator>::IntoIter
100..119 'for _ ...!() {}': !
- 100..119 'for _ ...!() {}': {unknown}
- 100..119 'for _ ...!() {}': &'? mut {unknown}
+ 100..119 'for _ ...!() {}': <isize as IntoIterator>::IntoIter
+ 100..119 'for _ ...!() {}': &'? mut <isize as IntoIterator>::IntoIter
100..119 'for _ ...!() {}': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 100..119 'for _ ...!() {}': Option<{unknown}>
+ 100..119 'for _ ...!() {}': Option<<{unknown} as Iterator>::Item>
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
@@ -288,15 +288,15 @@
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
- 53..456 '{ ...!(); }': ()
+ 53..456 '{ ...!(); }': {unknown}
87..108 'spam!(...am!())': {unknown}
114..133 'for _ ...!() {}': fn into_iter<isize>(isize) -> <isize as IntoIterator>::IntoIter
- 114..133 'for _ ...!() {}': {unknown}
+ 114..133 'for _ ...!() {}': <isize as IntoIterator>::IntoIter
114..133 'for _ ...!() {}': !
- 114..133 'for _ ...!() {}': {unknown}
- 114..133 'for _ ...!() {}': &'? mut {unknown}
+ 114..133 'for _ ...!() {}': <isize as IntoIterator>::IntoIter
+ 114..133 'for _ ...!() {}': &'? mut <isize as IntoIterator>::IntoIter
114..133 'for _ ...!() {}': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 114..133 'for _ ...!() {}': Option<{unknown}>
+ 114..133 'for _ ...!() {}': Option<<{unknown} as Iterator>::Item>
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
@@ -707,7 +707,7 @@
expect![[r#"
!0..6 '"file"': &'static str
63..87 '{ ...!(); }': ()
- 73..74 'x': &'static str
+ 73..74 'x': &'? str
"#]],
);
}
@@ -745,7 +745,7 @@
expect![[r#"
!0..13 '"helloworld!"': &'static str
65..121 '{ ...")); }': ()
- 75..76 'x': &'static str
+ 75..76 'x': &'? str
"#]],
);
}
@@ -822,7 +822,7 @@
fn main() {
let a = include_str!("foo.rs");
a;
-} //^ &'static str
+} //^ &'? str
//- /foo.rs
hello
@@ -849,7 +849,7 @@
fn main() {
let a = include_str!(m!(".rs"));
a;
-} //^ &'static str
+} //^ &'? str
//- /foo.rs
hello
@@ -964,7 +964,7 @@
expect![[r#"
!0..13 '"helloworld!"': &'static str
103..160 '{ ...")); }': ()
- 113..114 'x': &'static str
+ 113..114 'x': &'? str
"#]],
);
}
@@ -979,7 +979,7 @@
fn main() {
let x = env!("foo");
- //^ &'static str
+ //^ &'? str
}
"#,
);
@@ -993,7 +993,7 @@
//- /main.rs env:foo=bar
fn main() {
let x = option_env!("foo");
- //^ Option<&'static str>
+ //^ Option<&'? str>
}
"#,
);
diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs
index d50daf0..b14ce35 100644
--- a/crates/hir-ty/src/tests/method_resolution.rs
+++ b/crates/hir-ty/src/tests/method_resolution.rs
@@ -578,17 +578,17 @@
trait Trait<T> {
fn make() -> (Self, T);
}
- struct S<T>;
+ struct S<T>(T);
impl Trait<i64> for S<i32> {}
fn test() {
let a = S::make();
}
"#,
expect![[r#"
- 100..126 '{ ...e(); }': ()
- 110..111 'a': (S<i32>, i64)
- 114..121 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
- 114..123 'S::make()': (S<i32>, i64)
+ 103..129 '{ ...e(); }': ()
+ 113..114 'a': (S<i32>, i64)
+ 117..124 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
+ 117..126 'S::make()': (S<i32>, i64)
"#]],
);
}
@@ -600,7 +600,7 @@
trait Trait<T> {
fn make() -> (Self, T);
}
- struct S<T>;
+ struct S<T>(T);
impl Trait<i64> for S<u64> {}
impl Trait<i32> for S<u32> {}
fn test() {
@@ -609,13 +609,13 @@
}
"#,
expect![[r#"
- 130..202 '{ ...e(); }': ()
- 140..141 'a': (S<u64>, i64)
- 157..164 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
- 157..166 'S::make()': (S<u64>, i64)
- 176..177 'b': (S<u32>, i32)
- 190..197 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
- 190..199 'S::make()': (S<u32>, i32)
+ 133..205 '{ ...e(); }': ()
+ 143..144 'a': (S<u64>, i64)
+ 160..167 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
+ 160..169 'S::make()': (S<u64>, i64)
+ 179..180 'b': (S<u32>, i32)
+ 193..200 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
+ 193..202 'S::make()': (S<u32>, i32)
"#]],
);
}
@@ -627,7 +627,7 @@
trait Trait<T> {
fn make<U>() -> (Self, T, U);
}
- struct S<T>;
+ struct S<T>(T);
impl Trait<i64> for S<u64> {}
fn test() {
let a = <S as Trait<i64>>::make::<u8>();
@@ -635,13 +635,13 @@
}
"#,
expect![[r#"
- 106..210 '{ ...>(); }': ()
- 116..117 'a': (S<u64>, i64, u8)
- 120..149 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
- 120..151 '<S as ...<u8>()': (S<u64>, i64, u8)
- 161..162 'b': (S<u64>, i64, u8)
- 181..205 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
- 181..207 'Trait:...<u8>()': (S<u64>, i64, u8)
+ 109..213 '{ ...>(); }': ()
+ 119..120 'a': (S<u64>, i64, u8)
+ 123..152 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 123..154 '<S as ...<u8>()': (S<u64>, i64, u8)
+ 164..165 'b': (S<u64>, i64, u8)
+ 184..208 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 184..210 'Trait:...<u8>()': (S<u64>, i64, u8)
"#]],
);
}
@@ -1107,6 +1107,9 @@
// this can get quite slow if we set the solver size limit too high
check_types(
r#"
+//- minicore: phantom_data
+use core::marker::PhantomData;
+
trait SendX {}
struct S1; impl SendX for S1 {}
@@ -1115,17 +1118,17 @@
trait Trait { fn method(self); }
-struct X1<A, B> {}
+struct X1<A, B>(PhantomData<(A, B)>);
impl<A, B> SendX for X1<A, B> where A: SendX, B: SendX {}
-struct S<B, C> {}
+struct S<B, C>(PhantomData<(B, C)>);
trait FnX {}
impl<B, C> Trait for S<B, C> where C: FnX, B: SendX {}
-fn test() { (S {}).method(); }
- //^^^^^^^^^^^^^^^ ()
+fn test() { (S(PhantomData)).method(); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^ ()
"#,
);
}
@@ -1187,11 +1190,11 @@
89..109 '{ ... }': bool
99..103 'true': bool
123..167 '{ ...o(); }': ()
- 133..134 's': &'static S
- 137..151 'unsafe { f() }': &'static S
+ 133..134 's': &'? S
+ 137..151 'unsafe { f() }': &'? S
146..147 'f': fn f() -> &'static S
146..149 'f()': &'static S
- 157..158 's': &'static S
+ 157..158 's': &'? S
157..164 's.foo()': bool
"#]],
);
@@ -2191,9 +2194,9 @@
fn main() {
let bar = Bar;
let _v1 = bar.foo1();
- //^^^ type: {unknown}
+ //^^^ type: i32
let _v2 = bar.foo2();
- //^^^ type: {unknown}
+ //^^^ type: bool
}
"#,
);
diff --git a/crates/hir-ty/src/tests/never_type.rs b/crates/hir-ty/src/tests/never_type.rs
index 6a91356..af5290d 100644
--- a/crates/hir-ty/src/tests/never_type.rs
+++ b/crates/hir-ty/src/tests/never_type.rs
@@ -14,6 +14,8 @@
);
}
+// FIXME(next-solver): The never type fallback implemented in r-a no longer works properly because of
+// `Coerce` predicates. We should reimplement fallback the way rustc does.
#[test]
fn infer_never2() {
check_types(
@@ -24,7 +26,7 @@
let a = gen();
if false { a } else { loop {} };
a;
-} //^ !
+} //^ {unknown}
"#,
);
}
@@ -39,7 +41,7 @@
let a = gen();
if false { loop {} } else { a };
a;
- //^ !
+ //^ {unknown}
}
"#,
);
@@ -54,7 +56,7 @@
fn test() {
let a = if true { Option::None } else { Option::Some(return) };
a;
-} //^ Option<!>
+} //^ Option<{unknown}>
"#,
);
}
@@ -104,7 +106,7 @@
fn test() {
let a = if true { Option::None } else { Option::Some(return) };
a;
- //^ Option<&'static str>
+ //^ Option<&'? str>
match 42 {
42 => a,
_ => Option::Some("str"),
@@ -218,7 +220,7 @@
_ => loop {},
};
i;
-} //^ !
+} //^ {unknown}
"#,
);
}
@@ -362,12 +364,12 @@
140..141 'x': u32
149..175 '{ for ...; }; }': u32
151..172 'for a ...eak; }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
- 151..172 'for a ...eak; }': {unknown}
+ 151..172 'for a ...eak; }': <{unknown} as IntoIterator>::IntoIter
151..172 'for a ...eak; }': !
151..172 'for a ...eak; }': {unknown}
151..172 'for a ...eak; }': &'? mut {unknown}
151..172 'for a ...eak; }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 151..172 'for a ...eak; }': Option<{unknown}>
+ 151..172 'for a ...eak; }': Option<<{unknown} as Iterator>::Item>
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
@@ -379,12 +381,12 @@
226..227 'x': u32
235..253 '{ for ... {}; }': u32
237..250 'for a in b {}': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
- 237..250 'for a in b {}': {unknown}
+ 237..250 'for a in b {}': <{unknown} as IntoIterator>::IntoIter
237..250 'for a in b {}': !
237..250 'for a in b {}': {unknown}
237..250 'for a in b {}': &'? mut {unknown}
237..250 'for a in b {}': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 237..250 'for a in b {}': Option<{unknown}>
+ 237..250 'for a in b {}': Option<<{unknown} as Iterator>::Item>
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
@@ -395,12 +397,12 @@
304..305 'x': u32
313..340 '{ for ...; }; }': u32
315..337 'for a ...urn; }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
- 315..337 'for a ...urn; }': {unknown}
+ 315..337 'for a ...urn; }': <{unknown} as IntoIterator>::IntoIter
315..337 'for a ...urn; }': !
315..337 'for a ...urn; }': {unknown}
315..337 'for a ...urn; }': &'? mut {unknown}
315..337 'for a ...urn; }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 315..337 'for a ...urn; }': Option<{unknown}>
+ 315..337 'for a ...urn; }': Option<<{unknown} as Iterator>::Item>
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
diff --git a/crates/hir-ty/src/tests/opaque_types.rs b/crates/hir-ty/src/tests/opaque_types.rs
index 256ca7d..40e4c28 100644
--- a/crates/hir-ty/src/tests/opaque_types.rs
+++ b/crates/hir-ty/src/tests/opaque_types.rs
@@ -158,6 +158,7 @@
191..193 '_a': impl Trait + ?Sized
205..211 'Struct': Struct
217..218 '5': i32
+ 205..211: expected impl Trait + ?Sized, got Struct
"#]],
)
}
diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs
index 02cb037..607daad 100644
--- a/crates/hir-ty/src/tests/patterns.rs
+++ b/crates/hir-ty/src/tests/patterns.rs
@@ -41,19 +41,19 @@
47..48 'x': &'? i32
58..59 'a': i32
62..63 'z': i32
- 73..79 '(c, d)': (i32, &'static str)
+ 73..79 '(c, d)': (i32, &'? str)
74..75 'c': i32
- 77..78 'd': &'static str
- 82..94 '(1, "hello")': (i32, &'static str)
+ 77..78 'd': &'? str
+ 82..94 '(1, "hello")': (i32, &'? str)
83..84 '1': i32
86..93 '"hello"': &'static str
101..151 'for (e... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
- 101..151 'for (e... }': {unknown}
+ 101..151 'for (e... }': <{unknown} as IntoIterator>::IntoIter
101..151 'for (e... }': !
101..151 'for (e... }': {unknown}
101..151 'for (e... }': &'? mut {unknown}
101..151 'for (e... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 101..151 'for (e... }': Option<({unknown}, {unknown})>
+ 101..151 'for (e... }': Option<<{unknown} as Iterator>::Item>
101..151 'for (e... }': ()
101..151 'for (e... }': ()
101..151 'for (e... }': ()
@@ -653,7 +653,7 @@
fn infer_const_pattern() {
check(
r#"
-enum Option<T> { None }
+enum Option<T> { None, Some(T) }
use Option::None;
struct Foo;
const Bar: usize = 1;
@@ -719,28 +719,28 @@
51..58 'loop {}': !
56..58 '{}': ()
72..171 '{ ... x); }': ()
- 78..81 'foo': fn foo<&'? (i32, &'static str), i32, impl FnOnce(&'? (i32, &'static str)) -> i32>(&'? (i32, &'static str), impl FnOnce(&'? (i32, &'static str)) -> i32) -> i32
+ 78..81 'foo': fn foo<&'? (i32, &'? str), i32, impl FnOnce(&'? (i32, &'? str)) -> i32>(&'? (i32, &'? str), impl FnOnce(&'? (i32, &'? str)) -> i32) -> i32
78..105 'foo(&(...y)| x)': i32
- 82..91 '&(1, "a")': &'? (i32, &'static str)
- 83..91 '(1, "a")': (i32, &'static str)
+ 82..91 '&(1, "a")': &'? (i32, &'? str)
+ 83..91 '(1, "a")': (i32, &'? str)
84..85 '1': i32
87..90 '"a"': &'static str
- 93..104 '|&(x, y)| x': impl FnOnce(&'? (i32, &'static str)) -> i32
- 94..101 '&(x, y)': &'? (i32, &'static str)
- 95..101 '(x, y)': (i32, &'static str)
+ 93..104 '|&(x, y)| x': impl FnOnce(&'? (i32, &'? str)) -> i32
+ 94..101 '&(x, y)': &'? (i32, &'? str)
+ 95..101 '(x, y)': (i32, &'? str)
96..97 'x': i32
- 99..100 'y': &'static str
+ 99..100 'y': &'? str
103..104 'x': i32
- 142..145 'foo': fn foo<&'? (i32, &'static str), &'? i32, impl FnOnce(&'? (i32, &'static str)) -> &'? i32>(&'? (i32, &'static str), impl FnOnce(&'? (i32, &'static str)) -> &'? i32) -> &'? i32
+ 142..145 'foo': fn foo<&'? (i32, &'? str), &'? i32, impl FnOnce(&'? (i32, &'? str)) -> &'? i32>(&'? (i32, &'? str), impl FnOnce(&'? (i32, &'? str)) -> &'? i32) -> &'? i32
142..168 'foo(&(...y)| x)': &'? i32
- 146..155 '&(1, "a")': &'? (i32, &'static str)
- 147..155 '(1, "a")': (i32, &'static str)
+ 146..155 '&(1, "a")': &'? (i32, &'? str)
+ 147..155 '(1, "a")': (i32, &'? str)
148..149 '1': i32
151..154 '"a"': &'static str
- 157..167 '|(x, y)| x': impl FnOnce(&'? (i32, &'static str)) -> &'? i32
- 158..164 '(x, y)': (i32, &'static str)
+ 157..167 '|(x, y)| x': impl FnOnce(&'? (i32, &'? str)) -> &'? i32
+ 158..164 '(x, y)': (i32, &'? str)
159..160 'x': &'? i32
- 162..163 'y': &'? &'static str
+ 162..163 'y': &'? &'? str
166..167 'x': &'? i32
"#]],
);
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index 6a3f228..2ba1e23 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -88,6 +88,7 @@
#[test]
fn recursive_vars() {
+ // FIXME: This isn't nice, but I guess as long as we don't hang/crash that's fine?
check_infer(
r#"
fn test() {
@@ -97,12 +98,12 @@
"#,
expect![[r#"
10..47 '{ ...&y]; }': ()
- 20..21 'y': {unknown}
- 24..31 'unknown': {unknown}
- 37..44 '[y, &y]': [{unknown}; 2]
- 38..39 'y': {unknown}
- 41..43 '&y': &'? {unknown}
- 42..43 'y': {unknown}
+ 20..21 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 24..31 'unknown': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 37..44 '[y, &y]': [&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}; 2]
+ 38..39 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 41..43 '&y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 42..43 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
"#]],
);
}
@@ -119,19 +120,19 @@
"#,
expect![[r#"
10..79 '{ ...x)]; }': ()
- 20..21 'x': &'? {unknown}
- 24..31 'unknown': &'? {unknown}
- 41..42 'y': {unknown}
- 45..52 'unknown': {unknown}
- 58..76 '[(x, y..., &x)]': [(&'? {unknown}, {unknown}); 2]
- 59..65 '(x, y)': (&'? {unknown}, {unknown})
- 60..61 'x': &'? {unknown}
- 63..64 'y': {unknown}
- 67..75 '(&y, &x)': (&'? {unknown}, {unknown})
- 68..70 '&y': &'? {unknown}
- 69..70 'y': {unknown}
- 72..74 '&x': &'? &'? {unknown}
- 73..74 'x': &'? {unknown}
+ 20..21 'x': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 24..31 'unknown': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 41..42 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 45..52 'unknown': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 58..76 '[(x, y..., &x)]': [(&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}, &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}); 2]
+ 59..65 '(x, y)': (&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}, &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown})
+ 60..61 'x': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 63..64 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 67..75 '(&y, &x)': (&'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}, &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown})
+ 68..70 '&y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 69..70 'y': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 72..74 '&x': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 73..74 'x': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
"#]],
);
}
@@ -268,37 +269,37 @@
expect![[r#"
26..322 '{ ... } }': ()
32..320 'for co... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
- 32..320 'for co... }': {unknown}
+ 32..320 'for co... }': <{unknown} as IntoIterator>::IntoIter
32..320 'for co... }': !
32..320 'for co... }': {unknown}
32..320 'for co... }': &'? mut {unknown}
32..320 'for co... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 32..320 'for co... }': Option<{unknown}>
+ 32..320 'for co... }': Option<<{unknown} as Iterator>::Item>
32..320 'for co... }': ()
32..320 'for co... }': ()
32..320 'for co... }': ()
32..320 'for co... }': ()
- 36..43 'content': {unknown}
+ 36..43 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
47..60 'doesnt_matter': {unknown}
61..320 '{ ... }': ()
- 75..79 'name': &'? {unknown}
- 82..166 'if doe... }': &'? {unknown}
+ 75..79 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 82..166 'if doe... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
85..98 'doesnt_matter': bool
- 99..128 '{ ... }': &'? {unknown}
- 113..118 'first': &'? {unknown}
- 134..166 '{ ... }': &'? {unknown}
- 148..156 '&content': &'? {unknown}
- 149..156 'content': {unknown}
- 181..188 'content': &'? {unknown}
- 191..313 'if ICE... }': &'? {unknown}
- 194..231 'ICE_RE..._VALUE': {unknown}
+ 99..128 '{ ... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 113..118 'first': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 134..166 '{ ... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 148..156 '&content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 149..156 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 181..188 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 191..313 'if ICE... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 194..231 'ICE_RE..._VALUE': bool
194..247 'ICE_RE...&name)': bool
- 241..246 '&name': &'? &'? {unknown}
- 242..246 'name': &'? {unknown}
- 248..276 '{ ... }': &'? {unknown}
- 262..266 'name': &'? {unknown}
- 282..313 '{ ... }': {unknown}
- 296..303 'content': {unknown}
+ 241..246 '&name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 242..246 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 248..276 '{ ... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 262..266 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 282..313 '{ ... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
+ 296..303 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
"#]],
);
}
@@ -394,7 +395,7 @@
r#"
trait A {}
trait Write {}
- struct Response<T> {}
+ struct Response<T>(T);
trait D {
fn foo();
@@ -410,13 +411,13 @@
}
"#,
expect![[r#"
- 119..214 '{ ... }': ()
- 129..132 'end': fn end<{unknown}>()
- 129..134 'end()': ()
- 163..208 '{ ... }': ()
- 181..183 '_x': !
- 190..197 'loop {}': !
- 195..197 '{}': ()
+ 120..215 '{ ... }': ()
+ 130..133 'end': fn end<{unknown}>()
+ 130..135 'end()': ()
+ 164..209 '{ ... }': ()
+ 182..184 '_x': !
+ 191..198 'loop {}': !
+ 196..198 '{}': ()
"#]],
)
}
@@ -628,7 +629,7 @@
65..69 'self': Self
267..271 'self': Self
466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
- 488..522 '{ ... }': ()
+ 488..522 '{ ... }': <SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> as BoxedDsl<DB>>::Output
498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
498..508 'self.order': O
498..515 'self.o...into()': dyn QueryFragment<DB> + '?
@@ -799,7 +800,7 @@
struct Map<F> { f: F }
- struct Vec<T> {}
+ struct Vec<T> { p: *mut T }
impl<T> core::ops::Deref for Vec<T> {
type Target = [T];
@@ -818,23 +819,23 @@
}
"#,
expect![[r#"
- 225..229 'iter': T
- 244..246 '{}': Vec<A>
- 258..402 '{ ...r(); }': ()
- 268..273 'inner': Map<impl Fn(&'? f64) -> f64>
- 276..300 'Map { ... 0.0 }': Map<impl Fn(&'? f64) -> f64>
- 285..298 '|_: &f64| 0.0': impl Fn(&'? f64) -> f64
- 286..287 '_': &'? f64
- 295..298 '0.0': f64
- 311..317 'repeat': Repeat<Map<impl Fn(&'? f64) -> f64>>
- 320..345 'Repeat...nner }': Repeat<Map<impl Fn(&'? f64) -> f64>>
- 338..343 'inner': Map<impl Fn(&'? f64) -> f64>
- 356..359 'vec': Vec<{unknown}>
- 362..371 'from_iter': fn from_iter<{unknown}, Repeat<Map<impl Fn(&'? f64) -> f64>>>(Repeat<Map<impl Fn(&'? f64) -> f64>>) -> Vec<{unknown}>
- 362..379 'from_i...epeat)': Vec<{unknown}>
- 372..378 'repeat': Repeat<Map<impl Fn(&'? f64) -> f64>>
- 386..389 'vec': Vec<{unknown}>
- 386..399 'vec.foo_bar()': {unknown}
+ 236..240 'iter': T
+ 255..257 '{}': Vec<A>
+ 269..413 '{ ...r(); }': ()
+ 279..284 'inner': Map<impl Fn(&'? f64) -> f64>
+ 287..311 'Map { ... 0.0 }': Map<impl Fn(&'? f64) -> f64>
+ 296..309 '|_: &f64| 0.0': impl Fn(&'? f64) -> f64
+ 297..298 '_': &'? f64
+ 306..309 '0.0': f64
+ 322..328 'repeat': Repeat<Map<impl Fn(&'? f64) -> f64>>
+ 331..356 'Repeat...nner }': Repeat<Map<impl Fn(&'? f64) -> f64>>
+ 349..354 'inner': Map<impl Fn(&'? f64) -> f64>
+ 367..370 'vec': Vec<{unknown}>
+ 373..382 'from_iter': fn from_iter<{unknown}, Repeat<Map<impl Fn(&'? f64) -> f64>>>(Repeat<Map<impl Fn(&'? f64) -> f64>>) -> Vec<{unknown}>
+ 373..390 'from_i...epeat)': Vec<{unknown}>
+ 383..389 'repeat': Repeat<Map<impl Fn(&'? f64) -> f64>>
+ 397..400 'vec': Vec<{unknown}>
+ 397..410 'vec.foo_bar()': {unknown}
"#]],
);
}
@@ -843,37 +844,40 @@
fn issue_6628() {
check_infer(
r#"
-//- minicore: fn
-struct S<T>();
+//- minicore: fn, phantom_data
+use core::marker::PhantomData;
+
+struct S<T>(PhantomData<T>);
impl<T> S<T> {
fn f(&self, _t: T) {}
fn g<F: FnOnce(&T)>(&self, _f: F) {}
}
fn main() {
- let s = S();
+ let s = S(PhantomData);
s.g(|_x| {});
s.f(10);
}
"#,
expect![[r#"
- 40..44 'self': &'? S<T>
- 46..48 '_t': T
- 53..55 '{}': ()
- 81..85 'self': &'? S<T>
- 87..89 '_f': F
- 94..96 '{}': ()
- 109..160 '{ ...10); }': ()
- 119..120 's': S<i32>
- 123..124 'S': fn S<i32>() -> S<i32>
- 123..126 'S()': S<i32>
- 132..133 's': S<i32>
- 132..144 's.g(|_x| {})': ()
- 136..143 '|_x| {}': impl FnOnce(&'? i32)
- 137..139 '_x': &'? i32
- 141..143 '{}': ()
- 150..151 's': S<i32>
- 150..157 's.f(10)': ()
- 154..156 '10': i32
+ 86..90 'self': &'? S<T>
+ 92..94 '_t': T
+ 99..101 '{}': ()
+ 127..131 'self': &'? S<T>
+ 133..135 '_f': F
+ 140..142 '{}': ()
+ 155..217 '{ ...10); }': ()
+ 165..166 's': S<i32>
+ 169..170 'S': fn S<i32>(PhantomData<i32>) -> S<i32>
+ 169..183 'S(PhantomData)': S<i32>
+ 171..182 'PhantomData': PhantomData<i32>
+ 189..190 's': S<i32>
+ 189..201 's.g(|_x| {})': ()
+ 193..200 '|_x| {}': impl FnOnce(&'? i32)
+ 194..196 '_x': &'? i32
+ 198..200 '{}': ()
+ 207..208 's': S<i32>
+ 207..214 's.f(10)': ()
+ 211..213 '10': i32
"#]],
);
}
@@ -931,7 +935,7 @@
check_types(
r#"
//- minicore: deref
-struct Box<T: ?Sized> {}
+struct Box<T: ?Sized>(T);
impl<T: ?Sized> core::ops::Deref for Box<T> {
type Target = T;
@@ -964,6 +968,9 @@
fn issue_8686() {
check_infer(
r#"
+//- minicore: phantom_data
+use core::marker::PhantomData;
+
pub trait Try: FromResidual {
type Output;
type Residual;
@@ -972,28 +979,32 @@
fn from_residual(residual: R) -> Self;
}
-struct ControlFlow<B, C>;
+struct ControlFlow<B, C>(PhantomData<(B, C)>);
impl<B, C> Try for ControlFlow<B, C> {
type Output = C;
type Residual = ControlFlow<B, !>;
}
impl<B, C> FromResidual for ControlFlow<B, C> {
- fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow }
+ fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow(PhantomData) }
}
fn test() {
- ControlFlow::from_residual(ControlFlow::<u32, !>);
+ ControlFlow::from_residual(ControlFlow::<u32, !>(PhantomData));
}
"#,
expect![[r#"
- 144..152 'residual': R
- 365..366 'r': ControlFlow<B, !>
- 395..410 '{ ControlFlow }': ControlFlow<B, C>
- 397..408 'ControlFlow': ControlFlow<B, C>
- 424..482 '{ ...!>); }': ()
- 430..456 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}>
- 430..479 'Contro...2, !>)': ControlFlow<u32, {unknown}>
- 457..478 'Contro...32, !>': ControlFlow<u32, !>
+ 176..184 'residual': R
+ 418..419 'r': ControlFlow<B, !>
+ 448..476 '{ Cont...ata) }': ControlFlow<B, C>
+ 450..461 'ControlFlow': fn ControlFlow<B, C>(PhantomData<(B, C)>) -> ControlFlow<B, C>
+ 450..474 'Contro...mData)': ControlFlow<B, C>
+ 462..473 'PhantomData': PhantomData<(B, C)>
+ 490..561 '{ ...a)); }': ()
+ 496..522 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}>
+ 496..558 'Contro...Data))': ControlFlow<u32, {unknown}>
+ 523..544 'Contro...32, !>': fn ControlFlow<u32, !>(PhantomData<(u32, !)>) -> ControlFlow<u32, !>
+ 523..557 'Contro...mData)': ControlFlow<u32, !>
+ 545..556 'PhantomData': PhantomData<(u32, !)>
"#]],
);
}
@@ -1052,12 +1063,13 @@
check_types(
r#"
//- minicore: sized
-struct Option<T>;
+struct Option<T>(T);
impl<T> Option<T> {
fn unwrap(self) -> T { loop {} }
}
-fn make() -> Option<impl Copy> { Option }
+fn make() -> Option<impl Copy> { Option(()) }
trait Copy {}
+impl Copy for () {}
fn test() {
let o = make();
o.unwrap();
@@ -1163,9 +1175,9 @@
pub struct Lsb0;
-pub struct BitArray<V: BitView> { }
+pub struct BitArray<V: BitView>(V);
-pub struct BitSlice<T> { }
+pub struct BitSlice<T>(T);
impl<V: BitView> core::ops::Deref for BitArray<V> {
type Target = BitSlice<V::Store>;
@@ -1243,12 +1255,12 @@
expect![[r#"
10..68 '{ ... } }': ()
16..66 'for _ ... }': fn into_iter<()>(()) -> <() as IntoIterator>::IntoIter
- 16..66 'for _ ... }': {unknown}
+ 16..66 'for _ ... }': <() as IntoIterator>::IntoIter
16..66 'for _ ... }': !
- 16..66 'for _ ... }': {unknown}
- 16..66 'for _ ... }': &'? mut {unknown}
+ 16..66 'for _ ... }': <() as IntoIterator>::IntoIter
+ 16..66 'for _ ... }': &'? mut <() as IntoIterator>::IntoIter
16..66 'for _ ... }': fn next<{unknown}>(&'? mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
- 16..66 'for _ ... }': Option<{unknown}>
+ 16..66 'for _ ... }': Option<<{unknown} as Iterator>::Item>
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
@@ -1779,7 +1791,7 @@
r#"
pub type Ty = Unknown;
-pub struct Inner<T>();
+pub struct Inner<T>(T);
pub struct Outer {
pub inner: Inner<Ty>,
@@ -1787,7 +1799,7 @@
fn main() {
_ = Outer {
- inner: Inner::<i32>(),
+ inner: Inner::<i32>(0),
};
}
"#,
@@ -1939,7 +1951,7 @@
Alias::Braced;
//^^^^^^^^^^^^^ {unknown}
let Alias::Braced = loop {};
- //^^^^^^^^^^^^^ !
+ //^^^^^^^^^^^^^ {unknown}
let Alias::Braced(..) = loop {};
//^^^^^^^^^^^^^^^^^ Enum
@@ -2017,12 +2029,12 @@
fn lifetime_params_move_param_defaults() {
check_types(
r#"
-pub struct Thing<'s, T = u32>;
+pub struct Thing<'s, T = u32>(&'s T);
impl <'s> Thing<'s> {
pub fn new() -> Thing<'s> {
- Thing
- //^^^^^ Thing<'?, u32>
+ Thing(&0)
+ //^^^^^^^^^ Thing<'?, u32>
}
}
@@ -2042,11 +2054,11 @@
r#"
fn test() {
let x = S::foo::<'static, &()>(&S);
- // ^ Wrap<'static, ()>
+ // ^ Wrap<'?, ()>
let x = S::foo::<&()>(&S);
// ^ Wrap<'?, ()>
let x = S.foo::<'static, &()>();
- // ^ Wrap<'static, ()>
+ // ^ Wrap<'?, ()>
let x = S.foo::<&()>();
// ^ Wrap<'?, ()>
}
@@ -2059,7 +2071,7 @@
}
}
-struct Wrap<'a, T>(T);
+struct Wrap<'a, T>(&'a T);
trait Trait<'a> {
type Proj;
}
diff --git a/crates/hir-ty/src/tests/regression/new_solver.rs b/crates/hir-ty/src/tests/regression/new_solver.rs
index 82d670c..ead79a8 100644
--- a/crates/hir-ty/src/tests/regression/new_solver.rs
+++ b/crates/hir-ty/src/tests/regression/new_solver.rs
@@ -307,3 +307,114 @@
"#]],
)
}
+
+#[test]
+fn fn_coercion() {
+ check_no_mismatches(
+ r#"
+fn foo() {
+ let _is_suffix_start: fn(&(usize, char)) -> bool = match true {
+ true => |(_, c)| *c == ' ',
+ _ => |(_, c)| *c == 'v',
+ };
+}
+ "#,
+ );
+}
+
+#[test]
+fn coercion_with_errors() {
+ check_no_mismatches(
+ r#"
+//- minicore: unsize, coerce_unsized
+fn foo(_v: i32) -> [u8; _] { loop {} }
+fn bar(_v: &[u8]) {}
+
+fn main() {
+ bar(&foo());
+}
+ "#,
+ );
+}
+
+#[test]
+fn another_20654_case() {
+ check_no_mismatches(
+ r#"
+//- minicore: sized, unsize, coerce_unsized, dispatch_from_dyn, fn
+struct Region<'db>(&'db ());
+
+trait TypeFoldable<I: Interner> {}
+
+trait Interner {
+ type Region;
+ type GenericArg;
+}
+
+struct DbInterner<'db>(&'db ());
+impl<'db> Interner for DbInterner<'db> {
+ type Region = Region<'db>;
+ type GenericArg = GenericArg<'db>;
+}
+
+trait GenericArgExt<I: Interner<GenericArg = Self>> {
+ fn expect_region(&self) -> I::Region {
+ loop {}
+ }
+}
+impl<'db> GenericArgExt<DbInterner<'db>> for GenericArg<'db> {}
+
+enum GenericArg<'db> {
+ Region(Region<'db>),
+}
+
+fn foo<'db, T: TypeFoldable<DbInterner<'db>>>(arg: GenericArg<'db>) {
+ let regions = &mut || arg.expect_region();
+ let f: &'_ mut (dyn FnMut() -> Region<'db> + '_) = regions;
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_solving_with_error() {
+ check_infer(
+ r#"
+//- minicore: size_of
+struct Vec<T>(T);
+
+trait Foo {
+ type Item;
+ fn to_vec(self) -> Vec<Self::Item> {
+ loop {}
+ }
+}
+
+impl<'a, T, const N: usize> Foo for &'a [T; N] {
+ type Item = T;
+}
+
+fn to_bytes() -> [u8; _] {
+ loop {}
+}
+
+fn foo() {
+ let _x = to_bytes().to_vec();
+}
+ "#,
+ expect![[r#"
+ 60..64 'self': Self
+ 85..108 '{ ... }': Vec<<Self as Foo>::Item>
+ 95..102 'loop {}': !
+ 100..102 '{}': ()
+ 208..223 '{ loop {} }': [u8; _]
+ 214..221 'loop {}': !
+ 219..221 '{}': ()
+ 234..271 '{ ...c(); }': ()
+ 244..246 '_x': {unknown}
+ 249..257 'to_bytes': fn to_bytes() -> [u8; _]
+ 249..259 'to_bytes()': [u8; _]
+ 249..268 'to_byt..._vec()': Vec<<[u8; _] as Foo>::Item>
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index 60ad0f4..9d02a44 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -439,11 +439,11 @@
256..260 'true': bool
274..370 'r#" ... "#': &'static str
384..394 'br#"yolo"#': &'static [u8; 4]
- 412..413 'a': &'static [u8; 4]
+ 412..413 'a': &'? [u8; 4]
416..440 'b"a\x2... c"': &'static [u8; 4]
- 458..459 'b': &'static [u8; 4]
+ 458..459 'b': &'? [u8; 4]
462..470 'br"g\ h"': &'static [u8; 4]
- 488..489 'c': &'static [u8; 6]
+ 488..489 'c': &'? [u8; 6]
492..504 'br#"x"\"yb"#': &'static [u8; 6]
"##]],
);
@@ -1124,13 +1124,13 @@
116..122 '(c, x)': ((isize, &'? str), &'? str)
117..118 'c': (isize, &'? str)
120..121 'x': &'? str
- 132..133 'e': (i32, &'static str)
- 136..144 '(1, "e")': (i32, &'static str)
+ 132..133 'e': (i32, &'? str)
+ 136..144 '(1, "e")': (i32, &'? str)
137..138 '1': i32
140..143 '"e"': &'static str
- 154..155 'f': ((i32, &'static str), &'static str)
- 158..166 '(e, "d")': ((i32, &'static str), &'static str)
- 159..160 'e': (i32, &'static str)
+ 154..155 'f': ((i32, &'? str), &'? str)
+ 158..166 '(e, "d")': ((i32, &'? str), &'? str)
+ 159..160 'e': (i32, &'? str)
162..165 '"d"': &'static str
"#]],
);
@@ -1201,8 +1201,8 @@
209..215 '[1, 2]': [i32; 2]
210..211 '1': i32
213..214 '2': i32
- 225..226 'i': [&'static str; 2]
- 229..239 '["a", "b"]': [&'static str; 2]
+ 225..226 'i': [&'? str; 2]
+ 229..239 '["a", "b"]': [&'? str; 2]
230..233 '"a"': &'static str
235..238 '"b"': &'static str
250..251 'b': [[&'? str; 1]; 2]
@@ -1283,11 +1283,11 @@
92..93 'A': fn A<u128>(u128) -> A<u128>
92..101 'A(42u128)': A<u128>
94..100 '42u128': u128
- 107..111 'Some': fn Some<&'static str>(&'static str) -> Option<&'static str>
- 107..116 'Some("x")': Option<&'static str>
+ 107..111 'Some': fn Some<&'? str>(&'? str) -> Option<&'? str>
+ 107..116 'Some("x")': Option<&'? str>
112..115 '"x"': &'static str
- 122..134 'Option::Some': fn Some<&'static str>(&'static str) -> Option<&'static str>
- 122..139 'Option...e("x")': Option<&'static str>
+ 122..134 'Option::Some': fn Some<&'? str>(&'? str) -> Option<&'? str>
+ 122..139 'Option...e("x")': Option<&'? str>
135..138 '"x"': &'static str
145..149 'None': Option<{unknown}>
159..160 'x': Option<i64>
@@ -1946,9 +1946,9 @@
"#,
expect![[r#"
16..46 '{ ..." }; }': u32
- 26..27 'x': impl Fn() -> &'static str
- 30..43 '|| { "test" }': impl Fn() -> &'static str
- 33..43 '{ "test" }': &'static str
+ 26..27 'x': impl Fn() -> &'? str
+ 30..43 '|| { "test" }': impl Fn() -> &'? str
+ 33..43 '{ "test" }': &'? str
35..41 '"test"': &'static str
"#]],
);
@@ -1983,10 +1983,10 @@
70..71 'v': i64
78..80 '{}': ()
91..362 '{ ... } }': ()
- 101..106 'mut g': |usize| yields i64 -> &'static str
- 109..218 '|r| { ... }': |usize| yields i64 -> &'static str
+ 101..106 'mut g': |usize| yields i64 -> &'? str
+ 109..218 '|r| { ... }': |usize| yields i64 -> &'? str
110..111 'r': usize
- 113..218 '{ ... }': &'static str
+ 113..218 '{ ... }': &'? str
127..128 'a': usize
131..138 'yield 0': usize
137..138 '0': i64
@@ -1998,20 +1998,20 @@
187..188 '2': i64
198..212 '"return value"': &'static str
225..360 'match ... }': ()
- 231..239 'Pin::new': fn new<&'? mut |usize| yields i64 -> &'static str>(&'? mut |usize| yields i64 -> &'static str) -> Pin<&'? mut |usize| yields i64 -> &'static str>
- 231..247 'Pin::n...mut g)': Pin<&'? mut |usize| yields i64 -> &'static str>
- 231..262 'Pin::n...usize)': CoroutineState<i64, &'static str>
- 240..246 '&mut g': &'? mut |usize| yields i64 -> &'static str
- 245..246 'g': |usize| yields i64 -> &'static str
+ 231..239 'Pin::new': fn new<&'? mut |usize| yields i64 -> &'? str>(&'? mut |usize| yields i64 -> &'? str) -> Pin<&'? mut |usize| yields i64 -> &'? str>
+ 231..247 'Pin::n...mut g)': Pin<&'? mut |usize| yields i64 -> &'? str>
+ 231..262 'Pin::n...usize)': CoroutineState<i64, &'? str>
+ 240..246 '&mut g': &'? mut |usize| yields i64 -> &'? str
+ 245..246 'g': |usize| yields i64 -> &'? str
255..261 '0usize': usize
- 273..299 'Corout...ded(y)': CoroutineState<i64, &'static str>
+ 273..299 'Corout...ded(y)': CoroutineState<i64, &'? str>
297..298 'y': i64
303..312 '{ f(y); }': ()
305..306 'f': fn f(i64)
305..309 'f(y)': ()
307..308 'y': i64
- 321..348 'Corout...ete(r)': CoroutineState<i64, &'static str>
- 346..347 'r': &'static str
+ 321..348 'Corout...ete(r)': CoroutineState<i64, &'? str>
+ 346..347 'r': &'? str
352..354 '{}': ()
"#]],
);
@@ -2707,11 +2707,11 @@
#[lang = "owned_box"]
#[fundamental]
-pub struct Box<T: ?Sized, A: Allocator = Global>;
+pub struct Box<T: ?Sized, A: Allocator = Global>(T, A);
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
-pub struct Vec<T, A: Allocator = Global> {}
+pub struct Vec<T, A: Allocator = Global>(T, A);
#[lang = "slice"]
impl<T> [T] {}
@@ -2734,22 +2734,22 @@
impl B for Astruct {}
"#,
expect![[r#"
- 604..608 'self': Box<[T], A>
- 637..669 '{ ... }': Vec<T, A>
- 683..853 '{ ...])); }': ()
- 693..696 'vec': Vec<i32, Global>
- 699..714 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
- 699..745 '<[_]>:...i32]))': Vec<i32, Global>
- 715..744 '#[rust...1i32])': Box<[i32; 1], Global>
- 737..743 '[1i32]': [i32; 1]
- 738..742 '1i32': i32
- 755..756 'v': Vec<Box<dyn B + '?, Global>, Global>
- 776..793 '<[_]> ...to_vec': fn into_vec<Box<dyn B + '?, Global>, Global>(Box<[Box<dyn B + '?, Global>], Global>) -> Vec<Box<dyn B + '?, Global>, Global>
- 776..850 '<[_]> ...ct)]))': Vec<Box<dyn B + '?, Global>, Global>
- 794..849 '#[rust...uct)])': Box<[Box<dyn B + '?, Global>; 1], Global>
- 816..848 '[#[rus...ruct)]': [Box<dyn B + '?, Global>; 1]
- 817..847 '#[rust...truct)': Box<Astruct, Global>
- 839..846 'Astruct': Astruct
+ 614..618 'self': Box<[T], A>
+ 647..679 '{ ... }': Vec<T, A>
+ 693..863 '{ ...])); }': ()
+ 703..706 'vec': Vec<i32, Global>
+ 709..724 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
+ 709..755 '<[_]>:...i32]))': Vec<i32, Global>
+ 725..754 '#[rust...1i32])': Box<[i32; 1], Global>
+ 747..753 '[1i32]': [i32; 1]
+ 748..752 '1i32': i32
+ 765..766 'v': Vec<Box<dyn B + '?, Global>, Global>
+ 786..803 '<[_]> ...to_vec': fn into_vec<Box<dyn B + '?, Global>, Global>(Box<[Box<dyn B + '?, Global>], Global>) -> Vec<Box<dyn B + '?, Global>, Global>
+ 786..860 '<[_]> ...ct)]))': Vec<Box<dyn B + '?, Global>, Global>
+ 804..859 '#[rust...uct)])': Box<[Box<dyn B + '?, Global>; 1], Global>
+ 826..858 '[#[rus...ruct)]': [Box<dyn B + '?, Global>; 1]
+ 827..857 '#[rust...truct)': Box<Astruct, Global>
+ 849..856 'Astruct': Astruct
"#]],
)
}
@@ -3889,9 +3889,9 @@
74..75 'f': F
80..82 '{}': ()
94..191 '{ ... }); }': ()
- 100..113 'async_closure': fn async_closure<impl AsyncFnOnce(i32) -> impl Future<Output = ()>>(impl AsyncFnOnce(i32) -> impl Future<Output = ()>)
+ 100..113 'async_closure': fn async_closure<impl FnOnce(i32)>(impl FnOnce(i32))
100..147 'async_... })': ()
- 114..146 'async ... }': impl AsyncFnOnce(i32) -> impl Future<Output = ()>
+ 114..146 'async ... }': impl FnOnce(i32)
121..124 'arg': i32
126..146 '{ ... }': ()
136..139 'arg': i32
@@ -3924,7 +3924,7 @@
expect![[r#"
110..127 '{ ...z(); }': ()
116..122 'T::baz': fn baz<T>() -> <{unknown} as Foo>::Gat<'?>
- 116..124 'T::baz()': {unknown}
+ 116..124 'T::baz()': <{unknown} as Foo>::Gat<'?>
"#]],
);
}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index 22332fd..41f8d4e 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -85,6 +85,7 @@
}
#[test]
+#[ignore = "FIXME(next-solver): fix async closures"]
fn infer_async_closure() {
check_types(
r#"
@@ -164,16 +165,16 @@
#[lang = "owned_box"]
#[fundamental]
-pub struct Box<T: ?Sized, A: Allocator = Global>(T);
+pub struct Box<T: ?Sized, A: Allocator = Global>(T, A);
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
fn send() -> Box<dyn Future<Output = ()> + Send + 'static>{
- Box(async move {})
+ Box(async move {}, Global)
}
fn not_send() -> Box<dyn Future<Output = ()> + 'static> {
- Box(async move {})
+ Box(async move {}, Global)
}
"#,
);
@@ -248,15 +249,15 @@
v.push("foo");
for x in v {
x;
- } //^ &'static str
+ } //^ &'? str
}
//- /alloc.rs crate:alloc
#![no_std]
pub mod collections {
- pub struct Vec<T> {}
+ pub struct Vec<T> { p: *const T }
impl<T> Vec<T> {
- pub fn new() -> Self { Vec {} }
+ pub fn new() -> Self { Vec { p: 0 as _ } }
pub fn push(&mut self, t: T) { }
}
@@ -722,8 +723,8 @@
check_types(
r#"
//- minicore: deref
-struct Arc<T: ?Sized>;
-fn new_arc<T: ?Sized>() -> Arc<T> { Arc }
+struct Arc<T: ?Sized>(T);
+fn new_arc<T: ?Sized>() -> Arc<T> { loop {} }
impl<T: ?Sized> core::ops::Deref for Arc<T> {
type Target = T;
}
@@ -785,13 +786,15 @@
fn deref_trait_with_implicit_sized_requirement_on_inference_var() {
check_types(
r#"
-//- minicore: deref
-struct Foo<T>;
+//- minicore: deref, phantom_data
+use core::marker::PhantomData;
+
+struct Foo<T>(PhantomData<T>);
impl<T> core::ops::Deref for Foo<T> {
type Target = ();
}
fn test() {
- let foo = Foo;
+ let foo = Foo(PhantomData);
*foo;
//^^^^ ()
let _: Foo<u8> = foo;
@@ -1456,7 +1459,7 @@
fn foo2(&self) -> i64;
}
-struct Box<T: ?Sized> {}
+struct Box<T: ?Sized>(*const T);
impl<T: ?Sized> core::ops::Deref for Box<T> {
type Target = T;
}
@@ -1477,27 +1480,27 @@
expect![[r#"
29..33 'self': &'? Self
54..58 'self': &'? Self
- 198..200 '{}': Box<dyn Trait<u64> + '?>
- 210..211 'x': Box<dyn Trait<u64> + '?>
- 234..235 'y': &'? (dyn Trait<u64> + '?)
- 254..371 '{ ...2(); }': ()
- 260..261 'x': Box<dyn Trait<u64> + '?>
- 267..268 'y': &'? (dyn Trait<u64> + '?)
- 278..279 'z': Box<dyn Trait<u64> + '?>
- 282..285 'bar': fn bar() -> Box<dyn Trait<u64> + '?>
- 282..287 'bar()': Box<dyn Trait<u64> + '?>
- 293..294 'x': Box<dyn Trait<u64> + '?>
- 293..300 'x.foo()': u64
- 306..307 'y': &'? (dyn Trait<u64> + '?)
- 306..313 'y.foo()': u64
- 319..320 'z': Box<dyn Trait<u64> + '?>
- 319..326 'z.foo()': u64
- 332..333 'x': Box<dyn Trait<u64> + '?>
- 332..340 'x.foo2()': i64
- 346..347 'y': &'? (dyn Trait<u64> + '?)
- 346..354 'y.foo2()': i64
- 360..361 'z': Box<dyn Trait<u64> + '?>
- 360..368 'z.foo2()': i64
+ 206..208 '{}': Box<dyn Trait<u64> + '?>
+ 218..219 'x': Box<dyn Trait<u64> + '?>
+ 242..243 'y': &'? (dyn Trait<u64> + '?)
+ 262..379 '{ ...2(); }': ()
+ 268..269 'x': Box<dyn Trait<u64> + '?>
+ 275..276 'y': &'? (dyn Trait<u64> + '?)
+ 286..287 'z': Box<dyn Trait<u64> + '?>
+ 290..293 'bar': fn bar() -> Box<dyn Trait<u64> + '?>
+ 290..295 'bar()': Box<dyn Trait<u64> + '?>
+ 301..302 'x': Box<dyn Trait<u64> + '?>
+ 301..308 'x.foo()': u64
+ 314..315 'y': &'? (dyn Trait<u64> + '?)
+ 314..321 'y.foo()': u64
+ 327..328 'z': Box<dyn Trait<u64> + '?>
+ 327..334 'z.foo()': u64
+ 340..341 'x': Box<dyn Trait<u64> + '?>
+ 340..348 'x.foo2()': i64
+ 354..355 'y': &'? (dyn Trait<u64> + '?)
+ 354..362 'y.foo2()': i64
+ 368..369 'z': Box<dyn Trait<u64> + '?>
+ 368..376 'z.foo2()': i64
"#]],
);
}
@@ -1674,7 +1677,9 @@
fn assoc_type_bindings() {
check_infer(
r#"
-//- minicore: sized
+//- minicore: sized, phantom_data
+use core::marker::PhantomData;
+
trait Trait {
type Type;
}
@@ -1683,7 +1688,7 @@
fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
fn set<T: Trait<Type = u64>>(t: T) -> T {t}
-struct S<T>;
+struct S<T>(PhantomData<T>);
impl<T> Trait for S<T> { type Type = T; }
fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
@@ -1691,46 +1696,52 @@
get2(x);
get(y);
get2(y);
- get(set(S));
- get2(set(S));
- get2(S::<usize>);
+ get(set(S(PhantomData)));
+ get2(set(S(PhantomData)));
+ get2(S::<usize>(PhantomData));
}"#,
expect![[r#"
- 49..50 't': T
- 77..79 '{}': <T as Trait>::Type
- 111..112 't': T
- 122..124 '{}': U
- 154..155 't': T
- 165..168 '{t}': T
- 166..167 't': T
- 256..257 'x': T
- 262..263 'y': impl Trait<Type = i64>
- 289..399 '{ ...e>); }': ()
- 295..298 'get': fn get<T>(T) -> <T as Trait>::Type
- 295..301 'get(x)': u32
- 299..300 'x': T
- 307..311 'get2': fn get2<u32, T>(T) -> u32
- 307..314 'get2(x)': u32
- 312..313 'x': T
- 320..323 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
- 320..326 'get(y)': i64
- 324..325 'y': impl Trait<Type = i64>
- 332..336 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
- 332..339 'get2(y)': i64
- 337..338 'y': impl Trait<Type = i64>
- 345..348 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
- 345..356 'get(set(S))': u64
- 349..352 'set': fn set<S<u64>>(S<u64>) -> S<u64>
- 349..355 'set(S)': S<u64>
- 353..354 'S': S<u64>
- 362..366 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
- 362..374 'get2(set(S))': u64
- 367..370 'set': fn set<S<u64>>(S<u64>) -> S<u64>
- 367..373 'set(S)': S<u64>
- 371..372 'S': S<u64>
- 380..384 'get2': fn get2<usize, S<usize>>(S<usize>) -> usize
- 380..396 'get2(S...size>)': usize
- 385..395 'S::<usize>': S<usize>
+ 81..82 't': T
+ 109..111 '{}': <T as Trait>::Type
+ 143..144 't': T
+ 154..156 '{}': U
+ 186..187 't': T
+ 197..200 '{t}': T
+ 198..199 't': T
+ 304..305 'x': T
+ 310..311 'y': impl Trait<Type = i64>
+ 337..486 '{ ...a)); }': ()
+ 343..346 'get': fn get<T>(T) -> <T as Trait>::Type
+ 343..349 'get(x)': u32
+ 347..348 'x': T
+ 355..359 'get2': fn get2<u32, T>(T) -> u32
+ 355..362 'get2(x)': u32
+ 360..361 'x': T
+ 368..371 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
+ 368..374 'get(y)': i64
+ 372..373 'y': impl Trait<Type = i64>
+ 380..384 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
+ 380..387 'get2(y)': i64
+ 385..386 'y': impl Trait<Type = i64>
+ 393..396 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
+ 393..417 'get(se...ata)))': u64
+ 397..400 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 397..416 'set(S(...Data))': S<u64>
+ 401..402 'S': fn S<u64>(PhantomData<u64>) -> S<u64>
+ 401..415 'S(PhantomData)': S<u64>
+ 403..414 'PhantomData': PhantomData<u64>
+ 423..427 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 423..448 'get2(s...ata)))': u64
+ 428..431 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 428..447 'set(S(...Data))': S<u64>
+ 432..433 'S': fn S<u64>(PhantomData<u64>) -> S<u64>
+ 432..446 'S(PhantomData)': S<u64>
+ 434..445 'PhantomData': PhantomData<u64>
+ 454..458 'get2': fn get2<usize, S<usize>>(S<usize>) -> usize
+ 454..483 'get2(S...Data))': usize
+ 459..469 'S::<usize>': fn S<usize>(PhantomData<usize>) -> S<usize>
+ 459..482 'S::<us...mData)': S<usize>
+ 470..481 'PhantomData': PhantomData<usize>
"#]],
);
}
@@ -1747,7 +1758,7 @@
impl Language for RustLanguage {
type Kind = SyntaxKind;
}
-struct SyntaxNode<L> {}
+struct SyntaxNode<L>(L);
fn foo() -> impl Iterator<Item = SyntaxNode<RustLanguage>> {}
trait Clone {
@@ -1886,31 +1897,36 @@
fn super_trait_assoc_type_bounds() {
check_infer(
r#"
+//- minicore: phantom_data
+use core::marker::PhantomData;
+
trait SuperTrait { type Type; }
trait Trait where Self: SuperTrait {}
fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
fn set<T: Trait<Type = u64>>(t: T) -> T {t}
-struct S<T>;
+struct S<T>(PhantomData<T>);
impl<T> SuperTrait for S<T> { type Type = T; }
impl<T> Trait for S<T> {}
fn test() {
- get2(set(S));
+ get2(set(S(PhantomData)));
}"#,
expect![[r#"
- 102..103 't': T
- 113..115 '{}': U
- 145..146 't': T
- 156..159 '{t}': T
- 157..158 't': T
- 258..279 '{ ...S)); }': ()
- 264..268 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
- 264..276 'get2(set(S))': u64
- 269..272 'set': fn set<S<u64>>(S<u64>) -> S<u64>
- 269..275 'set(S)': S<u64>
- 273..274 'S': S<u64>
+ 134..135 't': T
+ 145..147 '{}': U
+ 177..178 't': T
+ 188..191 '{t}': T
+ 189..190 't': T
+ 306..340 '{ ...))); }': ()
+ 312..316 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 312..337 'get2(s...ata)))': u64
+ 317..320 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 317..336 'set(S(...Data))': S<u64>
+ 321..322 'S': fn S<u64>(PhantomData<u64>) -> S<u64>
+ 321..335 'S(PhantomData)': S<u64>
+ 323..334 'PhantomData': PhantomData<u64>
"#]],
);
}
@@ -2000,7 +2016,7 @@
fn foo(&self) -> usize {}
}
-struct Lazy<T, F = fn() -> T>(F);
+struct Lazy<T, F = fn() -> T>(T, F);
impl<T, F> Lazy<T, F> {
pub fn new(f: F) -> Lazy<T, F> {}
@@ -2014,7 +2030,7 @@
let lazy1: Lazy<Foo, _> = Lazy::new(|| Foo);
let r1 = lazy1.foo();
- fn make_foo_fn() -> Foo {}
+fn make_foo_fn() -> Foo {}
let make_foo_fn_ptr: fn() -> Foo = make_foo_fn;
let lazy2: Lazy<Foo, _> = Lazy::new(make_foo_fn_ptr);
let r2 = lazy2.foo();
@@ -2022,27 +2038,27 @@
expect![[r#"
36..40 'self': &'? Foo
51..53 '{}': usize
- 131..132 'f': F
- 151..153 '{}': Lazy<T, F>
- 251..497 '{ ...o(); }': ()
- 261..266 'lazy1': Lazy<Foo, impl Fn() -> Foo>
- 283..292 'Lazy::new': fn new<Foo, impl Fn() -> Foo>(impl Fn() -> Foo) -> Lazy<Foo, impl Fn() -> Foo>
- 283..300 'Lazy::...| Foo)': Lazy<Foo, impl Fn() -> Foo>
- 293..299 '|| Foo': impl Fn() -> Foo
- 296..299 'Foo': Foo
- 310..312 'r1': usize
- 315..320 'lazy1': Lazy<Foo, impl Fn() -> Foo>
- 315..326 'lazy1.foo()': usize
- 368..383 'make_foo_fn_ptr': fn() -> Foo
- 399..410 'make_foo_fn': fn make_foo_fn() -> Foo
- 420..425 'lazy2': Lazy<Foo, fn() -> Foo>
- 442..451 'Lazy::new': fn new<Foo, fn() -> Foo>(fn() -> Foo) -> Lazy<Foo, fn() -> Foo>
- 442..468 'Lazy::...n_ptr)': Lazy<Foo, fn() -> Foo>
- 452..467 'make_foo_fn_ptr': fn() -> Foo
- 478..480 'r2': usize
- 483..488 'lazy2': Lazy<Foo, fn() -> Foo>
- 483..494 'lazy2.foo()': usize
- 357..359 '{}': Foo
+ 134..135 'f': F
+ 154..156 '{}': Lazy<T, F>
+ 254..496 '{ ...o(); }': ()
+ 264..269 'lazy1': Lazy<Foo, impl Fn() -> Foo>
+ 286..295 'Lazy::new': fn new<Foo, impl Fn() -> Foo>(impl Fn() -> Foo) -> Lazy<Foo, impl Fn() -> Foo>
+ 286..303 'Lazy::...| Foo)': Lazy<Foo, impl Fn() -> Foo>
+ 296..302 '|| Foo': impl Fn() -> Foo
+ 299..302 'Foo': Foo
+ 313..315 'r1': usize
+ 318..323 'lazy1': Lazy<Foo, impl Fn() -> Foo>
+ 318..329 'lazy1.foo()': usize
+ 367..382 'make_foo_fn_ptr': fn() -> Foo
+ 398..409 'make_foo_fn': fn make_foo_fn() -> Foo
+ 419..424 'lazy2': Lazy<Foo, fn() -> Foo>
+ 441..450 'Lazy::new': fn new<Foo, fn() -> Foo>(fn() -> Foo) -> Lazy<Foo, fn() -> Foo>
+ 441..467 'Lazy::...n_ptr)': Lazy<Foo, fn() -> Foo>
+ 451..466 'make_foo_fn_ptr': fn() -> Foo
+ 477..479 'r2': usize
+ 482..487 'lazy2': Lazy<Foo, fn() -> Foo>
+ 482..493 'lazy2.foo()': usize
+ 356..358 '{}': Foo
"#]],
);
}
@@ -2341,7 +2357,7 @@
type Result;
}
-struct Ty<I: Interner> {}
+struct Ty<I: Interner>(I);
impl<I: Interner, TI: Interner> Fold<I, TI> for Ty<I> {
type Result = Ty<TI>;
}
@@ -2383,17 +2399,20 @@
fn trait_impl_self_ty_cycle() {
check_types(
r#"
+//- minicore: phantom_data
+use core::marker::PhantomData;
+
trait Trait {
fn foo(&self);
}
-struct S<T>;
+struct S<T>(T);
impl Trait for S<Self> {}
fn test() {
- S.foo();
-} //^^^^^^^ {unknown}
+ S(PhantomData).foo();
+} //^^^^^^^^^^^^^^^^^^^^ {unknown}
"#,
);
}
@@ -2743,7 +2762,7 @@
check_types(
r#"
//- minicore: deref, unsize, dispatch_from_dyn
-struct Box<T: ?Sized> {}
+struct Box<T: ?Sized>(*const T);
impl<T: ?Sized> core::ops::Deref for Box<T> {
type Target = T;
}
@@ -2802,7 +2821,7 @@
fn into_iter(self) -> Self::IntoIter;
}
-pub struct FilterMap<I, F> { }
+pub struct FilterMap<I, F>(I, F);
impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
where
F: FnMut(I::Item) -> Option<B>,
@@ -2820,7 +2839,7 @@
}
}
-struct Vec<T> {}
+struct Vec<T>(T);
impl<T> Vec<T> {
fn new() -> Self { loop {} }
}
@@ -2830,7 +2849,7 @@
type IntoIter = IntoIter<T>;
}
-pub struct IntoIter<T> { }
+pub struct IntoIter<T>(T);
impl<T> Iterator for IntoIter<T> {
type Item = T;
}
@@ -2852,35 +2871,35 @@
242..249 'loop {}': !
247..249 '{}': ()
360..364 'self': Self
- 689..693 'self': I
- 700..720 '{ ... }': I
- 710..714 'self': I
- 779..790 '{ loop {} }': Vec<T>
- 781..788 'loop {}': !
- 786..788 '{}': ()
- 977..1104 '{ ... }); }': ()
- 983..998 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
- 983..1000 'Vec::<...:new()': Vec<i32>
- 983..1012 'Vec::<...iter()': IntoIter<i32>
- 983..1075 'Vec::<...one })': FilterMap<IntoIter<i32>, impl FnMut(i32) -> Option<u32>>
- 983..1101 'Vec::<... y; })': ()
- 1029..1074 '|x| if...None }': impl FnMut(i32) -> Option<u32>
- 1030..1031 'x': i32
- 1033..1074 'if x >...None }': Option<u32>
- 1036..1037 'x': i32
- 1036..1041 'x > 0': bool
- 1040..1041 '0': i32
- 1042..1060 '{ Some...u32) }': Option<u32>
- 1044..1048 'Some': fn Some<u32>(u32) -> Option<u32>
- 1044..1058 'Some(x as u32)': Option<u32>
- 1049..1050 'x': i32
- 1049..1057 'x as u32': u32
- 1066..1074 '{ None }': Option<u32>
- 1068..1072 'None': Option<u32>
- 1090..1100 '|y| { y; }': impl FnMut(u32)
- 1091..1092 'y': u32
- 1094..1100 '{ y; }': ()
- 1096..1097 'y': u32
+ 692..696 'self': I
+ 703..723 '{ ... }': I
+ 713..717 'self': I
+ 783..794 '{ loop {} }': Vec<T>
+ 785..792 'loop {}': !
+ 790..792 '{}': ()
+ 981..1108 '{ ... }); }': ()
+ 987..1002 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
+ 987..1004 'Vec::<...:new()': Vec<i32>
+ 987..1016 'Vec::<...iter()': IntoIter<i32>
+ 987..1079 'Vec::<...one })': FilterMap<IntoIter<i32>, impl FnMut(i32) -> Option<u32>>
+ 987..1105 'Vec::<... y; })': ()
+ 1033..1078 '|x| if...None }': impl FnMut(i32) -> Option<u32>
+ 1034..1035 'x': i32
+ 1037..1078 'if x >...None }': Option<u32>
+ 1040..1041 'x': i32
+ 1040..1045 'x > 0': bool
+ 1044..1045 '0': i32
+ 1046..1064 '{ Some...u32) }': Option<u32>
+ 1048..1052 'Some': fn Some<u32>(u32) -> Option<u32>
+ 1048..1062 'Some(x as u32)': Option<u32>
+ 1053..1054 'x': i32
+ 1053..1061 'x as u32': u32
+ 1070..1078 '{ None }': Option<u32>
+ 1072..1076 'None': Option<u32>
+ 1094..1104 '|y| { y; }': impl FnMut(u32)
+ 1095..1096 'y': u32
+ 1098..1104 '{ y; }': ()
+ 1100..1101 'y': u32
"#]],
);
}
@@ -3134,7 +3153,6 @@
#[test]
fn dyn_fn_param_informs_call_site_closure_signature() {
- cov_mark::check!(dyn_fn_param_informs_call_site_closure_signature);
check_types(
r#"
//- minicore: fn, coerce_unsized, dispatch_from_dyn
@@ -3617,7 +3635,7 @@
impl Add<u32> for u32 { type Output = u32 }
impl Add<&u32> for u32 { type Output = u32 }
-struct V<T>;
+struct V<T>(T);
impl<T> V<T> {
fn default() -> Self { loop {} }
fn get(&self, _: &T) -> &T { loop {} }
@@ -3646,7 +3664,7 @@
// fallback to integer type variable for `42`.
impl Add<&()> for i32 { type Output = (); }
-struct V<T>;
+struct V<T>(T);
impl<T> V<T> {
fn default() -> Self { loop {} }
fn get(&self) -> &T { loop {} }
@@ -4213,21 +4231,21 @@
}
fn g<'a, T: 'a>(v: impl Trait<Assoc<T> = &'a T>) {
let a = v.get::<T>();
- //^ &'a T
+ //^ &'? T
let a = v.get::<()>();
//^ <impl Trait<Assoc<T> = &'a T> as Trait>::Assoc<()>
}
fn h<'a>(v: impl Trait<Assoc<i32> = &'a i32> + Trait<Assoc<i64> = &'a i64>) {
let a = v.get::<i32>();
- //^ &'a i32
+ //^ &'? i32
let a = v.get::<i64>();
- //^ &'a i64
+ //^ &'? i64
}
fn i<'a>(v: impl Trait<Assoc<i32> = &'a i32, Assoc<i64> = &'a i64>) {
let a = v.get::<i32>();
- //^ &'a i32
+ //^ &'? i32
let a = v.get::<i64>();
- //^ &'a i64
+ //^ &'? i64
}
"#,
);
@@ -4257,8 +4275,8 @@
127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + '?)
164..195 '{ ...f(); }': ()
170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + '?)
- 170..184 'v.get::<i32>()': {unknown}
- 170..192 'v.get:...eref()': &'? {unknown}
+ 170..184 'v.get::<i32>()': <dyn Trait<Assoc<i32> = &'a i32> + '? as Trait>::Assoc<i32>
+ 170..192 'v.get:...eref()': {unknown}
"#]],
);
}
@@ -4485,7 +4503,9 @@
fn derive_macro_bounds() {
check_types(
r#"
- //- minicore: clone, derive
+ //- minicore: clone, derive, phantom_data
+ use core::marker::PhantomData;
+
#[derive(Clone)]
struct Copy;
struct NotCopy;
@@ -4508,7 +4528,7 @@
struct AssocGeneric3<T: Tr>(Generic<T::Assoc>);
#[derive(Clone)]
- struct Vec<T>();
+ struct Vec<T>(PhantomData<T>);
#[derive(Clone)]
struct R1(Vec<R2>);
@@ -4532,9 +4552,9 @@
let x: &AssocGeneric3<Copy> = &AssocGeneric3(Generic(NotCopy));
let x = x.clone();
//^ &'? AssocGeneric3<Copy>
- let x = (&R1(Vec())).clone();
+ let x = (&R1(Vec(PhantomData))).clone();
//^ R1
- let x = (&R2(R1(Vec()))).clone();
+ let x = (&R2(R1(Vec(PhantomData)))).clone();
//^ R2
}
"#,
@@ -4624,8 +4644,10 @@
fn infer_borrow() {
check_types(
r#"
-//- minicore: index
-pub struct SomeMap<K>;
+//- minicore: index, phantom_data
+use core::marker::PhantomData;
+
+pub struct SomeMap<K>(PhantomData<K>);
pub trait Borrow<Borrowed: ?Sized> {
fn borrow(&self) -> &Borrowed;
@@ -4658,7 +4680,7 @@
}
fn foo() {
- let mut map = SomeMap;
+ let mut map = SomeMap(PhantomData);
map["a"] = ();
map;
//^^^ SomeMap<&'static str>
@@ -4908,6 +4930,7 @@
#[test]
fn async_fn_return_type() {
+    // FIXME(next-solver): Async closures are currently lowered as plain closures. We should fix that.
check_infer(
r#"
//- minicore: async_fn
@@ -4925,9 +4948,9 @@
46..53 'loop {}': !
51..53 '{}': ()
67..97 '{ ...()); }': ()
- 73..76 'foo': fn foo<impl AsyncFn() -> impl Future<Output = ()>, ()>(impl AsyncFn() -> impl Future<Output = ()>)
+ 73..76 'foo': fn foo<impl Fn(), ()>(impl Fn())
73..94 'foo(as...|| ())': ()
- 77..93 'async ... || ()': impl AsyncFn() -> impl Future<Output = ()>
+ 77..93 'async ... || ()': impl Fn()
91..93 '()': ()
"#]],
);
@@ -5020,7 +5043,7 @@
223..227 'iter': Box<dyn Iterator<Item = &'? [u8]> + 'static>
273..280 'loop {}': !
278..280 '{}': ()
- 290..291 '_': Box<dyn Iterator<Item = &'? [u8]> + 'static>
+ 290..291 '_': Box<dyn Iterator<Item = &'? [u8]> + '?>
294..298 'iter': Box<dyn Iterator<Item = &'? [u8]> + 'static>
294..310 'iter.i...iter()': Box<dyn Iterator<Item = &'? [u8]> + 'static>
152..156 'self': &'? mut Box<I>
diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs
index a637724..8095d70 100644
--- a/crates/hir-ty/src/traits.rs
+++ b/crates/hir-ty/src/traits.rs
@@ -4,7 +4,6 @@
use std::hash::Hash;
use chalk_ir::{DebruijnIndex, GoalData, fold::TypeFoldable};
-use chalk_solve::rust_ir;
use base_db::Crate;
use hir_def::{BlockId, TraitId, lang_item::LangItem};
@@ -277,7 +276,7 @@
Ok((_, Certainty::Yes, args)) => NextTraitSolveResult::Certain(
convert_canonical_args_for_result(DbInterner::new_with(db, Some(krate), block), args),
),
- Ok((_, Certainty::Maybe(_), args)) => {
+ Ok((_, Certainty::Maybe { .. }, args)) => {
let subst = convert_canonical_args_for_result(
DbInterner::new_with(db, Some(krate), block),
args,
@@ -316,7 +315,7 @@
Ok((_, Certainty::Yes, args)) => NextTraitSolveResult::Certain(
convert_canonical_args_for_result(infer_ctxt.interner, args),
),
- Ok((_, Certainty::Maybe(_), args)) => {
+ Ok((_, Certainty::Maybe { .. }, args)) => {
let subst = convert_canonical_args_for_result(infer_ctxt.interner, args);
NextTraitSolveResult::Uncertain(chalk_ir::Canonical {
binders: subst.binders,
@@ -405,15 +404,6 @@
}
}
- pub const fn to_chalk_ir(self) -> rust_ir::ClosureKind {
- // Chalk doesn't support async fn traits.
- match self {
- FnTrait::AsyncFnOnce | FnTrait::FnOnce => rust_ir::ClosureKind::FnOnce,
- FnTrait::AsyncFnMut | FnTrait::FnMut => rust_ir::ClosureKind::FnMut,
- FnTrait::AsyncFn | FnTrait::Fn => rust_ir::ClosureKind::Fn,
- }
- }
-
pub fn method_name(self) -> Name {
match self {
FnTrait::FnOnce => Name::new_symbol_root(sym::call_once),
@@ -428,9 +418,4 @@
pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option<TraitId> {
self.lang_item().resolve_trait(db, krate)
}
-
- #[inline]
- pub(crate) fn is_async(self) -> bool {
- matches!(self, FnTrait::AsyncFn | FnTrait::AsyncFnMut | FnTrait::AsyncFnOnce)
- }
}
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index 07679d2..427c4bb 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -20,14 +20,14 @@
use intern::sym;
use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
-use rustc_type_ir::inherent::{IntoKind, SliceLike};
+use rustc_type_ir::inherent::{GenericArgs, IntoKind, SliceLike};
use smallvec::{SmallVec, smallvec};
use span::Edition;
-use stdx::never;
+use crate::next_solver::mapping::NextSolverToChalk;
use crate::{
- ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TargetFeatures, TraitRef,
- TraitRefExt, Ty, WhereClause,
+ ChalkTraitId, Const, ConstScalar, Interner, Substitution, TargetFeatures, TraitRef,
+ TraitRefExt, Ty,
consteval::unknown_const,
db::HirDatabase,
layout::{Layout, TagEncoding},
@@ -120,52 +120,6 @@
}
}
-pub(super) fn elaborate_clause_supertraits(
- db: &dyn HirDatabase,
- clauses: impl Iterator<Item = WhereClause>,
-) -> ClauseElaborator<'_> {
- let mut elaborator = ClauseElaborator { db, stack: Vec::new(), seen: FxHashSet::default() };
- elaborator.extend_deduped(clauses);
-
- elaborator
-}
-
-pub(super) struct ClauseElaborator<'a> {
- db: &'a dyn HirDatabase,
- stack: Vec<WhereClause>,
- seen: FxHashSet<WhereClause>,
-}
-
-impl ClauseElaborator<'_> {
- fn extend_deduped(&mut self, clauses: impl IntoIterator<Item = WhereClause>) {
- self.stack.extend(clauses.into_iter().filter(|c| self.seen.insert(c.clone())))
- }
-
- fn elaborate_supertrait(&mut self, clause: &WhereClause) {
- if let WhereClause::Implemented(trait_ref) = clause {
- direct_super_trait_refs(self.db, trait_ref, |t| {
- let clause = WhereClause::Implemented(t);
- if self.seen.insert(clause.clone()) {
- self.stack.push(clause);
- }
- });
- }
- }
-}
-
-impl Iterator for ClauseElaborator<'_> {
- type Item = WhereClause;
-
- fn next(&mut self) -> Option<Self::Item> {
- if let Some(next) = self.stack.pop() {
- self.elaborate_supertrait(&next);
- Some(next)
- } else {
- None
- }
- }
-}
-
fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) {
let resolver = LazyCell::new(|| trait_.resolver(db));
let (generic_params, store) = db.generic_params_and_store(trait_.into());
@@ -239,34 +193,25 @@
})
}
-/// It is a bit different from the rustc equivalent. Currently it stores:
-/// - 0..n-1: generics of the parent
-/// - n: the function signature, encoded as a function pointer type
-///
-/// and it doesn't store the closure types and fields.
-///
-/// Codes should not assume this ordering, and should always use methods available
-/// on this struct for retrieving, and `TyBuilder::substs_for_closure` for creating.
pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution);
impl<'a> ClosureSubst<'a> {
- pub(crate) fn parent_subst(&self) -> &'a [GenericArg] {
- match self.0.as_slice(Interner) {
- [x @ .., _] => x,
- _ => {
- never!("Closure missing parameter");
- &[]
- }
- }
+ pub(crate) fn parent_subst(&self, db: &dyn HirDatabase) -> Substitution {
+ let interner = DbInterner::new_with(db, None, None);
+ let subst =
+ <Substitution as ChalkToNextSolver<crate::next_solver::GenericArgs<'_>>>::to_nextsolver(
+ self.0, interner,
+ );
+ subst.split_closure_args().parent_args.to_chalk(interner)
}
- pub(crate) fn sig_ty(&self) -> &'a Ty {
- match self.0.as_slice(Interner) {
- [.., x] => x.assert_ty_ref(Interner),
- _ => {
- unreachable!("Closure missing sig_ty parameter");
- }
- }
+ pub(crate) fn sig_ty(&self, db: &dyn HirDatabase) -> Ty {
+ let interner = DbInterner::new_with(db, None, None);
+ let subst =
+ <Substitution as ChalkToNextSolver<crate::next_solver::GenericArgs<'_>>>::to_nextsolver(
+ self.0, interner,
+ );
+ subst.split_closure_args_untupled().closure_sig_as_fn_ptr_ty.to_chalk(interner)
}
}
@@ -278,8 +223,17 @@
DeprecatedSafe2024,
}
-pub fn target_feature_is_safe_in_target(target: &TargetData) -> bool {
- matches!(target.arch, target::Arch::Wasm32 | target::Arch::Wasm64)
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum TargetFeatureIsSafeInTarget {
+ No,
+ Yes,
+}
+
+pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsSafeInTarget {
+ match target.arch {
+ target::Arch::Wasm32 | target::Arch::Wasm64 => TargetFeatureIsSafeInTarget::Yes,
+ _ => TargetFeatureIsSafeInTarget::No,
+ }
}
pub fn is_fn_unsafe_to_call(
@@ -287,14 +241,14 @@
func: FunctionId,
caller_target_features: &TargetFeatures,
call_edition: Edition,
- target_feature_is_safe: bool,
+ target_feature_is_safe: TargetFeatureIsSafeInTarget,
) -> Unsafety {
let data = db.function_signature(func);
if data.is_unsafe() {
return Unsafety::Unsafe;
}
- if data.has_target_feature() && !target_feature_is_safe {
+ if data.has_target_feature() && target_feature_is_safe == TargetFeatureIsSafeInTarget::No {
// RFC 2396 <https://rust-lang.github.io/rfcs/2396-target-feature-1.1.html>.
let callee_target_features =
TargetFeatures::from_attrs_no_implications(&db.attrs(func.into()));
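The hunk above replaces the `bool` parameter of `is_fn_unsafe_to_call` with a dedicated two-variant enum so call sites spell out what the flag means. A minimal, self-contained sketch of the same pattern (the `Arch`/`TargetData` shapes below are simplified stand-ins for illustration, not this crate's real types):

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub enum TargetFeatureIsSafeInTarget {
        No,
        Yes,
    }

    // Simplified stand-ins for the real target description types.
    pub enum Arch { Wasm32, Wasm64, Other }
    pub struct TargetData { pub arch: Arch }

    pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsSafeInTarget {
        // Per the hunk above, calling `#[target_feature]` functions is treated
        // as safe only on the wasm targets.
        match target.arch {
            Arch::Wasm32 | Arch::Wasm64 => TargetFeatureIsSafeInTarget::Yes,
            _ => TargetFeatureIsSafeInTarget::No,
        }
    }

    fn main() {
        let wasm = TargetData { arch: Arch::Wasm32 };
        assert_eq!(target_feature_is_safe_in_target(&wasm), TargetFeatureIsSafeInTarget::Yes);
    }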
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index a1ebff0..8593dba 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -49,7 +49,23 @@
if count == 0 {
return None;
}
- let variances = Context { generics, variances: vec![Variance::Bivariant; count], db }.solve();
+ let mut variances =
+ Context { generics, variances: vec![Variance::Bivariant; count], db }.solve();
+
+ // FIXME(next-solver): This is *not* the correct behavior. I don't know if it has an actual effect,
+    // since bivariance is prohibited in Rust, but rustc definitely does not fall back to bivariance.
+    // So why do we do this? Because, with the new solver, the effects of bivariance are catastrophic:
+    // it leads to not relating types properly, and to very, very hard-to-debug bugs (speaking from experience).
+    // Furthermore, our variance infra is known not to handle cycles properly. Therefore, at least until we fix
+    // cycles, and perhaps forever at least for our tests, not allowing bivariance makes sense.
+    // Why specifically invariance? I don't have a strong reason, mainly that invariance is a stronger relationship
+    // (therefore, less room for mistakes) and that IMO incorrect covariance can be more problematic than incorrect
+ // bivariance, at least while we don't handle lifetimes anyway.
+ for variance in &mut variances {
+ if *variance == Variance::Bivariant {
+ *variance = Variance::Invariant;
+ }
+ }
variances.is_empty().not().then(|| Arc::from_iter(variances))
}
@@ -73,7 +89,8 @@
if count == 0 {
return None;
}
- Some(Arc::from(vec![Variance::Bivariant; count]))
+    // FIXME(next-solver): Return `Invariant` and not `Bivariant` here; see the comment in the main query.
+ Some(Arc::from(vec![Variance::Invariant; count]))
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
@@ -581,8 +598,8 @@
}
"#,
expect![[r#"
- Hello['a: bivariant]
- Other['a: bivariant]
+ Hello['a: invariant]
+ Other['a: invariant]
"#]],
);
}
@@ -601,7 +618,7 @@
}
"#,
expect![[r#"
- Foo[T: bivariant]
+ Foo[T: invariant]
"#]],
);
}
@@ -683,9 +700,9 @@
get[Self: contravariant, T: covariant]
get[Self: contravariant, T: contravariant]
TestStruct[U: covariant, T: covariant]
- TestEnum[U: bivariant, T: covariant]
- TestContraStruct[U: bivariant, T: covariant]
- TestBox[U: bivariant, T: covariant]
+ TestEnum[U: invariant, T: covariant]
+ TestContraStruct[U: invariant, T: covariant]
+ TestBox[U: invariant, T: covariant]
"#]],
);
}
@@ -805,8 +822,8 @@
trait SomeTrait<'a> { fn foo(&self); } // OK on traits.
"#,
expect![[r#"
- SomeStruct['a: bivariant]
- SomeEnum['a: bivariant]
+ SomeStruct['a: invariant]
+ SomeEnum['a: invariant]
foo[Self: contravariant, 'a: invariant]
"#]],
);
@@ -834,14 +851,14 @@
"#,
expect![[r#"
- SomeStruct[A: bivariant]
- SomeEnum[A: bivariant]
- ListCell[T: bivariant]
- SelfTyAlias[T: bivariant]
- WithBounds[T: bivariant]
- WithWhereBounds[T: bivariant]
- WithOutlivesBounds[T: bivariant]
- DoubleNothing[T: bivariant]
+ SomeStruct[A: invariant]
+ SomeEnum[A: invariant]
+ ListCell[T: invariant]
+ SelfTyAlias[T: invariant]
+ WithBounds[T: invariant]
+ WithWhereBounds[T: invariant]
+ WithOutlivesBounds[T: invariant]
+ DoubleNothing[T: invariant]
"#]],
);
}
@@ -952,7 +969,7 @@
"#,
expect![[r#"
S[T: covariant]
- S2[T: bivariant]
+ S2[T: invariant]
S3[T: covariant]
"#]],
);
@@ -965,7 +982,7 @@
struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
"#,
expect![[r#"
- FixedPoint[T: bivariant, U: bivariant, V: bivariant]
+ FixedPoint[T: invariant, U: invariant, V: invariant]
"#]],
);
}
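The `FixedPoint` expectation just above shows the fallback most clearly: the cyclic definition is one of the cases the FIXME mentions where the solver gets stuck at `Bivariant`, and the new loop then reports the parameters as invariant. A tiny self-contained sketch of that fallback (using a simplified `Variance` enum, not the crate's real one):

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    #[allow(dead_code)]
    enum Variance { Covariant, Contravariant, Invariant, Bivariant }

    // Mirrors the fallback added in `variance.rs`: anything the solver left
    // `Bivariant` is downgraded to `Invariant` before the result is returned.
    fn bivariance_fallback(variances: &mut [Variance]) {
        for v in variances.iter_mut() {
            if *v == Variance::Bivariant {
                *v = Variance::Invariant;
            }
        }
    }

    fn main() {
        // e.g. `struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);`
        let mut variances = [Variance::Bivariant; 3];
        bivariance_fallback(&mut variances);
        assert!(variances.iter().all(|&v| v == Variance::Invariant));
    }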
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index bfcede8..f8dacf0 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -2549,11 +2549,13 @@
let target_feature_is_safe_in_target =
match &caller.krate(db).id.workspace_data(db).target {
Ok(target) => hir_ty::target_feature_is_safe_in_target(target),
- Err(_) => false,
+ Err(_) => hir_ty::TargetFeatureIsSafeInTarget::No,
};
(target_features, target_feature_is_safe_in_target)
})
- .unwrap_or_else(|| (hir_ty::TargetFeatures::default(), false));
+ .unwrap_or_else(|| {
+ (hir_ty::TargetFeatures::default(), hir_ty::TargetFeatureIsSafeInTarget::No)
+ });
matches!(
hir_ty::is_fn_unsafe_to_call(
db,
@@ -4660,7 +4662,7 @@
.iter()
.map(|capture| Type {
env: db.trait_environment_for_body(owner),
- ty: capture.ty(&self.subst),
+ ty: capture.ty(db, &self.subst),
_pd: PhantomCovariantLifetime::new(),
})
.collect()
diff --git a/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
index 5f526ec..3dd435d 100644
--- a/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
+++ b/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
@@ -805,6 +805,7 @@
);
}
+ #[ignore = "FIXME(next-solver): Fix async closures"]
#[test]
fn replaces_async_closure_with_async_fn() {
check_assist(
@@ -1066,7 +1067,7 @@
r#"
fn foo() {
let (mut a, b) = (0.1, "abc");
- fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) {
+ fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) {
*a = 1.2;
let c = *b;
}
@@ -1098,7 +1099,7 @@
r#"
fn foo() {
let (mut a, b) = (0.1, "abc");
- fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) {
+ fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) {
let _: &mut bool = p2;
*a = 1.2;
let c = *b;
@@ -1136,7 +1137,7 @@
r#"
fn foo() {
let (mut a, b) = (0.1, "abc");
- fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) {
+ fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) {
let _: &mut bool = p2;
*a = 1.2;
let c = *b;
diff --git a/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/crates/ide-assists/src/handlers/destructure_struct_binding.rs
index 397327c..63a41ae 100644
--- a/crates/ide-assists/src/handlers/destructure_struct_binding.rs
+++ b/crates/ide-assists/src/handlers/destructure_struct_binding.rs
@@ -7,7 +7,7 @@
search::{FileReference, SearchScope},
};
use itertools::Itertools;
-use syntax::ast::syntax_factory::SyntaxFactory;
+use syntax::ast::{HasName, syntax_factory::SyntaxFactory};
use syntax::syntax_editor::SyntaxEditor;
use syntax::{AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr, ast};
@@ -71,13 +71,14 @@
struct StructEditData {
ident_pat: ast::IdentPat,
+ name: ast::Name,
kind: hir::StructKind,
struct_def_path: hir::ModPath,
visible_fields: Vec<hir::Field>,
usages: Vec<FileReference>,
names_in_scope: FxHashSet<SmolStr>,
has_private_members: bool,
- is_nested: bool,
+ need_record_field_name: bool,
is_ref: bool,
edition: Edition,
}
@@ -114,7 +115,11 @@
}
let is_ref = ty.is_reference();
- let is_nested = ident_pat.syntax().parent().and_then(ast::RecordPatField::cast).is_some();
+ let need_record_field_name = ident_pat
+ .syntax()
+ .parent()
+ .and_then(ast::RecordPatField::cast)
+ .is_some_and(|field| field.colon_token().is_none());
let usages = ctx
.sema
@@ -133,6 +138,7 @@
let names_in_scope = get_names_in_scope(ctx, &ident_pat, &usages).unwrap_or_default();
Some(StructEditData {
+ name: ident_pat.name()?,
ident_pat,
kind,
struct_def_path,
@@ -140,7 +146,7 @@
has_private_members,
visible_fields,
names_in_scope,
- is_nested,
+ need_record_field_name,
is_ref,
edition: module.krate().edition(ctx.db()),
})
@@ -177,6 +183,7 @@
field_names: &[(SmolStr, SmolStr)],
) {
let ident_pat = &data.ident_pat;
+ let name = &data.name;
let struct_path = mod_path_to_ast(&data.struct_def_path, data.edition);
let is_ref = ident_pat.ref_token().is_some();
@@ -194,9 +201,9 @@
hir::StructKind::Record => {
let fields = field_names.iter().map(|(old_name, new_name)| {
// Use shorthand syntax if possible
- if old_name == new_name && !is_mut {
+ if old_name == new_name {
make.record_pat_field_shorthand(
- make.ident_pat(false, false, make.name(old_name)).into(),
+ make.ident_pat(is_ref, is_mut, make.name(old_name)).into(),
)
} else {
make.record_pat_field(
@@ -215,8 +222,8 @@
// If the binding is nested inside a record, we need to wrap the new
// destructured pattern in a non-shorthand record field
- let destructured_pat = if data.is_nested {
- make.record_pat_field(make.name_ref(&ident_pat.to_string()), new_pat).syntax().clone()
+ let destructured_pat = if data.need_record_field_name {
+ make.record_pat_field(make.name_ref(&name.to_string()), new_pat).syntax().clone()
} else {
new_pat.syntax().clone()
};
@@ -579,7 +586,7 @@
struct Foo { bar: i32, baz: i32 }
fn main() {
- let Foo { bar: mut bar, baz: mut baz } = Foo { bar: 1, baz: 2 };
+ let Foo { mut bar, mut baz } = Foo { bar: 1, baz: 2 };
let bar2 = bar;
let baz2 = &baz;
}
@@ -588,6 +595,86 @@
}
#[test]
+ fn mut_record_field() {
+ check_assist(
+ destructure_struct_binding,
+ r#"
+ struct Foo { x: () }
+ struct Bar { foo: Foo }
+ fn f(Bar { mut $0foo }: Bar) {}
+ "#,
+ r#"
+ struct Foo { x: () }
+ struct Bar { foo: Foo }
+ fn f(Bar { foo: Foo { mut x } }: Bar) {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn ref_record_field() {
+ check_assist(
+ destructure_struct_binding,
+ r#"
+ struct Foo { x: () }
+ struct Bar { foo: Foo }
+ fn f(Bar { ref $0foo }: Bar) {
+ let _ = foo.x;
+ }
+ "#,
+ r#"
+ struct Foo { x: () }
+ struct Bar { foo: Foo }
+ fn f(Bar { foo: Foo { ref x } }: Bar) {
+ let _ = *x;
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn ref_mut_record_field() {
+ check_assist(
+ destructure_struct_binding,
+ r#"
+ struct Foo { x: () }
+ struct Bar { foo: Foo }
+ fn f(Bar { ref mut $0foo }: Bar) {
+ let _ = foo.x;
+ }
+ "#,
+ r#"
+ struct Foo { x: () }
+ struct Bar { foo: Foo }
+ fn f(Bar { foo: Foo { ref mut x } }: Bar) {
+ let _ = *x;
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn ref_mut_record_renamed_field() {
+ check_assist(
+ destructure_struct_binding,
+ r#"
+ struct Foo { x: () }
+ struct Bar { foo: Foo }
+ fn f(Bar { foo: ref mut $0foo1 }: Bar) {
+ let _ = foo1.x;
+ }
+ "#,
+ r#"
+ struct Foo { x: () }
+ struct Bar { foo: Foo }
+ fn f(Bar { foo: Foo { ref mut x } }: Bar) {
+ let _ = *x;
+ }
+ "#,
+ )
+ }
+
+ #[test]
fn mut_ref() {
check_assist(
destructure_struct_binding,
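The shorthand change in this file leans on the fact that record-pattern shorthand accepts `ref`/`mut` binding modes directly, so `bar: mut bar` can always be shortened to `mut bar` (this is what the updated expectation `let Foo { mut bar, mut baz } = ...` above checks). A plain-Rust illustration, not part of the diff:

    struct Foo { bar: i32, baz: i32 }

    fn main() {
        let foo = Foo { bar: 1, baz: 2 };
        // Shorthand with a binding mode: equivalent to `Foo { bar: mut bar, baz: baz }`.
        let Foo { mut bar, baz } = foo;
        bar += baz;
        assert_eq!(bar, 3);
    }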
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index e79ce56..90a5139 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -5044,7 +5044,7 @@
fun_name(bar);
}
-fn $0fun_name(bar: &'static str) {
+fn $0fun_name(bar: &str) {
m!(bar);
}
"#,
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index 3e422ae..c0637a7 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -180,6 +180,7 @@
// There is no way to choose what assist within a group you want to test against,
// so this is here to allow you choose.
+#[track_caller]
pub(crate) fn check_assist_by_label(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index b9ef68c..8b4f315 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -1501,7 +1501,9 @@
bar.$0
}
"#,
- expect![[r#""#]],
+ expect![[r#"
+ me foo() fn(self: Bar)
+ "#]],
);
}
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index db9a7b2..c420953 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -2137,7 +2137,7 @@
en Enum Enum
fn function() fn()
fn main() fn()
- lc variable &'static str
+ lc variable &str
ma helper!(…) macro_rules! helper
ma m!(…) macro_rules! m
ma makro!(…) macro_rules! makro
@@ -2612,6 +2612,7 @@
md rust_2024 (use core::prelude::rust_2024)
tt Clone
tt Copy
+ tt FromIterator
tt IntoIterator
tt Iterator
ta Result (use core::fmt::Result)
diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs
index c29e525..577c582 100644
--- a/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -152,7 +152,7 @@
fn main() {
let mut x = t();
x = _;
- //^ 💡 error: invalid `_` expression, expected type `&'static str`
+ //^ 💡 error: invalid `_` expression, expected type `&str`
x = "";
}
fn t<T>() -> T { loop {} }
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index dcca85d..bd5d134 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -268,7 +268,7 @@
}
fn main() {
let a = A {a: 0, b: ""};
- A::<i32, &'static str>::foo();
+ A::<i32, &str>::foo();
}
"#,
);
@@ -351,4 +351,26 @@
"#,
);
}
+
+ #[test]
+ fn iter_collect() {
+ check_diagnostics(
+ r#"
+//- minicore: unsize, coerce_unsized, iterator, iterators, sized
+struct Map<K, V>(K, V);
+impl<K, V> FromIterator<(K, V)> for Map<K, V> {
+ fn from_iter<T: IntoIterator<Item = (K, V)>>(_iter: T) -> Self {
+ loop {}
+ }
+}
+
+fn foo() -> Map<i32, &'static [&'static str]> {
+ [
+ (123, &["abc", "def"] as _),
+ (456, &["ghi"] as _),
+ ].into_iter().collect()
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index de10652..1ea11a2 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -4738,7 +4738,7 @@
*value*
```rust
- let value: Const<_>
+ let value: Const<-1>
```
---
@@ -7195,7 +7195,7 @@
"#,
expect![[r#"
```rust
- &'static str
+ &str
```"#]],
);
}
@@ -8459,7 +8459,7 @@
*aaaaa*
```rust
- let aaaaa: &'static str
+ let aaaaa: &str
```
"#]],
);
@@ -8479,7 +8479,7 @@
*aaaaa*
```rust
- let aaaaa: &'static str
+ let aaaaa: &str
```
"#]],
);
@@ -8499,7 +8499,7 @@
*aaaaa*
```rust
- let aaaaa: &'static str
+ let aaaaa: &str
```
"#]],
);
@@ -8524,7 +8524,7 @@
*aaaaa*
```rust
- let aaaaa: &'static str
+ let aaaaa: &str
```
"#]],
);
@@ -10168,7 +10168,7 @@
---
- `U` = `i32`, `T` = `&'static str`
+ `U` = `i32`, `T` = `&str`
"#]],
);
}
@@ -10261,7 +10261,7 @@
---
- `T` = `i8`, `U` = `&'static str`
+ `T` = `i8`, `U` = `&str`
"#]],
);
}
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index 7a4af4f..104740c 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -378,9 +378,9 @@
let foo = foo3();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo4();
- // ^^^ &'static dyn Fn(f64, f64) -> u32
+ // ^^^ &dyn Fn(f64, f64) -> u32
let foo = foo5();
- // ^^^ &'static dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
+ // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
let foo = foo6();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo7();
@@ -411,7 +411,7 @@
let foo = foo3();
// ^^^ impl Fn(f64, f64) -> u32
let foo = foo4();
- // ^^^ &'static dyn Fn(f64, f64) -> u32
+ // ^^^ &dyn Fn(f64, f64) -> u32
let foo = foo5();
let foo = foo6();
let foo = foo7();
@@ -526,7 +526,7 @@
//^^^^ i32
let _ = 22;
let test = "test";
- //^^^^ &'static str
+ //^^^^ &str
let test = InnerStruct {};
//^^^^ InnerStruct
@@ -616,12 +616,12 @@
fn main() {
let mut data = Vec::new();
- //^^^^ Vec<&'static str>
+ //^^^^ Vec<&str>
data.push("foo");
for i in data {
- //^ &'static str
+ //^ &str
let z = i;
- //^ &'static str
+ //^ &str
}
}
"#,
@@ -1015,7 +1015,7 @@
"#,
expect![[r#"
fn test<T>(t: T) {
- let f = |a: i32, b: &'static str, c: T| {};
+ let f = |a: i32, b: &str, c: T| {};
let result: () = f(42, "", t);
}
"#]],
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index 983ed36..1db4f8e 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -437,6 +437,7 @@
rustc_safe_intrinsic,
rustc_skip_array_during_method_dispatch,
rustc_skip_during_method_dispatch,
+ rustc_force_inline,
semitransparent,
shl_assign,
shl,
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 1b940c7..8a04bc7 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -1061,7 +1061,7 @@
),
work_done_progress_params: Default::default(),
});
- assert!(res.to_string().contains("&'static str"));
+ assert!(res.to_string().contains("&str"));
let res = server.send_request::<HoverRequest>(HoverParams {
text_document_position_params: TextDocumentPositionParams::new(
@@ -1070,7 +1070,7 @@
),
work_done_progress_params: Default::default(),
});
- assert!(res.to_string().contains("&'static str"));
+ assert!(res.to_string().contains("&str"));
server.request::<GotoTypeDefinition>(
GotoDefinitionParams {
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index 7c3e7fe..696928b 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -169,6 +169,17 @@
// region:phantom_data
#[lang = "phantom_data"]
pub struct PhantomData<T: PointeeSized>;
+
+ // region:clone
+ impl<T: PointeeSized> Clone for PhantomData<T> {
+ fn clone(&self) -> Self { Self }
+ }
+ // endregion:clone
+
+ // region:copy
+ impl<T: PointeeSized> Copy for PhantomData<T> {}
+ // endregion:copy
+
// endregion:phantom_data
// region:discriminant
@@ -1147,7 +1158,7 @@
pub struct Error;
pub type Result = crate::result::Result<(), Error>;
- pub struct Formatter<'a>;
+ pub struct Formatter<'a>(&'a ());
pub struct DebugTuple;
pub struct DebugStruct;
impl Formatter<'_> {
@@ -1620,6 +1631,12 @@
{
loop {}
}
+ fn collect<B: FromIterator<Self::Item>>(self) -> B
+ where
+ Self: Sized,
+ {
+ loop {}
+ }
// endregion:iterators
}
impl<I: Iterator + PointeeSized> Iterator for &mut I {
@@ -1689,10 +1706,13 @@
loop {}
}
}
+ pub trait FromIterator<A>: Sized {
+ fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self;
+ }
}
- pub use self::collect::IntoIterator;
+ pub use self::collect::{IntoIterator, FromIterator};
}
- pub use self::traits::{IntoIterator, Iterator};
+ pub use self::traits::{IntoIterator, FromIterator, Iterator};
}
// endregion:iterator
@@ -1988,7 +2008,7 @@
convert::AsRef, // :as_ref
convert::{From, Into, TryFrom, TryInto}, // :from
default::Default, // :default
- iter::{IntoIterator, Iterator}, // :iterator
+ iter::{IntoIterator, Iterator, FromIterator}, // :iterator
macros::builtin::{derive, derive_const}, // :derive
marker::Copy, // :copy
marker::Send, // :send
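The minicore additions above (`FromIterator`, `Iterator::collect`, and their prelude re-exports) are what let fixtures such as the `iter_collect` test earlier in this diff resolve `.collect()`. In plain Rust the pattern being modeled looks like this (illustrative only, not part of the diff):

    // A user type becomes a valid `.collect()` target by implementing `FromIterator`.
    struct Pairs(Vec<(i32, i32)>);

    impl FromIterator<(i32, i32)> for Pairs {
        fn from_iter<T: IntoIterator<Item = (i32, i32)>>(iter: T) -> Self {
            Pairs(iter.into_iter().collect())
        }
    }

    fn main() {
        // `collect` is generic over `B: FromIterator<Self::Item>`, so the target
        // type is picked via the annotation (or a turbofish).
        let pairs: Pairs = [(1, 2), (3, 4)].into_iter().collect();
        assert_eq!(pairs.0.len(), 2);
    }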
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index f91192b..0462835 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -235,6 +235,10 @@
return;
}
+ if is_ported_from_rustc(path, &["crates/hir-ty/src/next_solver"]) {
+ return;
+ }
+
let first_line = match text.lines().next() {
Some(it) => it,
None => return,
@@ -290,6 +294,11 @@
.any(|it| dirs_to_exclude.contains(&it))
}
+fn is_ported_from_rustc(p: &Path, dirs_to_exclude: &[&str]) -> bool {
+ let p = p.strip_prefix(project_root()).unwrap();
+ dirs_to_exclude.iter().any(|exclude| p.starts_with(exclude))
+}
+
#[derive(Default)]
struct TidyMarks {
hits: HashSet<String>,