Rollup merge of #143424 - hkBst:auto-deref, r=jhpratt
clippy fix: rely on autoderef
Changes instances of `&**self` to `self`.
diff --git a/Cargo.lock b/Cargo.lock
index caa8f28..c471234 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -154,6 +154,22 @@
]
[[package]]
+name = "cargo-util-schemas"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7dc1a6f7b5651af85774ae5a34b4e8be397d9cf4bc063b7e6dbd99a841837830"
+dependencies = [
+ "semver",
+ "serde",
+ "serde-untagged",
+ "serde-value",
+ "thiserror 2.0.12",
+ "toml",
+ "unicode-xid",
+ "url",
+]
+
+[[package]]
name = "cargo_metadata"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -161,7 +177,22 @@
dependencies = [
"camino",
"cargo-platform",
- "cargo-util-schemas",
+ "cargo-util-schemas 0.2.0",
+ "semver",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.12",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5cfca2aaa699835ba88faf58a06342a314a950d2b9686165e038286c30316868"
+dependencies = [
+ "camino",
+ "cargo-platform",
+ "cargo-util-schemas 0.8.2",
"semver",
"serde",
"serde_json",
@@ -570,12 +601,6 @@
]
[[package]]
-name = "heck"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
-
-[[package]]
name = "hermit-abi"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1021,6 +1046,15 @@
]
[[package]]
+name = "intrusive-collections"
+version = "0.9.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86"
+dependencies = [
+ "memoffset",
+]
+
+[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1187,13 +1221,16 @@
name = "lsp-server"
version = "0.7.8"
dependencies = [
+ "anyhow",
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
+ "rustc-hash 2.1.1",
"serde",
"serde_derive",
"serde_json",
+ "toolchain",
]
[[package]]
@@ -1231,7 +1268,7 @@
"expect-test",
"intern",
"parser",
- "ra-ap-rustc_lexer",
+ "ra-ap-rustc_lexer 0.122.0",
"rustc-hash 2.1.1",
"smallvec",
"span",
@@ -1428,6 +1465,16 @@
]
[[package]]
+name = "papaya"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
+dependencies = [
+ "equivalent",
+ "seize",
+]
+
+[[package]]
name = "parking_lot"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1457,8 +1504,8 @@
"drop_bomb",
"edition",
"expect-test",
- "ra-ap-rustc_lexer",
- "rustc-literal-escaper 0.0.4",
+ "ra-ap-rustc_lexer 0.122.0",
+ "rustc-literal-escaper",
"stdx",
"tracing",
]
@@ -1567,7 +1614,7 @@
"object",
"paths",
"proc-macro-test",
- "ra-ap-rustc_lexer",
+ "ra-ap-rustc_lexer 0.122.0",
"span",
"syntax-bridge",
"tt",
@@ -1586,7 +1633,7 @@
name = "proc-macro-test"
version = "0.0.0"
dependencies = [
- "cargo_metadata",
+ "cargo_metadata 0.20.0",
]
[[package]]
@@ -1627,7 +1674,7 @@
dependencies = [
"anyhow",
"base-db",
- "cargo_metadata",
+ "cargo_metadata 0.21.0",
"cfg",
"expect-test",
"intern",
@@ -1709,9 +1756,9 @@
[[package]]
name = "ra-ap-rustc_abi"
-version = "0.116.0"
+version = "0.122.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a967e3a9cd3e38b543f503978e0eccee461e3aea3f7b10e944959bff41dbe612"
+checksum = "fb01e1fec578003c85481c1cad4ff8cd8195b07c2dc85ae3f716108507ae15d5"
dependencies = [
"bitflags 2.9.1",
"ra-ap-rustc_hashes",
@@ -1721,18 +1768,18 @@
[[package]]
name = "ra-ap-rustc_hashes"
-version = "0.116.0"
+version = "0.122.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ea4c755ecbbffa5743c251344f484ebe571ec7bc5b36d80b2a8ae775d1a7a40"
+checksum = "e0ec056e72a472ffef8761ce96ece6c626eb07368c09d0105b6df30d27d07673"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
-version = "0.116.0"
+version = "0.122.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aca7ad7cf911538c619caa2162339fe98637e9e46f11bb0484ef96735df4d64a"
+checksum = "0fcdd1001db0295e59052e9f53aeda588bbe81e362534f4687d41bd44777b5a7"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@@ -1740,9 +1787,9 @@
[[package]]
name = "ra-ap-rustc_index_macros"
-version = "0.116.0"
+version = "0.122.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8767ba551c9355bc3031be072cc4bb0381106e5e7cd275e72b7a8c76051c4070"
+checksum = "728d64dd98e25530b32e3f7c7c1e844e52722b269360daa1cdeba9dff9727a26"
dependencies = [
"proc-macro2",
"quote",
@@ -1751,9 +1798,20 @@
[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.116.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6101374afb267e6c27e4e2eb0b1352e9f3504c1a8f716f619cd39244e2ed92ab"
+checksum = "22944e31fb91e9b3e75bcbc91e37d958b8c0825a6160927f2856831d2ce83b36"
+dependencies = [
+ "memchr",
+ "unicode-properties",
+ "unicode-xid",
+]
+
+[[package]]
+name = "ra-ap-rustc_lexer"
+version = "0.122.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "415f0821f512608d825b3215489a6a6a2c18ed9f0045953d514e7ec23d4b90ab"
dependencies = [
"memchr",
"unicode-properties",
@@ -1762,19 +1820,19 @@
[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.116.0"
+version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ecd88a19f00da4f43e6727d5013444cbc399804b5046dfa2bbcd28ebed3970ce"
+checksum = "81057891bc2063ad9e353f29462fbc47a0f5072560af34428ae9313aaa5e9d97"
dependencies = [
- "ra-ap-rustc_lexer",
- "rustc-literal-escaper 0.0.2",
+ "ra-ap-rustc_lexer 0.121.0",
+ "rustc-literal-escaper",
]
[[package]]
name = "ra-ap-rustc_pattern_analysis"
-version = "0.116.0"
+version = "0.122.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb332dd32d7850a799862533b1c021e6062558861a4ad57817bf522499fbb892"
+checksum = "4657fcfdfe06e2a02ec8180d4e7c95aecf4811ba50367e363d1a2300b7623284"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@@ -1842,7 +1900,7 @@
dependencies = [
"anyhow",
"base64",
- "cargo_metadata",
+ "cargo_metadata 0.21.0",
"cfg",
"crossbeam-channel",
"dirs",
@@ -1921,12 +1979,6 @@
[[package]]
name = "rustc-literal-escaper"
-version = "0.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04"
-
-[[package]]
-name = "rustc-literal-escaper"
version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab03008eb631b703dd16978282ae36c73282e7922fe101a4bd072a40ecea7b8b"
@@ -1955,16 +2007,18 @@
[[package]]
name = "salsa"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8fff508e3d6ef42a32607f7538e17171a877a12015e32036f46e99d00c95781"
+checksum = "2e235afdb8e510f38a07138fbe5a0b64691894358a9c0cbd813b1aade110efc9"
dependencies = [
"boxcar",
"crossbeam-queue",
- "dashmap",
+ "crossbeam-utils",
"hashbrown 0.15.4",
"hashlink",
"indexmap",
+ "intrusive-collections",
+ "papaya",
"parking_lot",
"portable-atomic",
"rayon",
@@ -1978,17 +2032,16 @@
[[package]]
name = "salsa-macro-rules"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea72b3c06f2ce6350fe3a0eeb7aaaf842d1d8352b706973c19c4f02e298a87c"
+checksum = "2edb86a7e9c91f6d30c9ce054312721dbe773a162db27bbfae834d16177b30ce"
[[package]]
name = "salsa-macros"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce92025bc160b27814a207cb78d680973af17f863c7f4fc56cf3a535e22f378"
+checksum = "d0778d6e209051bc4e75acfe83bcd7848601ec3dbe9c3dbb982829020e9128af"
dependencies = [
- "heck",
"proc-macro2",
"quote",
"syn",
@@ -2026,6 +2079,16 @@
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
+name = "seize"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
name = "semver"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2207,7 +2270,7 @@
"rayon",
"rowan",
"rustc-hash 2.1.1",
- "rustc-literal-escaper 0.0.4",
+ "rustc-literal-escaper",
"rustc_apfloat",
"smol_str",
"stdx",
@@ -2529,7 +2592,7 @@
dependencies = [
"arrayvec",
"intern",
- "ra-ap-rustc_lexer",
+ "ra-ap-rustc_lexer 0.122.0",
"stdx",
"text-size",
]
diff --git a/Cargo.toml b/Cargo.toml
index 0a8e6fe..700c116 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,7 +4,7 @@
resolver = "2"
[workspace.package]
-rust-version = "1.86"
+rust-version = "1.88"
edition = "2024"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]
@@ -49,6 +49,8 @@
# ungrammar = { path = "../ungrammar" }
# salsa = { path = "../salsa" }
+# salsa-macros = { path = "../salsa/components/salsa-macros" }
+# salsa-macro-rules = { path = "../salsa/components/salsa-macro-rules" }
[workspace.dependencies]
# local crates
@@ -87,11 +89,11 @@
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_lexer = { version = "0.116", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.116", default-features = false }
-ra-ap-rustc_index = { version = "0.116", default-features = false }
-ra-ap-rustc_abi = { version = "0.116", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.116", default-features = false }
+ra-ap-rustc_lexer = { version = "0.122", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.121", default-features = false }
+ra-ap-rustc_index = { version = "0.122", default-features = false }
+ra-ap-rustc_abi = { version = "0.122", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.122", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@@ -104,7 +106,7 @@
anyhow = "1.0.98"
arrayvec = "0.7.6"
bitflags = "2.9.1"
-cargo_metadata = "0.20.0"
+cargo_metadata = "0.21.0"
camino = "1.1.10"
chalk-solve = { version = "0.103.0", default-features = false }
chalk-ir = "0.103.0"
@@ -136,8 +138,12 @@
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
-salsa = { version = "0.22.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] }
-salsa-macros = "0.22.0"
+salsa = { version = "0.23.0", default-features = true, features = [
+ "rayon",
+ "salsa_unstable",
+ "macros",
+] }
+salsa-macros = "0.23.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 2a87b15..8c9393b 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -6,6 +6,7 @@
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input.
+use std::error::Error;
use std::hash::BuildHasherDefault;
use std::{fmt, mem, ops};
@@ -22,7 +23,49 @@
use crate::{CrateWorkspaceData, EditionedFileId, FxIndexSet, RootQueryDb};
-pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
+pub type ProcMacroPaths =
+ FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), ProcMacroLoadingError>>;
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ProcMacroLoadingError {
+ Disabled,
+ FailedToBuild,
+ MissingDylibPath,
+ NotYetBuilt,
+ NoProcMacros,
+ ProcMacroSrvError(Box<str>),
+}
+impl ProcMacroLoadingError {
+ pub fn is_hard_error(&self) -> bool {
+ match self {
+ ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
+ ProcMacroLoadingError::FailedToBuild
+ | ProcMacroLoadingError::MissingDylibPath
+ | ProcMacroLoadingError::NoProcMacros
+ | ProcMacroLoadingError::ProcMacroSrvError(_) => true,
+ }
+ }
+}
+
+impl Error for ProcMacroLoadingError {}
+impl fmt::Display for ProcMacroLoadingError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
+ ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
+ ProcMacroLoadingError::MissingDylibPath => {
+ write!(f, "proc-macro crate build data is missing a dylib path")
+ }
+ ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
+ ProcMacroLoadingError::NoProcMacros => {
+ write!(f, "proc macro library has no proc macros")
+ }
+ ProcMacroLoadingError::ProcMacroSrvError(msg) => {
+ write!(f, "proc macro server error: {msg}")
+ }
+ }
+ }
+}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 478fae6..ad17f17 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -14,8 +14,9 @@
input::{
BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
- DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroPaths, ReleaseChannel,
- SourceRoot, SourceRootId, TargetLayoutLoadResult, UniqueCrateData,
+ DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+ UniqueCrateData,
},
};
use dashmap::{DashMap, mapref::entry::Entry};
@@ -33,7 +34,7 @@
#[macro_export]
macro_rules! impl_intern_key {
($id:ident, $loc:ident) => {
- #[salsa_macros::interned(no_lifetime)]
+ #[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct $id {
pub loc: $loc,
@@ -43,7 +44,7 @@
impl ::std::fmt::Debug for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
f.debug_tuple(stringify!($id))
- .field(&format_args!("{:04x}", self.0.as_u32()))
+ .field(&format_args!("{:04x}", self.0.index()))
.finish()
}
}
@@ -167,7 +168,7 @@
}
}
-#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
+#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs
index 0ec082d..aed00aa 100644
--- a/crates/cfg/src/cfg_expr.rs
+++ b/crates/cfg/src/cfg_expr.rs
@@ -68,6 +68,11 @@
next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
}
+ #[cfg(feature = "tt")]
+ pub fn parse_from_iter<S: Copy>(tt: &mut tt::iter::TtIter<'_, S>) -> CfgExpr {
+ next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid)
+ }
+
/// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
match self {
@@ -96,7 +101,14 @@
};
let ret = match it.peek() {
- Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
+ Some(TtElement::Leaf(tt::Leaf::Punct(punct)))
+ // Don't consume on e.g. `=>`.
+ if punct.char == '='
+ && (punct.spacing == tt::Spacing::Alone
+ || it.remaining().flat_tokens().get(1).is_none_or(|peek2| {
+ !matches!(peek2, tt::TokenTree::Leaf(tt::Leaf::Punct(_)))
+ })) =>
+ {
match it.remaining().flat_tokens().get(1) {
Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => {
it.next();
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index 85bd193..d3dfc05 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -22,6 +22,7 @@
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
use syntax::{AstPtr, SyntaxNodePtr, ast};
+use thin_vec::ThinVec;
use triomphe::Arc;
use tt::TextRange;
@@ -93,17 +94,17 @@
pub type LifetimePtr = AstPtr<ast::Lifetime>;
pub type LifetimeSource = InFile<LifetimePtr>;
-#[derive(Debug, Eq, PartialEq)]
-pub struct ExpressionStore {
- pub exprs: Arena<Expr>,
- pub pats: Arena<Pat>,
- pub bindings: Arena<Binding>,
- pub labels: Arena<Label>,
- pub types: Arena<TypeRef>,
- pub lifetimes: Arena<LifetimeRef>,
+// We split the store into types-only and expressions, because most stores (e.g. generics)
+// don't store any expressions and this saves memory. Same thing for the source map.
+#[derive(Debug, PartialEq, Eq)]
+struct ExpressionOnlyStore {
+ exprs: Arena<Expr>,
+ pats: Arena<Pat>,
+ bindings: Arena<Binding>,
+ labels: Arena<Label>,
/// Id of the closure/coroutine that owns the corresponding binding. If a binding is owned by the
/// top level expression, it will not be listed in here.
- pub binding_owners: FxHashMap<BindingId, ExprId>,
+ binding_owners: FxHashMap<BindingId, ExprId>,
/// Block expressions in this store that may contain inner items.
block_scopes: Box<[BlockId]>,
@@ -114,8 +115,15 @@
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
}
-#[derive(Debug, Eq, PartialEq, Default)]
-pub struct ExpressionStoreSourceMap {
+#[derive(Debug, PartialEq, Eq)]
+pub struct ExpressionStore {
+ expr_only: Option<Box<ExpressionOnlyStore>>,
+ pub types: Arena<TypeRef>,
+ pub lifetimes: Arena<LifetimeRef>,
+}
+
+#[derive(Debug, Eq, Default)]
+struct ExpressionOnlySourceMap {
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
expr_map: FxHashMap<ExprSource, ExprOrPatId>,
@@ -127,26 +135,83 @@
label_map: FxHashMap<LabelSource, LabelId>,
label_map_back: ArenaMap<LabelId, LabelSource>,
- binding_definitions: FxHashMap<BindingId, SmallVec<[PatId; 4]>>,
+ binding_definitions:
+ ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>,
/// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
/// Instead, we use id of expression (`92`) to identify the field.
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
+ template_map: Option<Box<FormatTemplate>>,
+
+ expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
+
+ /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
+ /// the source map (since they're just as volatile).
+ //
+ // We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except
+ // maybe for cfgs, and they are also not common in type places).
+ diagnostics: ThinVec<ExpressionStoreDiagnostics>,
+}
+
+impl PartialEq for ExpressionOnlySourceMap {
+ fn eq(&self, other: &Self) -> bool {
+ // we only need to compare one of the two mappings
+ // as the other is a reverse mapping and thus will compare
+ // the same as normal mapping
+ let Self {
+ expr_map: _,
+ expr_map_back,
+ pat_map: _,
+ pat_map_back,
+ label_map: _,
+ label_map_back,
+ // If this changed, our pattern data must have changed
+ binding_definitions: _,
+ // If this changed, our expression data must have changed
+ field_map_back: _,
+ // If this changed, our pattern data must have changed
+ pat_field_map_back: _,
+ template_map,
+ expansions,
+ diagnostics,
+ } = self;
+ *expr_map_back == other.expr_map_back
+ && *pat_map_back == other.pat_map_back
+ && *label_map_back == other.label_map_back
+ && *template_map == other.template_map
+ && *expansions == other.expansions
+ && *diagnostics == other.diagnostics
+ }
+}
+
+#[derive(Debug, Eq, Default)]
+pub struct ExpressionStoreSourceMap {
+ expr_only: Option<Box<ExpressionOnlySourceMap>>,
+
types_map_back: ArenaMap<TypeRefId, TypeSource>,
types_map: FxHashMap<TypeSource, TypeRefId>,
lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
+ #[expect(
+ unused,
+ reason = "this is here for completeness, and maybe we'll need it in the future"
+ )]
lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
+}
- template_map: Option<Box<FormatTemplate>>,
-
- pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
-
- /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
- /// the source map (since they're just as volatile).
- pub diagnostics: Vec<ExpressionStoreDiagnostics>,
+impl PartialEq for ExpressionStoreSourceMap {
+ fn eq(&self, other: &Self) -> bool {
+ // we only need to compare one of the two mappings
+ // as the other is a reverse mapping and thus will compare
+ // the same as normal mapping
+ let Self { expr_only, types_map_back, types_map: _, lifetime_map_back, lifetime_map: _ } =
+ self;
+ *expr_only == other.expr_only
+ && *types_map_back == other.types_map_back
+ && *lifetime_map_back == other.lifetime_map_back
+ }
}
/// The body of an item (function, const etc.).
@@ -161,6 +226,42 @@
pub types: Arena<TypeRef>,
block_scopes: Vec<BlockId>,
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
+
+ // AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
+ // to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
+ expr_map: FxHashMap<ExprSource, ExprOrPatId>,
+ expr_map_back: ArenaMap<ExprId, ExprOrPatSource>,
+
+ pat_map: FxHashMap<PatSource, ExprOrPatId>,
+ pat_map_back: ArenaMap<PatId, ExprOrPatSource>,
+
+ label_map: FxHashMap<LabelSource, LabelId>,
+ label_map_back: ArenaMap<LabelId, LabelSource>,
+
+ types_map_back: ArenaMap<TypeRefId, TypeSource>,
+ types_map: FxHashMap<TypeSource, TypeRefId>,
+
+ lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
+ lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
+
+ binding_definitions:
+ ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>,
+
+ /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
+ /// Instead, we use id of expression (`92`) to identify the field.
+ field_map_back: FxHashMap<ExprId, FieldSource>,
+ pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
+
+ template_map: Option<Box<FormatTemplate>>,
+
+ expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
+
+ /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
+ /// the source map (since they're just as volatile).
+ //
+ // We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except
+ // maybe for cfgs, and they are also not common in type places).
+ pub(crate) diagnostics: Vec<ExpressionStoreDiagnostics>,
}
#[derive(Default, Debug, Eq, PartialEq)]
@@ -188,7 +289,7 @@
}
impl ExpressionStoreBuilder {
- pub fn finish(self) -> ExpressionStore {
+ pub fn finish(self) -> (ExpressionStore, ExpressionStoreSourceMap) {
let Self {
block_scopes,
mut exprs,
@@ -199,6 +300,23 @@
mut ident_hygiene,
mut types,
mut lifetimes,
+
+ mut expr_map,
+ mut expr_map_back,
+ mut pat_map,
+ mut pat_map_back,
+ mut label_map,
+ mut label_map_back,
+ mut types_map_back,
+ mut types_map,
+ mut lifetime_map_back,
+ mut lifetime_map,
+ mut binding_definitions,
+ mut field_map_back,
+ mut pat_field_map_back,
+ mut template_map,
+ mut expansions,
+ diagnostics,
} = self;
exprs.shrink_to_fit();
labels.shrink_to_fit();
@@ -209,24 +327,90 @@
types.shrink_to_fit();
lifetimes.shrink_to_fit();
- ExpressionStore {
- exprs,
- pats,
- bindings,
- labels,
- binding_owners,
- types,
- lifetimes,
- block_scopes: block_scopes.into_boxed_slice(),
- ident_hygiene,
+ expr_map.shrink_to_fit();
+ expr_map_back.shrink_to_fit();
+ pat_map.shrink_to_fit();
+ pat_map_back.shrink_to_fit();
+ label_map.shrink_to_fit();
+ label_map_back.shrink_to_fit();
+ types_map_back.shrink_to_fit();
+ types_map.shrink_to_fit();
+ lifetime_map_back.shrink_to_fit();
+ lifetime_map.shrink_to_fit();
+ binding_definitions.shrink_to_fit();
+ field_map_back.shrink_to_fit();
+ pat_field_map_back.shrink_to_fit();
+ if let Some(template_map) = &mut template_map {
+ let FormatTemplate {
+ format_args_to_captures,
+ asm_to_captures,
+ implicit_capture_to_source,
+ } = &mut **template_map;
+ format_args_to_captures.shrink_to_fit();
+ asm_to_captures.shrink_to_fit();
+ implicit_capture_to_source.shrink_to_fit();
}
+ expansions.shrink_to_fit();
+
+ let has_exprs =
+ !exprs.is_empty() || !labels.is_empty() || !pats.is_empty() || !bindings.is_empty();
+
+ let store = {
+ let expr_only = if has_exprs {
+ Some(Box::new(ExpressionOnlyStore {
+ exprs,
+ pats,
+ bindings,
+ labels,
+ binding_owners,
+ block_scopes: block_scopes.into_boxed_slice(),
+ ident_hygiene,
+ }))
+ } else {
+ None
+ };
+ ExpressionStore { expr_only, types, lifetimes }
+ };
+
+ let source_map = {
+ let expr_only = if has_exprs || !expansions.is_empty() || !diagnostics.is_empty() {
+ Some(Box::new(ExpressionOnlySourceMap {
+ expr_map,
+ expr_map_back,
+ pat_map,
+ pat_map_back,
+ label_map,
+ label_map_back,
+ binding_definitions,
+ field_map_back,
+ pat_field_map_back,
+ template_map,
+ expansions,
+ diagnostics: ThinVec::from_iter(diagnostics),
+ }))
+ } else {
+ None
+ };
+ ExpressionStoreSourceMap {
+ expr_only,
+ types_map_back,
+ types_map,
+ lifetime_map_back,
+ lifetime_map,
+ }
+ };
+
+ (store, source_map)
}
}
impl ExpressionStore {
- pub fn empty_singleton() -> Arc<Self> {
- static EMPTY: LazyLock<Arc<ExpressionStore>> =
- LazyLock::new(|| Arc::new(ExpressionStoreBuilder::default().finish()));
+ pub fn empty_singleton() -> (Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>) {
+ static EMPTY: LazyLock<(Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>)> =
+ LazyLock::new(|| {
+ let (store, source_map) = ExpressionStoreBuilder::default().finish();
+ (Arc::new(store), Arc::new(source_map))
+ });
EMPTY.clone()
}
@@ -235,7 +419,12 @@
&'a self,
db: &'a dyn DefDatabase,
) -> impl Iterator<Item = (BlockId, &'a DefMap)> + 'a {
- self.block_scopes.iter().map(move |&block| (block, block_def_map(db, block)))
+ self.expr_only
+ .as_ref()
+ .map(|it| &*it.block_scopes)
+ .unwrap_or_default()
+ .iter()
+ .map(move |&block| (block, block_def_map(db, block)))
}
pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
@@ -282,7 +471,8 @@
}
pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
- match self.binding_owners.get(&binding) {
+ let Some(expr_only) = &self.expr_only else { return false };
+ match expr_only.binding_owners.get(&binding) {
Some(it) => {
// We assign expression ids in a way that outer closures will receive
// a lower id
@@ -292,6 +482,11 @@
}
}
+ #[inline]
+ pub fn binding_owner(&self, id: BindingId) -> Option<ExprId> {
+ self.expr_only.as_ref()?.binding_owners.get(&id).copied()
+ }
+
/// Walks the immediate children expressions and calls `f` for each child expression.
///
/// Note that this does not walk const blocks.
@@ -563,16 +758,22 @@
});
}
+ #[inline]
+ #[track_caller]
+ fn assert_expr_only(&self) -> &ExpressionOnlyStore {
+ self.expr_only.as_ref().expect("should have `ExpressionStore::expr_only`")
+ }
+
fn binding_hygiene(&self, binding: BindingId) -> HygieneId {
- self.bindings[binding].hygiene
+ self.assert_expr_only().bindings[binding].hygiene
}
pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId {
- self.ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
+ self.assert_expr_only().ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
}
pub fn pat_path_hygiene(&self, pat: PatId) -> HygieneId {
- self.ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
+ self.assert_expr_only().ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
}
pub fn expr_or_pat_path_hygiene(&self, id: ExprOrPatId) -> HygieneId {
@@ -581,43 +782,72 @@
ExprOrPatId::PatId(id) => self.pat_path_hygiene(id),
}
}
+
+ #[inline]
+ pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> {
+ match &self.expr_only {
+ Some(it) => it.exprs.iter(),
+ None => const { &Arena::new() }.iter(),
+ }
+ }
+
+ #[inline]
+ pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
+ match &self.expr_only {
+ Some(it) => it.pats.iter(),
+ None => const { &Arena::new() }.iter(),
+ }
+ }
+
+ #[inline]
+ pub fn bindings(&self) -> impl Iterator<Item = (BindingId, &Binding)> {
+ match &self.expr_only {
+ Some(it) => it.bindings.iter(),
+ None => const { &Arena::new() }.iter(),
+ }
+ }
}
impl Index<ExprId> for ExpressionStore {
type Output = Expr;
+ #[inline]
fn index(&self, expr: ExprId) -> &Expr {
- &self.exprs[expr]
+ &self.assert_expr_only().exprs[expr]
}
}
impl Index<PatId> for ExpressionStore {
type Output = Pat;
+ #[inline]
fn index(&self, pat: PatId) -> &Pat {
- &self.pats[pat]
+ &self.assert_expr_only().pats[pat]
}
}
impl Index<LabelId> for ExpressionStore {
type Output = Label;
+ #[inline]
fn index(&self, label: LabelId) -> &Label {
- &self.labels[label]
+ &self.assert_expr_only().labels[label]
}
}
impl Index<BindingId> for ExpressionStore {
type Output = Binding;
+ #[inline]
fn index(&self, b: BindingId) -> &Binding {
- &self.bindings[b]
+ &self.assert_expr_only().bindings[b]
}
}
impl Index<TypeRefId> for ExpressionStore {
type Output = TypeRef;
+ #[inline]
fn index(&self, b: TypeRefId) -> &TypeRef {
&self.types[b]
}
@@ -626,6 +856,7 @@
impl Index<LifetimeRefId> for ExpressionStore {
type Output = LifetimeRef;
+ #[inline]
fn index(&self, b: LifetimeRefId) -> &LifetimeRef {
&self.lifetimes[b]
}
@@ -646,12 +877,6 @@
// FIXME: Change `node_` prefix to something more reasonable.
// Perhaps `expr_syntax` and `expr_id`?
impl ExpressionStoreSourceMap {
- pub fn empty_singleton() -> Arc<Self> {
- static EMPTY: LazyLock<Arc<ExpressionStoreSourceMap>> =
- LazyLock::new(|| Arc::new(ExpressionStoreSourceMap::default()));
- EMPTY.clone()
- }
-
pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
match id {
ExprOrPatId::ExprId(id) => self.expr_syntax(id),
@@ -659,30 +884,46 @@
}
}
+ #[inline]
+ fn expr_or_synthetic(&self) -> Result<&ExpressionOnlySourceMap, SyntheticSyntax> {
+ self.expr_only.as_deref().ok_or(SyntheticSyntax)
+ }
+
+ #[inline]
+ fn expr_only(&self) -> Option<&ExpressionOnlySourceMap> {
+ self.expr_only.as_deref()
+ }
+
+ #[inline]
+ #[track_caller]
+ fn assert_expr_only(&self) -> &ExpressionOnlySourceMap {
+ self.expr_only.as_ref().expect("should have `ExpressionStoreSourceMap::expr_only`")
+ }
+
pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprOrPatSource, SyntheticSyntax> {
- self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
+ self.expr_or_synthetic()?.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
}
pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprOrPatId> {
let src = node.map(AstPtr::new);
- self.expr_map.get(&src).cloned()
+ self.expr_only()?.expr_map.get(&src).cloned()
}
pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
let src = node.map(AstPtr::new);
- self.expansions.get(&src).cloned()
+ self.expr_only()?.expansions.get(&src).cloned()
}
pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ {
- self.expansions.iter().map(|(&a, &b)| (a, b))
+ self.expr_only().into_iter().flat_map(|it| it.expansions.iter().map(|(&a, &b)| (a, b)))
}
pub fn pat_syntax(&self, pat: PatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
- self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
+ self.expr_or_synthetic()?.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
}
pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<ExprOrPatId> {
- self.pat_map.get(&node.map(AstPtr::new)).cloned()
+ self.expr_only()?.pat_map.get(&node.map(AstPtr::new)).cloned()
}
pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> {
@@ -694,49 +935,50 @@
}
pub fn label_syntax(&self, label: LabelId) -> LabelSource {
- self.label_map_back[label]
+ self.assert_expr_only().label_map_back[label]
}
pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] {
- self.binding_definitions.get(&binding).map_or(&[], Deref::deref)
+ self.assert_expr_only().binding_definitions.get(binding).map_or(&[], Deref::deref)
}
pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
let src = node.map(AstPtr::new);
- self.label_map.get(&src).cloned()
+ self.expr_only()?.label_map.get(&src).cloned()
}
pub fn field_syntax(&self, expr: ExprId) -> FieldSource {
- self.field_map_back[&expr]
+ self.assert_expr_only().field_map_back[&expr]
}
pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource {
- self.pat_field_map_back[&pat]
+ self.assert_expr_only().pat_field_map_back[&pat]
}
pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprOrPatId> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::MacroExpr>).map(AstPtr::upcast);
- self.expr_map.get(&src).copied()
+ self.expr_only()?.expr_map.get(&src).copied()
}
pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> {
- self.expansions.iter()
+ self.expr_only().into_iter().flat_map(|it| it.expansions.iter())
}
pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
- self.expansions.get(&node.map(AstPtr::new)).copied()
+ self.expr_only()?.expansions.get(&node.map(AstPtr::new)).copied()
}
pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> {
+ let expr_only = self.expr_only()?;
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
- let (hygiene, names) = self
+ let (hygiene, names) = expr_only
.template_map
.as_ref()?
.format_args_to_captures
- .get(&self.expr_map.get(&src)?.as_expr()?)?;
+ .get(&expr_only.expr_map.get(&src)?.as_expr()?)?;
Some((*hygiene, &**names))
}
@@ -744,67 +986,28 @@
&self,
capture_expr: ExprId,
) -> Option<InFile<(ExprPtr, TextRange)>> {
- self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied()
+ self.expr_only()?
+ .template_map
+ .as_ref()?
+ .implicit_capture_to_source
+ .get(&capture_expr)
+ .copied()
}
pub fn asm_template_args(
&self,
node: InFile<&ast::AsmExpr>,
) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> {
+ let expr_only = self.expr_only()?;
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
- let expr = self.expr_map.get(&src)?.as_expr()?;
- Some(expr)
- .zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref))
+ let expr = expr_only.expr_map.get(&src)?.as_expr()?;
+ Some(expr).zip(
+ expr_only.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref),
+ )
}
/// Get a reference to the source map's diagnostics.
pub fn diagnostics(&self) -> &[ExpressionStoreDiagnostics] {
- &self.diagnostics
- }
-
- fn shrink_to_fit(&mut self) {
- let Self {
- expr_map,
- expr_map_back,
- pat_map,
- pat_map_back,
- label_map,
- label_map_back,
- field_map_back,
- pat_field_map_back,
- expansions,
- template_map,
- diagnostics,
- binding_definitions,
- types_map,
- types_map_back,
- lifetime_map_back,
- lifetime_map,
- } = self;
- if let Some(template_map) = template_map {
- let FormatTemplate {
- format_args_to_captures,
- asm_to_captures,
- implicit_capture_to_source,
- } = &mut **template_map;
- format_args_to_captures.shrink_to_fit();
- asm_to_captures.shrink_to_fit();
- implicit_capture_to_source.shrink_to_fit();
- }
- expr_map.shrink_to_fit();
- expr_map_back.shrink_to_fit();
- pat_map.shrink_to_fit();
- pat_map_back.shrink_to_fit();
- label_map.shrink_to_fit();
- label_map_back.shrink_to_fit();
- field_map_back.shrink_to_fit();
- pat_field_map_back.shrink_to_fit();
- expansions.shrink_to_fit();
- diagnostics.shrink_to_fit();
- binding_definitions.shrink_to_fit();
- types_map.shrink_to_fit();
- types_map_back.shrink_to_fit();
- lifetime_map.shrink_to_fit();
- lifetime_map_back.shrink_to_fit();
+ self.expr_only().map(|it| &*it.diagnostics).unwrap_or_default()
}
}
diff --git a/crates/hir-def/src/expr_store/body.rs b/crates/hir-def/src/expr_store/body.rs
index fb6d931..c955393 100644
--- a/crates/hir-def/src/expr_store/body.rs
+++ b/crates/hir-def/src/expr_store/body.rs
@@ -36,6 +36,7 @@
impl ops::Deref for Body {
type Target = ExpressionStore;
+ #[inline]
fn deref(&self) -> &Self::Target {
&self.store
}
@@ -61,6 +62,7 @@
impl ops::Deref for BodySourceMap {
type Target = ExpressionStoreSourceMap;
+ #[inline]
fn deref(&self) -> &Self::Target {
&self.store
}
@@ -102,9 +104,7 @@
}
};
let module = def.module(db);
- let (body, mut source_map) =
- lower_body(db, def, file_id, module, params, body, is_async_fn);
- source_map.store.shrink_to_fit();
+ let (body, source_map) = lower_body(db, def, file_id, module, params, body, is_async_fn);
(Arc::new(body), Arc::new(source_map))
}
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index c0e51b3..4e87774 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -121,14 +121,10 @@
params = (0..count).map(|_| collector.missing_pat()).collect();
};
let body_expr = collector.missing_expr();
+ let (store, source_map) = collector.store.finish();
return (
- Body {
- store: collector.store.finish(),
- params: params.into_boxed_slice(),
- self_param,
- body_expr,
- },
- BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
+ Body { store, params: params.into_boxed_slice(), self_param, body_expr },
+ BodySourceMap { self_param: source_map_self_param, store: source_map },
);
}
@@ -171,14 +167,10 @@
},
);
+ let (store, source_map) = collector.store.finish();
(
- Body {
- store: collector.store.finish(),
- params: params.into_boxed_slice(),
- self_param,
- body_expr,
- },
- BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
+ Body { store, params: params.into_boxed_slice(), self_param, body_expr },
+ BodySourceMap { self_param: source_map_self_param, store: source_map },
)
}
@@ -190,7 +182,8 @@
let mut expr_collector = ExprCollector::new(db, module, type_ref.file_id);
let type_ref =
expr_collector.lower_type_ref_opt(type_ref.value, &mut ExprCollector::impl_trait_allocator);
- (expr_collector.store.finish(), expr_collector.source_map, type_ref)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, type_ref)
}
pub(crate) fn lower_generic_params(
@@ -205,7 +198,8 @@
let mut collector = generics::GenericParamsCollector::new(def);
collector.lower(&mut expr_collector, param_list, where_clause);
let params = collector.finish();
- (Arc::new(expr_collector.store.finish()), params, expr_collector.source_map)
+ let (store, source_map) = expr_collector.store.finish();
+ (Arc::new(store), params, source_map)
}
pub(crate) fn lower_impl(
@@ -232,7 +226,8 @@
impl_syntax.value.where_clause(),
);
let params = collector.finish();
- (expr_collector.store.finish(), expr_collector.source_map, self_ty, trait_, params)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, self_ty, trait_, params)
}
pub(crate) fn lower_trait(
@@ -253,7 +248,8 @@
trait_syntax.value.where_clause(),
);
let params = collector.finish();
- (expr_collector.store.finish(), expr_collector.source_map, params)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, params)
}
pub(crate) fn lower_trait_alias(
@@ -274,7 +270,8 @@
trait_syntax.value.where_clause(),
);
let params = collector.finish();
- (expr_collector.store.finish(), expr_collector.source_map, params)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, params)
}
pub(crate) fn lower_type_alias(
@@ -313,7 +310,8 @@
.value
.ty()
.map(|ty| expr_collector.lower_type_ref(ty, &mut ExprCollector::impl_trait_allocator));
- (expr_collector.store.finish(), expr_collector.source_map, params, bounds, type_ref)
+ let (store, source_map) = expr_collector.store.finish();
+ (store, source_map, params, bounds, type_ref)
}
pub(crate) fn lower_function(
@@ -421,9 +419,10 @@
} else {
return_type
};
+ let (store, source_map) = expr_collector.store.finish();
(
- expr_collector.store.finish(),
- expr_collector.source_map,
+ store,
+ source_map,
generics,
params.into_boxed_slice(),
return_type,
@@ -440,7 +439,6 @@
local_def_map: &'db LocalDefMap,
module: ModuleId,
pub store: ExpressionStoreBuilder,
- pub(crate) source_map: ExpressionStoreSourceMap,
// state stuff
// Prevent nested impl traits like `impl Foo<impl Bar>`.
@@ -551,7 +549,6 @@
module,
def_map,
local_def_map,
- source_map: ExpressionStoreSourceMap::default(),
store: ExpressionStoreBuilder::default(),
expander,
current_try_block_label: None,
@@ -698,7 +695,7 @@
let id = self.collect_macro_call(mcall, macro_ptr, true, |this, expansion| {
this.lower_type_ref_opt(expansion, impl_trait_lower_fn)
});
- self.source_map.types_map.insert(src, id);
+ self.store.types_map.insert(src, id);
return id;
}
None => TypeRef::Error,
@@ -732,8 +729,8 @@
fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId {
let id = self.store.types.alloc(type_ref);
let ptr = self.expander.in_file(node);
- self.source_map.types_map_back.insert(id, ptr);
- self.source_map.types_map.insert(ptr, id);
+ self.store.types_map_back.insert(id, ptr);
+ self.store.types_map.insert(ptr, id);
id
}
@@ -744,8 +741,8 @@
) -> LifetimeRefId {
let id = self.store.lifetimes.alloc(lifetime_ref);
let ptr = self.expander.in_file(node);
- self.source_map.lifetime_map_back.insert(id, ptr);
- self.source_map.lifetime_map.insert(ptr, id);
+ self.store.lifetime_map_back.insert(id, ptr);
+ self.store.lifetime_map.insert(ptr, id);
id
}
@@ -1190,14 +1187,14 @@
}
ast::Expr::ContinueExpr(e) => {
let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| {
- self.source_map.diagnostics.push(e);
+ self.store.diagnostics.push(e);
None
});
self.alloc_expr(Expr::Continue { label }, syntax_ptr)
}
ast::Expr::BreakExpr(e) => {
let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| {
- self.source_map.diagnostics.push(e);
+ self.store.diagnostics.push(e);
None
});
let expr = e.expr().map(|e| self.collect_expr(e));
@@ -1207,7 +1204,7 @@
let inner = self.collect_expr_opt(e.expr());
// make the paren expr point to the inner expression as well for IDE resolution
let src = self.expander.in_file(syntax_ptr);
- self.source_map.expr_map.insert(src, inner.into());
+ self.store.expr_map.insert(src, inner.into());
inner
}
ast::Expr::ReturnExpr(e) => {
@@ -1248,7 +1245,7 @@
None => self.missing_expr(),
};
let src = self.expander.in_file(AstPtr::new(&field));
- self.source_map.field_map_back.insert(expr, src);
+ self.store.field_map_back.insert(expr, src);
Some(RecordLitField { name, expr })
})
.collect();
@@ -1271,12 +1268,10 @@
ast::Expr::AwaitExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
if let Awaitable::No(location) = self.is_lowering_awaitable_block() {
- self.source_map.diagnostics.push(
- ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
- node: self.expander.in_file(AstPtr::new(&e)),
- location: location.to_string(),
- },
- );
+ self.store.diagnostics.push(ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
+ node: self.expander.in_file(AstPtr::new(&e)),
+ location: location.to_string(),
+ });
}
self.alloc_expr(Expr::Await { expr }, syntax_ptr)
}
@@ -1442,7 +1437,7 @@
// Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro
let src = self.expander.in_file(syntax_ptr);
- self.source_map.expr_map.insert(src, id.into());
+ self.store.expr_map.insert(src, id.into());
id
}
None => self.alloc_expr(Expr::Missing, syntax_ptr),
@@ -1486,7 +1481,7 @@
let expr = self.collect_expr(expr);
// Do not use `alloc_pat_from_expr()` here, it will override the entry in `expr_map`.
let id = self.store.pats.alloc(Pat::Expr(expr));
- self.source_map.pat_map_back.insert(id, src);
+ self.store.pat_map_back.insert(id, src);
id
})
}
@@ -1555,7 +1550,7 @@
let id = self.collect_macro_call(e, macro_ptr, true, |this, expansion| {
this.collect_expr_as_pat_opt(expansion)
});
- self.source_map.expr_map.insert(src, id.into());
+ self.store.expr_map.insert(src, id.into());
id
}
ast::Expr::RecordExpr(e) => {
@@ -1576,7 +1571,7 @@
let pat = self.collect_expr_as_pat(field_expr);
let name = f.field_name()?.as_name();
let src = self.expander.in_file(AstPtr::new(&f).wrap_left());
- self.source_map.pat_field_map_back.insert(pat, src);
+ self.store.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat })
})
.collect();
@@ -1622,7 +1617,7 @@
);
if let Either::Left(pat) = pat {
let src = this.expander.in_file(AstPtr::new(&expr).wrap_left());
- this.source_map.pat_map_back.insert(pat, src);
+ this.store.pat_map_back.insert(pat, src);
}
pat
}
@@ -1968,7 +1963,7 @@
self.module.krate(),
resolver,
&mut |ptr, call| {
- _ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call);
+ _ = self.store.expansions.insert(ptr.map(|(it, _)| it), call);
},
)
}
@@ -1978,19 +1973,17 @@
Ok(res) => res,
Err(UnresolvedMacro { path }) => {
if record_diagnostics {
- self.source_map.diagnostics.push(
- ExpressionStoreDiagnostics::UnresolvedMacroCall {
- node: self.expander.in_file(syntax_ptr),
- path,
- },
- );
+ self.store.diagnostics.push(ExpressionStoreDiagnostics::UnresolvedMacroCall {
+ node: self.expander.in_file(syntax_ptr),
+ path,
+ });
}
return collector(self, None);
}
};
if record_diagnostics {
if let Some(err) = res.err {
- self.source_map
+ self.store
.diagnostics
.push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err });
}
@@ -2001,7 +1994,7 @@
// Keep collecting even with expansion errors so we can provide completions and
// other services in incomplete macro expressions.
if let Some(macro_file) = self.expander.current_file_id().macro_file() {
- self.source_map.expansions.insert(macro_call_ptr, macro_file);
+ self.store.expansions.insert(macro_call_ptr, macro_file);
}
if record_diagnostics {
@@ -2050,7 +2043,7 @@
// Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro
let src = self.expander.in_file(syntax_ptr);
- self.source_map.expr_map.insert(src, tail.into());
+ self.store.expr_map.insert(src, tail.into());
})
}
@@ -2361,7 +2354,7 @@
let pat = self.collect_pat(ast_pat, binding_list);
let name = f.field_name()?.as_name();
let src = self.expander.in_file(AstPtr::new(&f).wrap_right());
- self.source_map.pat_field_map_back.insert(pat, src);
+ self.store.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat })
})
.collect();
@@ -2424,7 +2417,7 @@
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list)
});
- self.source_map.pat_map.insert(src, pat.into());
+ self.store.pat_map.insert(src, pat.into());
return pat;
}
None => Pat::Missing,
@@ -2515,7 +2508,7 @@
}
});
if let Some(pat) = pat.left() {
- self.source_map.pat_map.insert(src, pat.into());
+ self.store.pat_map.insert(src, pat.into());
}
pat
}
@@ -2537,7 +2530,7 @@
match enabled {
Ok(()) => true,
Err(cfg) => {
- self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
+ self.store.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
cfg,
opts: self.cfg_options.clone(),
@@ -2548,7 +2541,7 @@
}
fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) {
- self.source_map.binding_definitions.entry(binding_id).or_default().push(pat_id);
+ self.store.binding_definitions.entry(binding_id).or_default().push(pat_id);
}
// region: labels
@@ -2724,7 +2717,7 @@
|name, range| {
let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name)));
if let Some(range) = range {
- self.source_map
+ self.store
.template_map
.get_or_insert_with(Default::default)
.implicit_capture_to_source
@@ -2836,7 +2829,7 @@
)
};
- self.source_map
+ self.store
.template_map
.get_or_insert_with(Default::default)
.format_args_to_captures
@@ -3386,8 +3379,8 @@
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
- self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
- self.source_map.expr_map.insert(src, id.into());
+ self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
+ self.store.expr_map.insert(src, id.into());
id
}
// FIXME: desugared exprs don't have ptr, that's wrong and should be fixed.
@@ -3398,9 +3391,9 @@
fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
- self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
+ self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
// We intentionally don't fill this as it could overwrite a non-desugared entry
- // self.source_map.expr_map.insert(src, id);
+ // self.store.expr_map.insert(src, id);
id
}
fn missing_expr(&mut self) -> ExprId {
@@ -3423,24 +3416,24 @@
fn alloc_pat_from_expr(&mut self, pat: Pat, ptr: ExprPtr) -> PatId {
let src = self.expander.in_file(ptr);
let id = self.store.pats.alloc(pat);
- self.source_map.expr_map.insert(src, id.into());
- self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
+ self.store.expr_map.insert(src, id.into());
+ self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
id
}
fn alloc_expr_from_pat(&mut self, expr: Expr, ptr: PatPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
- self.source_map.pat_map.insert(src, id.into());
- self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
+ self.store.pat_map.insert(src, id.into());
+ self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
id
}
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
let src = self.expander.in_file(ptr);
let id = self.store.pats.alloc(pat);
- self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
- self.source_map.pat_map.insert(src, id.into());
+ self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
+ self.store.pat_map.insert(src, id.into());
id
}
// FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow.
@@ -3454,8 +3447,8 @@
fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
let src = self.expander.in_file(ptr);
let id = self.store.labels.alloc(label);
- self.source_map.label_map_back.insert(id, src);
- self.source_map.label_map.insert(src, id);
+ self.store.label_map_back.insert(id, src);
+ self.store.label_map.insert(src, id);
id
}
// FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow.
diff --git a/crates/hir-def/src/expr_store/lower/asm.rs b/crates/hir-def/src/expr_store/lower/asm.rs
index d36e520..3bc4afb 100644
--- a/crates/hir-def/src/expr_store/lower/asm.rs
+++ b/crates/hir-def/src/expr_store/lower/asm.rs
@@ -10,7 +10,7 @@
use crate::{
expr_store::lower::{ExprCollector, FxIndexSet},
- hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmRegOrRegClass},
+ hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmKind, InlineAsmRegOrRegClass},
};
impl ExprCollector<'_> {
@@ -269,11 +269,20 @@
}
})
};
+
+ let kind = if asm.global_asm_token().is_some() {
+ InlineAsmKind::GlobalAsm
+ } else if asm.naked_asm_token().is_some() {
+ InlineAsmKind::NakedAsm
+ } else {
+ InlineAsmKind::Asm
+ };
+
let idx = self.alloc_expr(
- Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }),
+ Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options, kind }),
syntax_ptr,
);
- self.source_map
+ self.store
.template_map
.get_or_insert_with(Default::default)
.asm_to_captures
diff --git a/crates/hir-def/src/expr_store/lower/path/tests.rs b/crates/hir-def/src/expr_store/lower/path/tests.rs
index 8fd81c7..f507841 100644
--- a/crates/hir-def/src/expr_store/lower/path/tests.rs
+++ b/crates/hir-def/src/expr_store/lower/path/tests.rs
@@ -23,7 +23,7 @@
let mut ctx =
ExprCollector::new(&db, crate_def_map(&db, krate).root_module_id(), file_id.into());
let lowered_path = ctx.lower_path(path, &mut ExprCollector::impl_trait_allocator);
- let store = ctx.store.finish();
+ let (store, _) = ctx.store.finish();
(db, store, lowered_path)
}
diff --git a/crates/hir-def/src/expr_store/path.rs b/crates/hir-def/src/expr_store/path.rs
index db83e73..19c7ce0 100644
--- a/crates/hir-def/src/expr_store/path.rs
+++ b/crates/hir-def/src/expr_store/path.rs
@@ -29,8 +29,8 @@
// This type is being used a lot, make sure it doesn't grow unintentionally.
#[cfg(target_arch = "x86_64")]
const _: () = {
- assert!(size_of::<Path>() == 16);
- assert!(size_of::<Option<Path>>() == 16);
+ assert!(size_of::<Path>() == 24);
+ assert!(size_of::<Option<Path>>() == 24);
};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
diff --git a/crates/hir-def/src/expr_store/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs
index 87bcd33..f1b0113 100644
--- a/crates/hir-def/src/expr_store/pretty.rs
+++ b/crates/hir-def/src/expr_store/pretty.rs
@@ -902,7 +902,7 @@
let mut same_name = false;
if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] {
if let Binding { name, mode: BindingAnnotation::Unannotated, .. } =
- &self.store.bindings[*id]
+ &self.store.assert_expr_only().bindings[*id]
{
if name.as_str() == field_name {
same_name = true;
@@ -1063,7 +1063,7 @@
}
fn print_binding(&mut self, id: BindingId) {
- let Binding { name, mode, .. } = &self.store.bindings[id];
+ let Binding { name, mode, .. } = &self.store.assert_expr_only().bindings[id];
let mode = match mode {
BindingAnnotation::Unannotated => "",
BindingAnnotation::Mutable => "mut ",
diff --git a/crates/hir-def/src/expr_store/scope.rs b/crates/hir-def/src/expr_store/scope.rs
index a46711c..1952dae 100644
--- a/crates/hir-def/src/expr_store/scope.rs
+++ b/crates/hir-def/src/expr_store/scope.rs
@@ -106,7 +106,9 @@
let mut scopes = ExprScopes {
scopes: Arena::default(),
scope_entries: Arena::default(),
- scope_by_expr: ArenaMap::with_capacity(body.exprs.len()),
+ scope_by_expr: ArenaMap::with_capacity(
+ body.expr_only.as_ref().map_or(0, |it| it.exprs.len()),
+ ),
};
let mut root = scopes.root_scope();
if let Some(self_param) = body.self_param {
@@ -179,7 +181,7 @@
binding: BindingId,
hygiene: HygieneId,
) {
- let Binding { name, .. } = &store.bindings[binding];
+ let Binding { name, .. } = &store[binding];
let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding, hygiene });
self.scopes[scope].entries =
IdxRange::new_inclusive(self.scopes[scope].entries.start()..=entry);
@@ -251,7 +253,7 @@
scope: &mut ScopeId,
) {
let make_label =
- |label: &Option<LabelId>| label.map(|label| (label, store.labels[label].name.clone()));
+ |label: &Option<LabelId>| label.map(|label| (label, store[label].name.clone()));
let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| {
compute_expr_scopes(expr, store, scopes, scope)
@@ -534,9 +536,8 @@
};
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
- let pat_src = source_map
- .pat_syntax(*source_map.binding_definitions[&resolved.binding()].first().unwrap())
- .unwrap();
+ let pat_src =
+ source_map.pat_syntax(source_map.patterns_for_binding(resolved.binding())[0]).unwrap();
let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
assert_eq!(local_name.text_range(), expected_name.syntax().text_range());
diff --git a/crates/hir-def/src/expr_store/tests/body.rs b/crates/hir-def/src/expr_store/tests/body.rs
index 927e280..c31428b 100644
--- a/crates/hir-def/src/expr_store/tests/body.rs
+++ b/crates/hir-def/src/expr_store/tests/body.rs
@@ -508,9 +508,9 @@
}
"#,
);
- assert_eq!(body.bindings.len(), 1, "should have a binding for `B`");
+ assert_eq!(body.assert_expr_only().bindings.len(), 1, "should have a binding for `B`");
assert_eq!(
- body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
+ body[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
"B",
"should have a binding for `B`",
);
@@ -566,6 +566,7 @@
);
let mtch_arms = body
+ .assert_expr_only()
.exprs
.iter()
.find_map(|(_, expr)| {
@@ -578,10 +579,10 @@
.unwrap();
let MatchArm { pat, .. } = mtch_arms[1];
- match body.pats[pat] {
+ match body[pat] {
Pat::Range { start, end } => {
- let hir_start = &body.exprs[start.unwrap()];
- let hir_end = &body.exprs[end.unwrap()];
+ let hir_start = &body[start.unwrap()];
+ let hir_end = &body[end.unwrap()];
assert!(matches!(hir_start, Expr::Path { .. }));
assert!(matches!(hir_end, Expr::Path { .. }));
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index 0fc7857..e70cd2c 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -332,6 +332,17 @@
pub struct InlineAsm {
pub operands: Box<[(Option<Name>, AsmOperand)]>,
pub options: AsmOptions,
+ pub kind: InlineAsmKind,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum InlineAsmKind {
+ /// `asm!()`.
+ Asm,
+ /// `global_asm!()`.
+ GlobalAsm,
+ /// `naked_asm!()`.
+ NakedAsm,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index eb3b92d..eacc3f3 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -149,7 +149,7 @@
}
#[cfg(target_arch = "x86_64")]
-const _: () = assert!(size_of::<TypeRef>() == 16);
+const _: () = assert!(size_of::<TypeRef>() == 24);
pub type TypeRefId = Idx<TypeRef>;
diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs
index f327366..5ab61c8 100644
--- a/crates/hir-def/src/item_tree/lower.rs
+++ b/crates/hir-def/src/item_tree/lower.rs
@@ -143,6 +143,8 @@
ast::Item::MacroRules(ast) => self.lower_macro_rules(ast)?.into(),
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
+ // FIXME: Handle `global_asm!()`.
+ ast::Item::AsmExpr(_) => return None,
};
let attrs = RawAttrs::new(self.db, item, self.span_map());
self.add_attrs(mod_item.ast_id(), attrs);
diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs
index 5923b3e..91b42be 100644
--- a/crates/hir-def/src/item_tree/tests.rs
+++ b/crates/hir-def/src/item_tree/tests.rs
@@ -35,10 +35,10 @@
#![no_std]
#![doc = " another file comment"]
- // AstId: ExternCrate[5A82, 0]
+ // AstId: ExternCrate[070B, 0]
pub(self) extern crate self as renamed;
- // AstId: ExternCrate[7E1C, 0]
+ // AstId: ExternCrate[1EA5, 0]
pub(in super) extern crate bli;
// AstId: Use[0000, 0]
@@ -78,15 +78,15 @@
// AstId: ExternBlock[0000, 0]
extern {
#[on_extern_type]
- // AstId: TypeAlias[9FDF, 0]
+ // AstId: TypeAlias[A09C, 0]
pub(self) type ExType;
#[on_extern_static]
- // AstId: Static[43C1, 0]
+ // AstId: Static[D85E, 0]
pub(self) static EX_STATIC = _;
#[on_extern_fn]
- // AstId: Fn[452D, 0]
+ // AstId: Fn[B240, 0]
pub(self) fn ex_fn;
}
"#]],
@@ -124,20 +124,20 @@
}
"#,
expect![[r#"
- // AstId: Struct[DFF3, 0]
+ // AstId: Struct[ED35, 0]
pub(self) struct Unit;
#[derive(Debug)]
- // AstId: Struct[C7A1, 0]
+ // AstId: Struct[A47C, 0]
pub(self) struct Struct { ... }
- // AstId: Struct[DAC2, 0]
+ // AstId: Struct[C8C9, 0]
pub(self) struct Tuple(...);
- // AstId: Union[2DBB, 0]
+ // AstId: Union[2797, 0]
pub(self) union Ize { ... }
- // AstId: Enum[7FF8, 0]
+ // AstId: Enum[7D23, 0]
pub(self) enum E { ... }
"#]],
);
@@ -162,18 +162,18 @@
}
"#,
expect![[r#"
- // AstId: Static[B393, 0]
+ // AstId: Static[F7C1, 0]
pub static ST = _;
- // AstId: Const[B309, 0]
+ // AstId: Const[84BB, 0]
pub(self) const _ = _;
#[attr]
#[inner_attr_in_fn]
- // AstId: Fn[75E3, 0]
+ // AstId: Fn[BE8F, 0]
pub(self) fn f;
- // AstId: Trait[2998, 0]
+ // AstId: Trait[9320, 0]
pub(self) trait Tr { ... }
"#]],
);
@@ -197,16 +197,16 @@
expect![[r##"
#[doc = " outer"]
#[doc = " inner"]
- // AstId: Module[CF93, 0]
+ // AstId: Module[03AE, 0]
pub(self) mod inline {
// AstId: Use[0000, 0]
pub(self) use super::*;
- // AstId: Fn[1B26, 0]
+ // AstId: Fn[2A78, 0]
pub(self) fn fn_in_module;
}
- // AstId: Module[8994, 0]
+ // AstId: Module[C08B, 0]
pub(self) mod outline;
"##]],
);
@@ -225,13 +225,13 @@
m!();
"#,
expect![[r#"
- // AstId: MacroRules[88CE, 0]
+ // AstId: MacroRules[7E68, 0]
macro_rules! m { ... }
- // AstId: MacroDef[DC34, 0]
+ // AstId: MacroDef[1C1E, 0]
pub macro m2 { ... }
- // AstId: MacroCall[612F, 0], SyntaxContextId: ROOT2024, ExpandTo: Items
+ // AstId: MacroCall[7E68, 0], SyntaxContextId: ROOT2024, ExpandTo: Items
m!(...);
"#]],
);
@@ -244,7 +244,7 @@
pub(self) struct S;
"#,
expect![[r#"
- // AstId: Struct[42E2, 0]
+ // AstId: Struct[5024, 0]
pub(self) struct S;
"#]],
)
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index 777953d..0013c2a 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -746,3 +746,83 @@
623..690: `derive(CoercePointee)` requires `T` to be marked `?Sized`"#]],
);
}
+
+#[test]
+fn union_derive() {
+ check_errors(
+ r#"
+//- minicore: clone, copy, default, fmt, hash, ord, eq, derive
+
+#[derive(Copy)]
+union Foo1 { _v: () }
+#[derive(Clone)]
+union Foo2 { _v: () }
+#[derive(Default)]
+union Foo3 { _v: () }
+#[derive(Debug)]
+union Foo4 { _v: () }
+#[derive(Hash)]
+union Foo5 { _v: () }
+#[derive(Ord)]
+union Foo6 { _v: () }
+#[derive(PartialOrd)]
+union Foo7 { _v: () }
+#[derive(Eq)]
+union Foo8 { _v: () }
+#[derive(PartialEq)]
+union Foo9 { _v: () }
+ "#,
+ expect![[r#"
+ 78..118: this trait cannot be derived for unions
+ 119..157: this trait cannot be derived for unions
+ 158..195: this trait cannot be derived for unions
+ 196..232: this trait cannot be derived for unions
+ 233..276: this trait cannot be derived for unions
+ 313..355: this trait cannot be derived for unions"#]],
+ );
+}
+
+#[test]
+fn default_enum_without_default_attr() {
+ check_errors(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo {
+ Bar,
+}
+ "#,
+ expect!["1..41: `#[derive(Default)]` on enum with no `#[default]`"],
+ );
+}
+
+#[test]
+fn generic_enum_default() {
+ check(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+"#,
+ expect![[r#"
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+
+impl <T, > $crate::default::Default for Foo<T, > where {
+ fn default() -> Self {
+ Foo::Baz
+ }
+}"#]],
+ );
+}
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 293868d..eeaf865 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -28,6 +28,19 @@
r#"
#[rustc_builtin_macro]
macro_rules! asm {() => {}}
+#[rustc_builtin_macro]
+macro_rules! global_asm {() => {}}
+#[rustc_builtin_macro]
+macro_rules! naked_asm {() => {}}
+
+global_asm! {
+ ""
+}
+
+#[unsafe(naked)]
+extern "C" fn foo() {
+ naked_asm!("");
+}
fn main() {
let i: u64 = 3;
@@ -45,6 +58,17 @@
expect![[r##"
#[rustc_builtin_macro]
macro_rules! asm {() => {}}
+#[rustc_builtin_macro]
+macro_rules! global_asm {() => {}}
+#[rustc_builtin_macro]
+macro_rules! naked_asm {() => {}}
+
+builtin #global_asm ("")
+
+#[unsafe(naked)]
+extern "C" fn foo() {
+ builtin #naked_asm ("");
+}
fn main() {
let i: u64 = 3;
@@ -526,3 +550,51 @@
"##]],
);
}
+
+#[test]
+fn cfg_select() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+pub macro cfg_select($($tt:tt)*) {}
+
+cfg_select! {
+ false => { fn false_1() {} }
+ any(false, true) => { fn true_1() {} }
+}
+
+cfg_select! {
+ false => { fn false_2() {} }
+ _ => { fn true_2() {} }
+}
+
+cfg_select! {
+ false => { fn false_3() {} }
+}
+
+cfg_select! {
+ false
+}
+
+cfg_select! {
+ false =>
+}
+
+ "#,
+ expect![[r#"
+#[rustc_builtin_macro]
+pub macro cfg_select($($tt:tt)*) {}
+
+fn true_1() {}
+
+fn true_2() {}
+
+/* error: none of the predicates in this `cfg_select` evaluated to true */
+
+/* error: expected `=>` after cfg expression */
+
+/* error: expected a token tree after `=>` */
+
+ "#]],
+ );
+}
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index c6d901e..c489c1f 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -35,9 +35,9 @@
};
}
-struct#0:MacroRules[8C8E, 0]@58..64#14336# MyTraitMap2#0:MacroCall[D499, 0]@31..42#ROOT2024# {#0:MacroRules[8C8E, 0]@72..73#14336#
- map#0:MacroRules[8C8E, 0]@86..89#14336#:#0:MacroRules[8C8E, 0]@89..90#14336# #0:MacroRules[8C8E, 0]@89..90#14336#::#0:MacroRules[8C8E, 0]@91..93#14336#std#0:MacroRules[8C8E, 0]@93..96#14336#::#0:MacroRules[8C8E, 0]@96..98#14336#collections#0:MacroRules[8C8E, 0]@98..109#14336#::#0:MacroRules[8C8E, 0]@109..111#14336#HashSet#0:MacroRules[8C8E, 0]@111..118#14336#<#0:MacroRules[8C8E, 0]@118..119#14336#(#0:MacroRules[8C8E, 0]@119..120#14336#)#0:MacroRules[8C8E, 0]@120..121#14336#>#0:MacroRules[8C8E, 0]@121..122#14336#,#0:MacroRules[8C8E, 0]@122..123#14336#
-}#0:MacroRules[8C8E, 0]@132..133#14336#
+struct#0:MacroRules[BE8F, 0]@58..64#14336# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#14336#
+ map#0:MacroRules[BE8F, 0]@86..89#14336#:#0:MacroRules[BE8F, 0]@89..90#14336# #0:MacroRules[BE8F, 0]@89..90#14336#::#0:MacroRules[BE8F, 0]@91..93#14336#std#0:MacroRules[BE8F, 0]@93..96#14336#::#0:MacroRules[BE8F, 0]@96..98#14336#collections#0:MacroRules[BE8F, 0]@98..109#14336#::#0:MacroRules[BE8F, 0]@109..111#14336#HashSet#0:MacroRules[BE8F, 0]@111..118#14336#<#0:MacroRules[BE8F, 0]@118..119#14336#(#0:MacroRules[BE8F, 0]@119..120#14336#)#0:MacroRules[BE8F, 0]@120..121#14336#>#0:MacroRules[BE8F, 0]@121..122#14336#,#0:MacroRules[BE8F, 0]@122..123#14336#
+}#0:MacroRules[BE8F, 0]@132..133#14336#
"#]],
);
}
@@ -75,12 +75,12 @@
};
}
-fn#0:MacroCall[D499, 0]@30..32#ROOT2024# main#0:MacroCall[D499, 0]@33..37#ROOT2024#(#0:MacroCall[D499, 0]@37..38#ROOT2024#)#0:MacroCall[D499, 0]@38..39#ROOT2024# {#0:MacroCall[D499, 0]@40..41#ROOT2024#
- 1#0:MacroCall[D499, 0]@50..51#ROOT2024#;#0:MacroCall[D499, 0]@51..52#ROOT2024#
- 1.0#0:MacroCall[D499, 0]@61..64#ROOT2024#;#0:MacroCall[D499, 0]@64..65#ROOT2024#
- (#0:MacroCall[D499, 0]@74..75#ROOT2024#(#0:MacroCall[D499, 0]@75..76#ROOT2024#1#0:MacroCall[D499, 0]@76..77#ROOT2024#,#0:MacroCall[D499, 0]@77..78#ROOT2024# )#0:MacroCall[D499, 0]@78..79#ROOT2024#,#0:MacroCall[D499, 0]@79..80#ROOT2024# )#0:MacroCall[D499, 0]@80..81#ROOT2024#.#0:MacroCall[D499, 0]@81..82#ROOT2024#0#0:MacroCall[D499, 0]@82..85#ROOT2024#.#0:MacroCall[D499, 0]@82..85#ROOT2024#0#0:MacroCall[D499, 0]@82..85#ROOT2024#;#0:MacroCall[D499, 0]@85..86#ROOT2024#
- let#0:MacroCall[D499, 0]@95..98#ROOT2024# x#0:MacroCall[D499, 0]@99..100#ROOT2024# =#0:MacroCall[D499, 0]@101..102#ROOT2024# 1#0:MacroCall[D499, 0]@103..104#ROOT2024#;#0:MacroCall[D499, 0]@104..105#ROOT2024#
-}#0:MacroCall[D499, 0]@110..111#ROOT2024#
+fn#0:MacroCall[BE8F, 0]@30..32#ROOT2024# main#0:MacroCall[BE8F, 0]@33..37#ROOT2024#(#0:MacroCall[BE8F, 0]@37..38#ROOT2024#)#0:MacroCall[BE8F, 0]@38..39#ROOT2024# {#0:MacroCall[BE8F, 0]@40..41#ROOT2024#
+ 1#0:MacroCall[BE8F, 0]@50..51#ROOT2024#;#0:MacroCall[BE8F, 0]@51..52#ROOT2024#
+ 1.0#0:MacroCall[BE8F, 0]@61..64#ROOT2024#;#0:MacroCall[BE8F, 0]@64..65#ROOT2024#
+ (#0:MacroCall[BE8F, 0]@74..75#ROOT2024#(#0:MacroCall[BE8F, 0]@75..76#ROOT2024#1#0:MacroCall[BE8F, 0]@76..77#ROOT2024#,#0:MacroCall[BE8F, 0]@77..78#ROOT2024# )#0:MacroCall[BE8F, 0]@78..79#ROOT2024#,#0:MacroCall[BE8F, 0]@79..80#ROOT2024# )#0:MacroCall[BE8F, 0]@80..81#ROOT2024#.#0:MacroCall[BE8F, 0]@81..82#ROOT2024#0#0:MacroCall[BE8F, 0]@82..85#ROOT2024#.#0:MacroCall[BE8F, 0]@82..85#ROOT2024#0#0:MacroCall[BE8F, 0]@82..85#ROOT2024#;#0:MacroCall[BE8F, 0]@85..86#ROOT2024#
+ let#0:MacroCall[BE8F, 0]@95..98#ROOT2024# x#0:MacroCall[BE8F, 0]@99..100#ROOT2024# =#0:MacroCall[BE8F, 0]@101..102#ROOT2024# 1#0:MacroCall[BE8F, 0]@103..104#ROOT2024#;#0:MacroCall[BE8F, 0]@104..105#ROOT2024#
+}#0:MacroCall[BE8F, 0]@110..111#ROOT2024#
"#]],
@@ -171,7 +171,7 @@
}
fn main(foo: ()) {
- /* error: unresolved macro unresolved */"helloworld!"#0:Fn[B9C7, 0]@236..321#ROOT2024#;
+ /* error: unresolved macro unresolved */"helloworld!"#0:Fn[15AE, 0]@236..321#ROOT2024#;
}
}
@@ -197,7 +197,7 @@
#[macro_use]
mod foo;
-struct#1:MacroRules[E572, 0]@59..65#14336# Foo#0:MacroCall[BDD3, 0]@32..35#ROOT2024#(#1:MacroRules[E572, 0]@70..71#14336#u32#0:MacroCall[BDD3, 0]@41..44#ROOT2024#)#1:MacroRules[E572, 0]@74..75#14336#;#1:MacroRules[E572, 0]@75..76#14336#
+struct#1:MacroRules[DB0C, 0]@59..65#14336# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#14336#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#14336#;#1:MacroRules[DB0C, 0]@75..76#14336#
"#]],
);
}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 1c69b37..5e95b061 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -20,13 +20,14 @@
use expect_test::Expect;
use hir_expand::{
AstId, InFile, MacroCallId, MacroCallKind, MacroKind,
+ builtin::quote::quote,
db::ExpandDatabase,
proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
span_map::SpanMapRef,
};
-use intern::Symbol;
+use intern::{Symbol, sym};
use itertools::Itertools;
-use span::{Edition, Span};
+use span::{Edition, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext};
use stdx::{format_to, format_to_acc};
use syntax::{
AstNode, AstPtr,
@@ -34,7 +35,9 @@
SyntaxNode, T,
ast::{self, edit::IndentLevel},
};
+use syntax_bridge::token_tree_to_syntax_node;
use test_fixture::WithFixture;
+use tt::{TextRange, TextSize};
use crate::{
AdtId, Lookup, ModuleDefId,
@@ -386,3 +389,38 @@
other.type_id() == TypeId::of::<Self>()
}
}
+
+#[test]
+fn regression_20171() {
+ // This really isn't the appropriate place to put this test, but it's convenient with access to `quote!`.
+ let span = Span {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: SpanAnchor {
+ file_id: span::EditionedFileId::current_edition(span::FileId::from_raw(0)),
+ ast_id: ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: SyntaxContext::root(Edition::CURRENT),
+ };
+ let close_brace = tt::Punct { char: '}', spacing: tt::Spacing::Alone, span };
+ let dotdot1 = tt::Punct { char: '.', spacing: tt::Spacing::Joint, span };
+ let dotdot2 = tt::Punct { char: '.', spacing: tt::Spacing::Alone, span };
+ let dollar_crate = sym::dollar_crate;
+ let tt = quote! {
+ span => {
+ if !((matches!(
+ drive_parser(&mut parser, data, false),
+ Err(TarParserError::CorruptField {
+ field: CorruptFieldContext::PaxKvLength,
+ error: GeneralParseError::ParseInt(ParseIntError { #dotdot1 #dotdot2 })
+ })
+ #close_brace ))) {
+ #dollar_crate::panic::panic_2021!();
+ }}
+ };
+ token_tree_to_syntax_node(
+ &tt,
+ syntax_bridge::TopEntryPoint::MacroStmts,
+ &mut |_| Edition::CURRENT,
+ Edition::CURRENT,
+ );
+}
diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index d5ae6f8..6952a9d 100644
--- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -181,9 +181,9 @@
self.0. 1;
}
-fn#0:Fn[4D85, 0]@45..47#ROOT2024# foo#0:Fn[4D85, 0]@48..51#ROOT2024#(#0:Fn[4D85, 0]@51..52#ROOT2024#�:Fn[4D85, 0]@52..53#ROOT2024#self#0:Fn[4D85, 0]@53..57#ROOT2024# )#0:Fn[4D85, 0]@57..58#ROOT2024# {#0:Fn[4D85, 0]@59..60#ROOT2024#
- self#0:Fn[4D85, 0]@65..69#ROOT2024# .#0:Fn[4D85, 0]@69..70#ROOT2024#0#0:Fn[4D85, 0]@70..71#ROOT2024#.#0:Fn[4D85, 0]@71..72#ROOT2024#1#0:Fn[4D85, 0]@73..74#ROOT2024#;#0:Fn[4D85, 0]@74..75#ROOT2024#
-}#0:Fn[4D85, 0]@76..77#ROOT2024#"#]],
+fn#0:Fn[8A31, 0]@45..47#ROOT2024# foo#0:Fn[8A31, 0]@48..51#ROOT2024#(#0:Fn[8A31, 0]@51..52#ROOT2024#�:Fn[8A31, 0]@52..53#ROOT2024#self#0:Fn[8A31, 0]@53..57#ROOT2024# )#0:Fn[8A31, 0]@57..58#ROOT2024# {#0:Fn[8A31, 0]@59..60#ROOT2024#
+ self#0:Fn[8A31, 0]@65..69#ROOT2024# .#0:Fn[8A31, 0]@69..70#ROOT2024#0#0:Fn[8A31, 0]@70..71#ROOT2024#.#0:Fn[8A31, 0]@71..72#ROOT2024#1#0:Fn[8A31, 0]@73..74#ROOT2024#;#0:Fn[8A31, 0]@74..75#ROOT2024#
+}#0:Fn[8A31, 0]@76..77#ROOT2024#"#]],
);
}
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 0837308..5030585 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -373,19 +373,14 @@
crate_local_def_map(db, crate_id).def_map(db)
}
-#[allow(unused_lifetimes)]
-mod __ {
- use super::*;
- #[salsa_macros::tracked]
- pub(crate) struct DefMapPair<'db> {
- #[tracked]
- #[returns(ref)]
- pub(crate) def_map: DefMap,
- #[returns(ref)]
- pub(crate) local: LocalDefMap,
- }
+#[salsa_macros::tracked]
+pub(crate) struct DefMapPair<'db> {
+ #[tracked]
+ #[returns(ref)]
+ pub(crate) def_map: DefMap,
+ #[returns(ref)]
+ pub(crate) local: LocalDefMap,
}
-pub(crate) use __::DefMapPair;
#[salsa_macros::tracked(returns(ref))]
pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefMapPair<'_> {
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index ba75dca..338851b 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -172,7 +172,7 @@
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "of_",
+ "EnumVariants::of_",
]
"#]],
expect![[r#"
@@ -181,7 +181,7 @@
"ast_id_map_shim",
"file_item_tree_query",
"real_span_map_shim",
- "of_",
+ "EnumVariants::of_",
]
"#]],
);
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 6f32198..316ad5d 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -1052,17 +1052,6 @@
}
}
-pub fn resolver_for_expr(
- db: &dyn DefDatabase,
- owner: DefWithBodyId,
- expr_id: ExprId,
-) -> Resolver<'_> {
- let r = owner.resolver(db);
- let scopes = db.expr_scopes(owner);
- let scope_id = scopes.scope_for(expr_id);
- resolver_for_scope_(db, scopes, scope_id, r, owner)
-}
-
pub fn resolver_for_scope(
db: &dyn DefDatabase,
owner: DefWithBodyId,
diff --git a/crates/hir-def/src/signatures.rs b/crates/hir-def/src/signatures.rs
index 1958eb6..92e610b 100644
--- a/crates/hir-def/src/signatures.rs
+++ b/crates/hir-def/src/signatures.rs
@@ -779,14 +779,10 @@
Arc::new(VariantFields { fields, store: Arc::new(store), shape }),
Arc::new(source_map),
),
- None => (
- Arc::new(VariantFields {
- fields: Arena::default(),
- store: ExpressionStore::empty_singleton(),
- shape,
- }),
- ExpressionStoreSourceMap::empty_singleton(),
- ),
+ None => {
+ let (store, source_map) = ExpressionStore::empty_singleton();
+ (Arc::new(VariantFields { fields: Arena::default(), store, shape }), source_map)
+ }
}
}
@@ -878,7 +874,7 @@
idx += 1;
}
Err(cfg) => {
- col.source_map.diagnostics.push(
+ col.store.diagnostics.push(
crate::expr_store::ExpressionStoreDiagnostics::InactiveCode {
node: InFile::new(fields.file_id, SyntaxNodePtr::new(field.syntax())),
cfg,
@@ -891,9 +887,9 @@
if !has_fields {
return None;
}
- let store = col.store.finish();
+ let (store, source_map) = col.store.finish();
arena.shrink_to_fit();
- Some((arena, store, col.source_map))
+ Some((arena, store, source_map))
}
#[derive(Debug, PartialEq, Eq)]
@@ -980,7 +976,7 @@
if !matches!(variant.shape, FieldsShape::Unit) {
let body = db.body(v.into());
// A variant with explicit discriminant
- if body.exprs[body.body_expr] != crate::hir::Expr::Missing {
+ if !matches!(body[body.body_expr], crate::hir::Expr::Missing) {
return false;
}
}
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index d135584..15e68ff 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -458,6 +458,7 @@
invoc_span: Span,
tt: &tt::TopSubtree,
trait_path: tt::TopSubtree,
+ allow_unions: bool,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let info = match parse_adt(db, tt, invoc_span) {
@@ -469,6 +470,12 @@
);
}
};
+ if !allow_unions && matches!(info.shape, AdtShape::Union) {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(invoc_span)),
+ ExpandError::other(invoc_span, "this trait cannot be derived for unions"),
+ );
+ }
ExpandResult::ok(expand_simple_derive_with_parsed(
invoc_span,
info,
@@ -535,7 +542,14 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::marker::Copy },
+ true,
+ |_| quote! {span =>},
+ )
}
fn clone_expand(
@@ -544,7 +558,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, true, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -599,41 +613,63 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::default::Default }, |adt| {
- let body = match &adt.shape {
- AdtShape::Struct(fields) => {
- let name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#name),
+ let adt = match parse_adt(db, tt, span) {
+ Ok(info) => info,
+ Err(e) => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan { open: span, close: span }),
+ e,
+ );
+ }
+ };
+ let (body, constrain_to_trait) = match &adt.shape {
+ AdtShape::Struct(fields) => {
+ let name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
+ );
+ (body, true)
+ }
+ AdtShape::Enum { default_variant, variants } => {
+ if let Some(d) = default_variant {
+ let (name, fields) = &variants[*d];
+ let adt_name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
- )
- }
- AdtShape::Enum { default_variant, variants } => {
- if let Some(d) = default_variant {
- let (name, fields) = &variants[*d];
- let adt_name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#adt_name :: #name),
- span,
- |_| quote!(span =>#krate::default::Default::default()),
- )
- } else {
- // FIXME: Return expand error here
- quote!(span =>)
- }
- }
- AdtShape::Union => {
- // FIXME: Return expand error here
- quote!(span =>)
- }
- };
- quote! {span =>
- fn default() -> Self {
- #body
+ );
+ (body, false)
+ } else {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "`#[derive(Default)]` on enum with no `#[default]`"),
+ );
}
}
- })
+ AdtShape::Union => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "this trait cannot be derived for unions"),
+ );
+ }
+ };
+ ExpandResult::ok(expand_simple_derive_with_parsed(
+ span,
+ adt,
+ quote! {span => #krate::default::Default },
+ |_adt| {
+ quote! {span =>
+ fn default() -> Self {
+ #body
+ }
+ }
+ },
+ constrain_to_trait,
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ))
}
fn debug_expand(
@@ -642,7 +678,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, false, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
@@ -697,10 +733,7 @@
}
})
.collect(),
- AdtShape::Union => {
- // FIXME: Return expand error here
- vec![]
- }
+ AdtShape::Union => unreachable!(),
};
quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
@@ -718,11 +751,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, false, |adt| {
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -769,7 +798,14 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::cmp::Eq },
+ true,
+ |_| quote! {span =>},
+ )
}
fn partial_eq_expand(
@@ -778,11 +814,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, false, |adt| {
let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
@@ -854,7 +886,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -873,10 +905,6 @@
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
@@ -916,7 +944,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -935,10 +963,6 @@
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index f9abe4f..4a9af01 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -7,6 +7,7 @@
Symbol,
sym::{self},
};
+use itertools::Itertools;
use mbe::{DelimiterKind, expect_fragment};
use span::{Edition, FileId, Span};
use stdx::format_to;
@@ -124,8 +125,9 @@
(assert, Assert) => assert_expand,
(stringify, Stringify) => stringify_expand,
(asm, Asm) => asm_expand,
- (global_asm, GlobalAsm) => asm_expand,
- (naked_asm, NakedAsm) => asm_expand,
+ (global_asm, GlobalAsm) => global_asm_expand,
+ (naked_asm, NakedAsm) => naked_asm_expand,
+ (cfg_select, CfgSelect) => cfg_select_expand,
(cfg, Cfg) => cfg_expand,
(core_panic, CorePanic) => panic_expand,
(std_panic, StdPanic) => panic_expand,
@@ -324,6 +326,101 @@
ExpandResult::ok(expanded)
}
+fn global_asm_expand(
+ _db: &dyn ExpandDatabase,
+ _id: MacroCallId,
+ tt: &tt::TopSubtree,
+ span: Span,
+) -> ExpandResult<tt::TopSubtree> {
+ let mut tt = tt.clone();
+ tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
+ let pound = mk_pound(span);
+ let expanded = quote! {span =>
+ builtin #pound global_asm #tt
+ };
+ ExpandResult::ok(expanded)
+}
+
+fn naked_asm_expand(
+ _db: &dyn ExpandDatabase,
+ _id: MacroCallId,
+ tt: &tt::TopSubtree,
+ span: Span,
+) -> ExpandResult<tt::TopSubtree> {
+ let mut tt = tt.clone();
+ tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
+ let pound = mk_pound(span);
+ let expanded = quote! {span =>
+ builtin #pound naked_asm #tt
+ };
+ ExpandResult::ok(expanded)
+}
+
+fn cfg_select_expand(
+ db: &dyn ExpandDatabase,
+ id: MacroCallId,
+ tt: &tt::TopSubtree,
+ span: Span,
+) -> ExpandResult<tt::TopSubtree> {
+ let loc = db.lookup_intern_macro_call(id);
+ let cfg_options = loc.krate.cfg_options(db);
+
+ let mut iter = tt.iter();
+ let mut expand_to = None;
+ while let Some(next) = iter.peek() {
+ let active = if let tt::TtElement::Leaf(tt::Leaf::Ident(ident)) = next
+ && ident.sym == sym::underscore
+ {
+ iter.next();
+ true
+ } else {
+ cfg_options.check(&CfgExpr::parse_from_iter(&mut iter)) != Some(false)
+ };
+ match iter.expect_glued_punct() {
+ Ok(it) if it.len() == 2 && it[0].char == '=' && it[1].char == '>' => {}
+ _ => {
+ let err_span = iter.peek().map(|it| it.first_span()).unwrap_or(span);
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(err_span, "expected `=>` after cfg expression"),
+ );
+ }
+ }
+ let expand_to_if_active = match iter.next() {
+ Some(tt::TtElement::Subtree(_, tt)) => tt.remaining(),
+ _ => {
+ let err_span = iter.peek().map(|it| it.first_span()).unwrap_or(span);
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(err_span, "expected a token tree after `=>`"),
+ );
+ }
+ };
+
+ if expand_to.is_none() && active {
+ expand_to = Some(expand_to_if_active);
+ }
+ }
+ match expand_to {
+ Some(expand_to) => {
+ let mut builder = tt::TopSubtreeBuilder::new(tt::Delimiter {
+ kind: tt::DelimiterKind::Invisible,
+ open: span,
+ close: span,
+ });
+ builder.extend_with_tt(expand_to);
+ ExpandResult::ok(builder.build())
+ }
+ None => ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(
+ span,
+ "none of the predicates in this `cfg_select` evaluated to true",
+ ),
+ ),
+ }
+}
+
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
@@ -681,11 +778,19 @@
}
fn parse_string(tt: &tt::TopSubtree) -> Result<(Symbol, Span), ExpandError> {
- let delimiter = tt.top_subtree().delimiter;
- tt.iter()
- .next()
- .ok_or(delimiter.open.cover(delimiter.close))
- .and_then(|tt| match tt {
+ let mut tt = TtElement::Subtree(tt.top_subtree(), tt.iter());
+ (|| {
+ // FIXME: We wrap expression fragments in parentheses which can break this expectation
+ // here
+ // Remove this once we handle none delims correctly
+ while let TtElement::Subtree(sub, tt_iter) = &mut tt
+ && let DelimiterKind::Parenthesis | DelimiterKind::Invisible = sub.delimiter.kind
+ {
+ tt =
+ tt_iter.exactly_one().map_err(|_| sub.delimiter.open.cover(sub.delimiter.close))?;
+ }
+
+ match tt {
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
@@ -698,35 +803,11 @@
kind: tt::LitKind::StrRaw(_),
suffix: _,
})) => Ok((text.clone(), *span)),
- // FIXME: We wrap expression fragments in parentheses which can break this expectation
- // here
- // Remove this once we handle none delims correctly
- TtElement::Subtree(tt, mut tt_iter)
- if tt.delimiter.kind == DelimiterKind::Parenthesis =>
- {
- tt_iter
- .next()
- .and_then(|tt| match tt {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::Str,
- suffix: _,
- })) => Some((unescape_symbol(text), *span)),
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::StrRaw(_),
- suffix: _,
- })) => Some((text.clone(), *span)),
- _ => None,
- })
- .ok_or(delimiter.open.cover(delimiter.close))
- }
TtElement::Leaf(l) => Err(*l.span()),
TtElement::Subtree(tt, _) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
- })
- .map_err(|span| ExpandError::other(span, "expected string literal"))
+ }
+ })()
+ .map_err(|span| ExpandError::other(span, "expected string literal"))
}
fn include_expand(
diff --git a/crates/hir-expand/src/builtin/quote.rs b/crates/hir-expand/src/builtin/quote.rs
index d5874f8..70c38d4 100644
--- a/crates/hir-expand/src/builtin/quote.rs
+++ b/crates/hir-expand/src/builtin/quote.rs
@@ -129,7 +129,7 @@
}
}
}
-pub(super) use quote;
+pub use quote;
pub trait ToTokenTree {
fn to_tokens(self, span: Span, builder: &mut TopSubtreeBuilder);
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 7e9928c..888c140 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -145,7 +145,7 @@
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
}
-#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext)]
+#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext, revisions = usize::MAX)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContext,
}
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index a73a223..6730b33 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -315,11 +315,11 @@
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_with_macro_call_body(
+ pub fn original_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
- self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
+ self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_input(db)
}
pub fn original_syntax_node_rooted(
@@ -465,7 +465,7 @@
}
}
- pub fn original_node_file_range_with_macro_call_body(
+ pub fn original_node_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
@@ -476,7 +476,7 @@
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file);
- loc.kind.original_call_range_with_body(db)
+ loc.kind.original_call_range_with_input(db)
}
}
}
@@ -497,6 +497,18 @@
}
}
}
+
+ pub fn original_node_file_range_rooted_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<FileRange> {
+ match self.file_id {
+ HirFileId::FileId(file_id) => Some(FileRange { file_id, range: self.value }),
+ HirFileId::MacroFile(mac_file) => {
+ map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value)
+ }
+ }
+ }
}
impl<N: AstNode> InFile<N> {
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 6ecac14..ac61b22 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -199,9 +199,9 @@
},
&ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
match db.proc_macros_for_crate(def_crate).as_ref().and_then(|it| it.get_error()) {
- Some((e, hard_err)) => RenderedExpandError {
- message: e.to_owned(),
- error: hard_err,
+ Some(e) => RenderedExpandError {
+ message: e.to_string(),
+ error: e.is_hard_error(),
kind: RenderedExpandError::GENERAL_KIND,
},
None => RenderedExpandError {
@@ -688,8 +688,11 @@
/// Returns the original file range that best describes the location of this macro call.
///
- /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
- pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
+ /// This spans the entire macro call, including its input. That is for
+ /// - fn_like! {}, it spans the path and token tree
+ /// - #\[derive], it spans the `#[derive(...)]` attribute and the annotated item
+ /// - #\[attr], it spans the `#[attr(...)]` attribute and the annotated item
+ pub fn original_call_range_with_input(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id() {
@@ -712,8 +715,8 @@
/// Returns the original file range that best describes the location of this macro call.
///
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
- /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
- /// get only the specific derive that is being referred to.
+ /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
+ /// attribute's range, and derives get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
@@ -726,7 +729,14 @@
};
let range = match kind {
- MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db);
+ node.path()
+ .unwrap()
+ .syntax()
+ .text_range()
+ .cover(node.excl_token().unwrap().text_range())
+ }
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
@@ -1056,7 +1066,7 @@
intern::impl_internable!(ModPath, attrs::AttrInput);
-#[salsa_macros::interned(no_lifetime, debug)]
+#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[doc(alias = "MacroFileId")]
pub struct MacroCallId {
pub loc: MacroCallLoc,
diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs
index 6134c3a..6431d46 100644
--- a/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -46,7 +46,7 @@
} else if let Some(crate_name) = ¯o_def_crate.extra_data(db).display_name {
make::tokens::ident(crate_name.crate_name().as_str())
} else {
- return dollar_crate.clone();
+ dollar_crate.clone()
}
});
if replacement.text() == "$crate" {
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 1c8ebb6..f97d721 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -4,7 +4,7 @@
use std::any::Any;
use std::{panic::RefUnwindSafe, sync};
-use base_db::{Crate, CrateBuilderId, CratesIdMap, Env};
+use base_db::{Crate, CrateBuilderId, CratesIdMap, Env, ProcMacroLoadingError};
use intern::Symbol;
use rustc_hash::FxHashMap;
use span::Span;
@@ -53,8 +53,8 @@
System(String),
}
-pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, (String, bool)>;
-type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>;
+pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, ProcMacroLoadingError>;
+type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, ProcMacroLoadingError>;
#[derive(Default, Debug)]
pub struct ProcMacrosBuilder(FxHashMap<CrateBuilderId, Arc<CrateProcMacros>>);
@@ -77,9 +77,7 @@
proc_macros_crate,
match proc_macro {
Ok(it) => Arc::new(CrateProcMacros(Ok(it.into_boxed_slice()))),
- Err((e, hard_err)) => {
- Arc::new(CrateProcMacros(Err((e.into_boxed_str(), hard_err))))
- }
+ Err(e) => Arc::new(CrateProcMacros(Err(e))),
},
);
}
@@ -139,8 +137,8 @@
)
}
- pub fn get_error(&self) -> Option<(&str, bool)> {
- self.0.as_ref().err().map(|(e, hard_err)| (&**e, *hard_err))
+ pub fn get_error(&self) -> Option<&ProcMacroLoadingError> {
+ self.0.as_ref().err()
}
/// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate.
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 24530a5..14b9cd2 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -281,7 +281,7 @@
let def = variant_id.into();
let body = db.body(def);
let loc = variant_id.lookup(db);
- if body.exprs[body.body_expr] == Expr::Missing {
+ if matches!(body[body.body_expr], Expr::Missing) {
let prev_idx = loc.index.checked_sub(1);
let value = match prev_idx {
Some(prev_idx) => {
@@ -334,7 +334,7 @@
// Type checking clousres need an isolated body (See the above FIXME). Bail out early to prevent panic.
return unknown_const(infer[expr].clone());
}
- if let Expr::Path(p) = &ctx.body.exprs[expr] {
+ if let Expr::Path(p) = &ctx.body[expr] {
let resolver = &ctx.resolver;
if let Some(c) =
path_to_const(db, resolver, p, mode, || ctx.generics(), debruijn, infer[expr].clone())
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 1029969..b3d4684 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -237,15 +237,6 @@
// Interned IDs for Chalk integration
#[salsa::interned]
- fn intern_type_or_const_param_id(
- &self,
- param_id: TypeOrConstParamId,
- ) -> InternedTypeOrConstParamId;
-
- #[salsa::interned]
- fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
-
- #[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
@@ -329,9 +320,31 @@
fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
}
-impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedTypeOrConstParamId {
+ pub loc: TypeOrConstParamId,
+}
+impl ::std::fmt::Debug for InternedTypeOrConstParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedTypeOrConstParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
-impl_intern_key!(InternedLifetimeParamId, LifetimeParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedLifetimeParamId {
+ pub loc: LifetimeParamId,
+}
+impl ::std::fmt::Debug for InternedLifetimeParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedLifetimeParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
impl_intern_key!(InternedConstParamId, ConstParamId);
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index 9c0f8f4..40fe307 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -226,11 +226,10 @@
let body = self.db.body(func.into());
let edition = self.edition(func);
let mut pats_replacements = body
- .pats
- .iter()
+ .pats()
.filter_map(|(pat_id, pat)| match pat {
Pat::Bind { id, .. } => {
- let bind_name = &body.bindings[*id].name;
+ let bind_name = &body[*id].name;
let mut suggested_text = to_lower_snake_case(bind_name.as_str())?;
if is_raw_identifier(&suggested_text, edition) {
suggested_text.insert_str(0, "r#");
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index 5d56957..5ae6bf6 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -101,7 +101,7 @@
self.check_for_trailing_return(body.body_expr, &body);
}
- for (id, expr) in body.exprs.iter() {
+ for (id, expr) in body.exprs() {
if let Some((variant, missed_fields, true)) =
record_literal_missing_fields(db, &self.infer, id, expr)
{
@@ -132,7 +132,7 @@
}
}
- for (id, pat) in body.pats.iter() {
+ for (id, pat) in body.pats() {
if let Some((variant, missed_fields, true)) =
record_pattern_missing_fields(db, &self.infer, id, pat)
{
@@ -389,7 +389,7 @@
if !self.validate_lints {
return;
}
- match &body.exprs[body_expr] {
+ match &body[body_expr] {
Expr::Block { statements, tail, .. } => {
let last_stmt = tail.or_else(|| match statements.last()? {
Statement::Expr { expr, .. } => Some(*expr),
@@ -428,7 +428,7 @@
if else_branch.is_none() {
return;
}
- if let Expr::Block { statements, tail, .. } = &self.body.exprs[*then_branch] {
+ if let Expr::Block { statements, tail, .. } = &self.body[*then_branch] {
let last_then_expr = tail.or_else(|| match statements.last()? {
Statement::Expr { expr, .. } => Some(*expr),
_ => None,
diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs
index 0bce32a..ca132fb 100644
--- a/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/crates/hir-ty/src/diagnostics/match_check.rs
@@ -25,7 +25,6 @@
db::HirDatabase,
display::{HirDisplay, HirDisplayError, HirFormatter},
infer::BindingMode,
- lang_items::is_box,
};
use self::pat_util::EnumerateAndAdjustIterator;
@@ -77,7 +76,7 @@
subpatterns: Vec<FieldPat>,
},
- /// `box P`, `&P`, `&mut P`, etc.
+ /// `&P`, `&mut P`, etc.
Deref {
subpattern: Pat,
},
@@ -151,7 +150,7 @@
hir_def::hir::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.binding_modes[pat];
ty = &self.infer[id];
- let name = &self.body.bindings[id].name;
+ let name = &self.body[id].name;
match (bm, ty.kind(Interner)) {
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
(BindingMode::Ref(_), _) => {
@@ -406,7 +405,6 @@
}
PatKind::Deref { subpattern } => {
match self.ty.kind(Interner) {
- TyKind::Adt(adt, _) if is_box(f.db, adt.0) => write!(f, "box ")?,
&TyKind::Ref(mutbl, ..) => {
write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })?
}
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index 7cf22c6..56fd12e 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -21,7 +21,7 @@
inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
};
-use super::{FieldPat, Pat, PatKind, is_box};
+use super::{FieldPat, Pat, PatKind};
use Constructor::*;
@@ -170,8 +170,6 @@
}
PatKind::Deref { subpattern } => {
ctor = match pat.ty.kind(Interner) {
- // This is a box pattern.
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => Struct,
TyKind::Ref(..) => Ref,
_ => {
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
@@ -194,23 +192,6 @@
ctor = Struct;
arity = substs.len(Interner);
}
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
- // _)` or a box pattern. As a hack to avoid an ICE with the former, we
- // ignore other fields than the first one. This will trigger an error later
- // anyway.
- // See https://github.com/rust-lang/rust/issues/82772 ,
- // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
- // The problem is that we can't know from the type whether we'll match
- // normally or through box-patterns. We'll have to figure out a proper
- // solution when we introduce generalized deref patterns. Also need to
- // prevent mixing of those two options.
- fields.retain(|ipat| ipat.idx == 0);
- ctor = Struct;
- arity = 1;
- }
&TyKind::Adt(AdtId(adt), _) => {
ctor = match pat.kind.as_ref() {
PatKind::Leaf { .. } if matches!(adt, hir_def::AdtId::UnionId(_)) => {
@@ -277,12 +258,6 @@
})
.collect(),
},
- TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
- // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
- // of `std`). So this branch is only reachable when the feature is enabled and
- // the pattern is a box pattern.
- PatKind::Deref { subpattern: subpatterns.next().unwrap() }
- }
TyKind::Adt(adt, substs) => {
let variant = Self::variant_id_for_adt(self.db, pat.ctor(), adt.0).unwrap();
let subpatterns = self
@@ -343,14 +318,8 @@
Struct | Variant(_) | UnionField => match *ty.kind(Interner) {
TyKind::Tuple(arity, ..) => arity,
TyKind::Adt(AdtId(adt), ..) => {
- if is_box(self.db, adt) {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- 1
- } else {
- let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
- variant.fields(self.db).fields().len()
- }
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
+ variant.fields(self.db).fields().len()
}
_ => {
never!("Unexpected type for `Single` constructor: {:?}", ty);
@@ -383,29 +352,22 @@
tys.cloned().map(|ty| (ty, PrivateUninhabitedField(false))).collect()
}
TyKind::Ref(.., rty) => single(rty.clone()),
- &TyKind::Adt(AdtId(adt), ref substs) => {
- if is_box(self.db, adt) {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
- single(subst_ty)
- } else {
- let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
+ &TyKind::Adt(AdtId(adt), ..) => {
+ let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
- let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
+ let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
- self.list_variant_fields(ty, variant)
- .map(move |(fid, ty)| {
- let is_visible = || {
- matches!(adt, hir_def::AdtId::EnumId(..))
- || visibilities[fid].is_visible_from(self.db, self.module)
- };
- let is_uninhabited = self.is_uninhabited(&ty);
- let private_uninhabited = is_uninhabited && !is_visible();
- (ty, PrivateUninhabitedField(private_uninhabited))
- })
- .collect()
- }
+ self.list_variant_fields(ty, variant)
+ .map(move |(fid, ty)| {
+ let is_visible = || {
+ matches!(adt, hir_def::AdtId::EnumId(..))
+ || visibilities[fid].is_visible_from(self.db, self.module)
+ };
+ let is_uninhabited = self.is_uninhabited(&ty);
+ let private_uninhabited = is_uninhabited && !is_visible();
+ (ty, PrivateUninhabitedField(private_uninhabited))
+ })
+ .collect()
}
ty_kind => {
never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
@@ -527,6 +489,14 @@
fn complexity_exceeded(&self) -> Result<(), Self::Error> {
Err(())
}
+
+ fn report_mixed_deref_pat_ctors(
+ &self,
+ _deref_pat: &DeconstructedPat<'_>,
+ _normal_pat: &DeconstructedPat<'_>,
+ ) {
+ // FIXME(deref_patterns): This could report an error comparable to the one in rustc.
+ }
}
impl fmt::Debug for MatchCheckCtx<'_> {
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 20cf3c7..f6ad3c7 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -7,7 +7,7 @@
use hir_def::{
AdtId, DefWithBodyId, FieldId, FunctionId, VariantId,
expr_store::{Body, path::Path},
- hir::{AsmOperand, Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp},
+ hir::{AsmOperand, Expr, ExprId, ExprOrPatId, InlineAsmKind, Pat, PatId, Statement, UnaryOp},
resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
signatures::StaticFlags,
type_ref::Rawness,
@@ -217,7 +217,7 @@
}
fn walk_pat(&mut self, current: PatId) {
- let pat = &self.body.pats[current];
+ let pat = &self.body[current];
if self.inside_union_destructure {
match pat {
@@ -264,7 +264,7 @@
}
fn walk_expr(&mut self, current: ExprId) {
- let expr = &self.body.exprs[current];
+ let expr = &self.body[current];
let inside_assignment = mem::replace(&mut self.inside_assignment, false);
match expr {
&Expr::Call { callee, .. } => {
@@ -284,7 +284,7 @@
self.resolver.reset_to_guard(guard);
}
Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => {
- match self.body.exprs[*expr] {
+ match self.body[*expr] {
// Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`,
// see https://github.com/rust-lang/rust/pull/125834.
Expr::Path(_) => return,
@@ -315,7 +315,12 @@
self.inside_assignment = old_inside_assignment;
}
Expr::InlineAsm(asm) => {
- self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm);
+ if asm.kind == InlineAsmKind::Asm {
+ // `naked_asm!()` requires `unsafe` on the attribute (`#[unsafe(naked)]`),
+ // and `global_asm!()` doesn't require it at all.
+ self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm);
+ }
+
asm.operands.iter().for_each(|(_, op)| match op {
AsmOperand::In { expr, .. }
| AsmOperand::Out { expr: Some(expr), .. }
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 507bab2..b3760e3 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -795,6 +795,14 @@
let Some(bytes) = memory_map.get(addr, size_one * count) else {
return f.write_str("<ref-data-not-available>");
};
+ let expected_len = count * size_one;
+ if bytes.len() < expected_len {
+ never!(
+ "Memory map size is too small. Expected {expected_len}, got {}",
+ bytes.len(),
+ );
+ return f.write_str("<layout-error>");
+ }
f.write_str("&[")?;
let mut first = true;
for i in 0..count {
@@ -1432,10 +1440,10 @@
match f.closure_style {
ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
ClosureStyle::ClosureWithId => {
- return write!(f, "{{closure#{:?}}}", id.0.as_u32());
+ return write!(f, "{{closure#{:?}}}", id.0.index());
}
ClosureStyle::ClosureWithSubst => {
- write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
+ write!(f, "{{closure#{:?}}}", id.0.index())?;
return hir_fmt_generics(f, substs.as_slice(Interner), None, None);
}
_ => (),
@@ -2328,6 +2336,7 @@
store[*path].hir_fmt(f, store)
}
TypeBound::Use(args) => {
+ write!(f, "use<")?;
let edition = f.edition();
let last = args.len().saturating_sub(1);
for (idx, arg) in args.iter().enumerate() {
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index ce53198..e880438 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -460,19 +460,17 @@
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
- pub tuple_field_access_types: FxHashMap<TupleId, Substitution>,
+ tuple_field_access_types: FxHashMap<TupleId, Substitution>,
/// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
- pub diagnostics: Vec<InferenceDiagnostic>,
- pub type_of_expr: ArenaMap<ExprId, Ty>,
+ diagnostics: Vec<InferenceDiagnostic>,
+ pub(crate) type_of_expr: ArenaMap<ExprId, Ty>,
/// For each pattern record the type it resolves to.
///
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
- pub type_of_pat: ArenaMap<PatId, Ty>,
- pub type_of_binding: ArenaMap<BindingId, Ty>,
- pub type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
- /// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop.
- pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
+ pub(crate) type_of_pat: ArenaMap<PatId, Ty>,
+ pub(crate) type_of_binding: ArenaMap<BindingId, Ty>,
+ pub(crate) type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
@@ -483,7 +481,7 @@
// FIXME: Move this into `InferenceContext`
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
- pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
+ pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
/// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
///
/// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
@@ -497,12 +495,12 @@
/// }
/// ```
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
- pub binding_modes: ArenaMap<PatId, BindingMode>,
- pub expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
+ pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
+ pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
- pub coercion_casts: FxHashSet<ExprId>,
+ pub(crate) coercion_casts: FxHashSet<ExprId>,
}
impl InferenceResult {
@@ -566,6 +564,26 @@
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}
+
+ pub fn diagnostics(&self) -> &[InferenceDiagnostic] {
+ &self.diagnostics
+ }
+
+ pub fn tuple_field_access_type(&self, id: TupleId) -> &Substitution {
+ &self.tuple_field_access_types[&id]
+ }
+
+ pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty]> {
+ self.pat_adjustments.get(&id).map(|it| &**it)
+ }
+
+ pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> {
+ self.expr_adjustments.get(&id).map(|it| &**it)
+ }
+
+ pub fn binding_mode(&self, id: PatId) -> Option<BindingMode> {
+ self.binding_modes.get(id).copied()
+ }
}
impl Index<ExprId> for InferenceResult {
@@ -772,7 +790,6 @@
type_of_pat,
type_of_binding,
type_of_rpit,
- type_of_for_iterator,
type_mismatches,
has_errors,
standard_types: _,
@@ -832,11 +849,6 @@
*has_errors = *has_errors || ty.contains_unknown();
}
type_of_rpit.shrink_to_fit();
- for ty in type_of_for_iterator.values_mut() {
- *ty = table.resolve_completely(ty.clone());
- *has_errors = *has_errors || ty.contains_unknown();
- }
- type_of_for_iterator.shrink_to_fit();
*has_errors |= !type_mismatches.is_empty();
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 65a273c..c3029bf 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -1229,10 +1229,11 @@
self.select_from_expr(*expr);
}
}
- Expr::Let { pat: _, expr } => {
+ Expr::Let { pat, expr } => {
self.walk_expr(*expr);
- let place = self.place_of_expr(*expr);
- self.ref_expr(*expr, place);
+ if let Some(place) = self.place_of_expr(*expr) {
+ self.consume_with_pat(place, *pat);
+ }
}
Expr::UnaryOp { expr, op: _ }
| Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index d40d52c..d43c99f 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -731,9 +731,32 @@
&Pat::Expr(expr) => {
Some(self.infer_expr(expr, &Expectation::none(), ExprIsRead::No))
}
- Pat::Path(path) => Some(self.infer_expr_path(path, target.into(), tgt_expr)),
+ Pat::Path(path) => {
+ let resolver_guard =
+ self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
+ let resolution = self.resolver.resolve_path_in_value_ns_fully(
+ self.db,
+ path,
+ self.body.pat_path_hygiene(target),
+ );
+ self.resolver.reset_to_guard(resolver_guard);
+
+ if matches!(
+ resolution,
+ Some(
+ ValueNs::ConstId(_)
+ | ValueNs::StructId(_)
+ | ValueNs::EnumVariantId(_)
+ )
+ ) {
+ None
+ } else {
+ Some(self.infer_expr_path(path, target.into(), tgt_expr))
+ }
+ }
_ => None,
};
+ let is_destructuring_assignment = lhs_ty.is_none();
if let Some(lhs_ty) = lhs_ty {
self.write_pat_ty(target, lhs_ty.clone());
@@ -747,7 +770,15 @@
self.inside_assignment = false;
self.resolver.reset_to_guard(resolver_guard);
}
- self.result.standard_types.unit.clone()
+ if is_destructuring_assignment && self.diverges.is_always() {
+ // Ordinary assignments always return `()`, even when they diverge.
+ // However, rustc lowers destructuring assignments into blocks, and blocks return `!` if they have no tail
+ // expression and they diverge. Therefore, we have to do the same here, even though we don't lower destructuring
+ // assignments into blocks.
+ self.table.new_maybe_never_var()
+ } else {
+ self.result.standard_types.unit.clone()
+ }
}
Expr::Range { lhs, rhs, range_type } => {
let lhs_ty =
diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs
index d2eaf21..3f7eba9 100644
--- a/crates/hir-ty/src/infer/mutability.rs
+++ b/crates/hir-ty/src/infer/mutability.rs
@@ -273,7 +273,7 @@
fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
let mut r = Mutability::Not;
self.body.walk_bindings_in_pat(pat, |b| {
- if self.body.bindings[b].mode == BindingAnnotation::RefMut {
+ if self.body[b].mode == BindingAnnotation::RefMut {
r = Mutability::Mut;
}
});
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 99d3b5c..18288b7 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -459,7 +459,7 @@
expected: &Ty,
decl: Option<DeclContext>,
) -> Ty {
- let Binding { mode, .. } = self.body.bindings[binding];
+ let Binding { mode, .. } = self.body[binding];
let mode = if mode == BindingAnnotation::Unannotated {
default_bm
} else {
@@ -639,7 +639,7 @@
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
let mut res = false;
body.walk_pats(pat_id, &mut |pat| {
- res |= matches!(body[pat], Pat::Bind { id, .. } if body.bindings[id].mode == BindingAnnotation::Ref);
+ res |= matches!(body[pat], Pat::Bind { id, .. } if body[id].mode == BindingAnnotation::Ref);
});
res
}
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 3fa2bfb..107da6a 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -261,7 +261,7 @@
}
// Potentially-wide pointers.
TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => {
- let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
+ let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO));
if matches!(ty.kind(Interner), TyKind::Ref(..)) {
data_ptr.valid_range_mut().start = 1;
}
@@ -285,7 +285,7 @@
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
}
TyKind::Dyn(..) => {
- let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
+ let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::ZERO));
vtable.valid_range_mut().start = 1;
vtable
}
diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs
index e1e1c44..82d0ed4 100644
--- a/crates/hir-ty/src/layout/target.rs
+++ b/crates/hir-ty/src/layout/target.rs
@@ -2,7 +2,7 @@
use base_db::Crate;
use hir_def::layout::TargetDataLayout;
-use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors};
+use rustc_abi::{AddressSpace, AlignFromBytesError, TargetDataLayoutErrors};
use triomphe::Arc;
use crate::db::HirDatabase;
@@ -12,7 +12,7 @@
krate: Crate,
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
match &krate.workspace_data(db).data_layout {
- Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
+ Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it, AddressSpace::ZERO) {
Ok(it) => Ok(Arc::new(it)),
Err(e) => {
Err(match e {
@@ -39,6 +39,7 @@
target,
} => format!(r#"inconsistent target specification: "data-layout" claims pointers are {pointer_size}-bit, while "target-pointer-width" is `{target}`"#),
TargetDataLayoutErrors::InvalidBitsSize { err } => err,
+ TargetDataLayoutErrors::UnknownPointerSpecification { err } => format!(r#"use of unknown pointer specifer in "data-layout": {err}"#),
}.into())
}
},
diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs
index cc7d74f..b3bc226 100644
--- a/crates/hir-ty/src/layout/tests.rs
+++ b/crates/hir-ty/src/layout/tests.rs
@@ -119,8 +119,7 @@
.unwrap();
let hir_body = db.body(function_id.into());
let b = hir_body
- .bindings
- .iter()
+ .bindings()
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs
index 06686b6..5c06234 100644
--- a/crates/hir-ty/src/lower/path.rs
+++ b/crates/hir-ty/src/lower/path.rs
@@ -1018,8 +1018,12 @@
}
let lifetime_args_len = def_generics.len_lifetimes_self();
- if provided_lifetimes_count == 0 && lifetime_args_len > 0 && !lowering_assoc_type_generics {
- // In generic associated types, we never allow inferring the lifetimes.
+ if provided_lifetimes_count == 0
+ && lifetime_args_len > 0
+ && (!lowering_assoc_type_generics || infer_args)
+ {
+ // In generic associated types, we never allow inferring the lifetimes, but only in type context, that is
+ // when `infer_args == false`. In expression/pattern context we always allow inferring them, even for GATs.
match lifetime_elision {
&LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => {
ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path);
diff --git a/crates/hir-ty/src/mapping.rs b/crates/hir-ty/src/mapping.rs
index 6936d81..9d3d204 100644
--- a/crates/hir-ty/src/mapping.rs
+++ b/crates/hir-ty/src/mapping.rs
@@ -13,7 +13,8 @@
use crate::{
AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, Interner, OpaqueTyId,
- PlaceholderIndex, chalk_db, db::HirDatabase,
+ PlaceholderIndex, chalk_db,
+ db::{HirDatabase, InternedLifetimeParamId, InternedTypeOrConstParamId},
};
pub trait ToChalk {
@@ -125,30 +126,32 @@
pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_type_or_const_param_id(interned_id)
+ let interned_id =
+ InternedTypeOrConstParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex {
- let interned_id = db.intern_type_or_const_param_id(id);
+ let interned_id = InternedTypeOrConstParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_lifetime_param_id(interned_id)
+ let interned_id =
+ InternedLifetimeParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> PlaceholderIndex {
- let interned_id = db.intern_lifetime_param_id(id);
+ let interned_id = InternedLifetimeParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs
index bf80ed7..482b420 100644
--- a/crates/hir-ty/src/mir.rs
+++ b/crates/hir-ty/src/mir.rs
@@ -1212,10 +1212,9 @@
match *self {
MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }),
// FIXME: Figure out if this is correct wrt. match ergonomics.
- MirSpan::BindingId(binding) => matches!(
- body.bindings[binding].mode,
- BindingAnnotation::Ref | BindingAnnotation::RefMut
- ),
+ MirSpan::BindingId(binding) => {
+ matches!(body[binding].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
+ }
MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
}
}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 1ec55a8..9a97bd6 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -31,8 +31,8 @@
use triomphe::Arc;
use crate::{
- CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner,
- MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+ AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId,
+ Interner, MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
consteval::{ConstEvalError, intern_const_scalar, try_const_usize},
db::{HirDatabase, InternedClosure},
display::{ClosureStyle, DisplayTarget, HirDisplay},
@@ -630,7 +630,7 @@
Ok(target_data_layout) => target_data_layout,
Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
};
- let cached_ptr_size = target_data_layout.pointer_size.bytes_usize();
+ let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();
Ok(Evaluator {
target_data_layout,
stack: vec![0],
@@ -2195,7 +2195,7 @@
}
}
}
- chalk_ir::TyKind::Array(inner, len) => {
+ TyKind::Array(inner, len) => {
let len = match try_const_usize(this.db, len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
@@ -2213,7 +2213,7 @@
)?;
}
}
- chalk_ir::TyKind::Tuple(_, subst) => {
+ TyKind::Tuple(_, subst) => {
let layout = this.layout(ty)?;
for (id, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
@@ -2229,7 +2229,7 @@
)?;
}
}
- chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
+ TyKind::Adt(adt, subst) => match adt.0 {
AdtId::StructId(s) => {
let data = s.fields(this.db);
let layout = this.layout(ty)?;
@@ -2280,6 +2280,10 @@
}
AdtId::UnionId(_) => (),
},
+ TyKind::Alias(AliasTy::Projection(proj)) => {
+ let ty = this.db.normalize_projection(proj.clone(), this.trait_env.clone());
+ rec(this, bytes, &ty, locals, mm, stack_depth_limit - 1)?;
+ }
_ => (),
}
Ok(())
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 845d6b8..07d8147 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -321,7 +321,7 @@
current: BasicBlockId,
) -> Result<Option<(Operand, BasicBlockId)>> {
if !self.has_adjustments(expr_id) {
- if let Expr::Literal(l) = &self.body.exprs[expr_id] {
+ if let Expr::Literal(l) = &self.body[expr_id] {
let ty = self.expr_ty_without_adjust(expr_id);
return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
}
@@ -411,7 +411,7 @@
place: Place,
mut current: BasicBlockId,
) -> Result<Option<BasicBlockId>> {
- match &self.body.exprs[expr_id] {
+ match &self.body[expr_id] {
Expr::OffsetOf(_) => {
not_supported!("builtin#offset_of")
}
@@ -1374,7 +1374,7 @@
}
fn lower_literal_or_const_to_operand(&mut self, ty: Ty, loc: &ExprId) -> Result<Operand> {
- match &self.body.exprs[*loc] {
+ match &self.body[*loc] {
Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
Expr::Path(c) => {
let owner = self.owner;
@@ -1850,7 +1850,7 @@
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
if let Pat::Bind { id, subpat: None } = self.body[it] {
if matches!(
- self.body.bindings[id].mode,
+ self.body[id].mode,
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
) {
self.result.binding_locals.insert(id, local_id);
@@ -1859,7 +1859,7 @@
local_id
}));
// and then rest of bindings
- for (id, _) in self.body.bindings.iter() {
+ for (id, _) in self.body.bindings() {
if !pick_binding(id) {
continue;
}
@@ -2126,7 +2126,7 @@
.result
.binding_locals
.into_iter()
- .filter(|it| ctx.body.binding_owners.get(&it.0).copied() == Some(expr))
+ .filter(|it| ctx.body.binding_owner(it.0) == Some(expr))
.collect();
if let Some(err) = err {
return Err(MirLowerError::UnresolvedUpvar(err));
@@ -2191,7 +2191,7 @@
// 0 is return local
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) });
let binding_picker = |b: BindingId| {
- let owner = ctx.body.binding_owners.get(&b).copied();
+ let owner = ctx.body.binding_owner(b);
if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) }
};
// 1 to param_len is for params
diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs
index e7bffea..e074c2d 100644
--- a/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/crates/hir-ty/src/mir/lower/as_place.rs
@@ -133,7 +133,7 @@
}
this.lower_expr_to_some_place_without_adjust(expr_id, current)
};
- match &self.body.exprs[expr_id] {
+ match &self.body[expr_id] {
Expr::Path(p) => {
let resolver_guard =
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs
index 61c0685..3325226 100644
--- a/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -130,7 +130,7 @@
.collect::<Vec<_>>()
.into(),
);
- Ok(match &self.body.pats[pattern] {
+ Ok(match &self.body[pattern] {
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
@@ -436,7 +436,7 @@
(next, Some(else_target))
}
},
- Pat::Lit(l) => match &self.body.exprs[*l] {
+ Pat::Lit(l) => match &self.body[*l] {
Expr::Literal(l) => {
if mode == MatchingMode::Check {
let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs
index 78a69cf..aad54f8 100644
--- a/crates/hir-ty/src/mir/pretty.rs
+++ b/crates/hir-ty/src/mir/pretty.rs
@@ -219,7 +219,7 @@
fn local_name(&self, local: LocalId) -> LocalName {
match self.local_to_binding.get(local) {
- Some(b) => LocalName::Binding(self.hir_body.bindings[*b].name.clone(), local),
+ Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local),
None => LocalName::Unknown(local),
}
}
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index d049c67..b5de0e5 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -166,10 +166,10 @@
self.events.lock().unwrap().take().unwrap()
}
- pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ pub(crate) fn log_executed(&self, f: impl FnOnce()) -> (Vec<String>, Vec<salsa::Event>) {
let events = self.log(f);
- events
- .into_iter()
+ let executed = events
+ .iter()
.filter_map(|e| match e.kind {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
@@ -181,6 +181,7 @@
}
_ => None,
})
- .collect()
+ .collect();
+ (executed, events)
}
}
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index 79754bc..9605a0b 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -168,7 +168,7 @@
let inference_result = db.infer(def);
for (pat, mut ty) in inference_result.type_of_pat.iter() {
- if let Pat::Bind { id, .. } = body.pats[pat] {
+ if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match pat_node(&body_source_map, pat, &db) {
@@ -316,7 +316,7 @@
}
for (pat, mut ty) in inference_result.type_of_pat.iter() {
- if let Pat::Bind { id, .. } = body.pats[pat] {
+ if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match body_source_map.pat_syntax(pat) {
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 7fb9817..dbc68ee 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -446,7 +446,7 @@
}
#[test]
-fn let_binding_is_a_ref_capture() {
+fn let_binding_is_a_ref_capture_in_ref_binding() {
check_closure_captures(
r#"
//- minicore:copy
@@ -454,12 +454,36 @@
fn main() {
let mut s = S;
let s_ref = &mut s;
+ let mut s2 = S;
+ let s_ref2 = &mut s2;
let closure = || {
if let ref cb = s_ref {
+ } else if let ref mut cb = s_ref2 {
}
};
}
"#,
- expect!["83..135;49..54;112..117 ByRef(Shared) s_ref &'? &'? mut S"],
+ expect![[r#"
+ 129..225;49..54;149..155 ByRef(Shared) s_ref &'? &'? mut S
+ 129..225;93..99;188..198 ByRef(Mut { kind: Default }) s_ref2 &'? mut &'? mut S"#]],
+ );
+}
+
+#[test]
+fn let_binding_is_a_value_capture_in_binding() {
+ check_closure_captures(
+ r#"
+//- minicore:copy, option
+struct Box(i32);
+fn main() {
+ let b = Some(Box(0));
+ let closure = || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ expect!["73..149;37..38;103..104 ByValue b Option<Box>"],
);
}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 0377ce9..3159499 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -1,6 +1,7 @@
use base_db::SourceDatabase;
use expect_test::Expect;
use hir_def::{DefWithBodyId, ModuleDefId};
+use salsa::EventKind;
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB};
@@ -567,11 +568,11 @@
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "query_with_diagnostics_",
+ "TraitItems::query_with_diagnostics_",
"body_shim",
"body_with_source_map_shim",
"attrs_shim",
- "of_",
+ "ImplItems::of_",
"infer_shim",
"trait_signature_shim",
"trait_signature_with_source_map_shim",
@@ -596,8 +597,8 @@
"struct_signature_with_source_map_shim",
"generic_predicates_shim",
"value_ty_shim",
- "firewall_",
- "query_",
+ "VariantFields::firewall_",
+ "VariantFields::query_",
"lang_item",
"inherent_impls_in_crate_shim",
"impl_signature_shim",
@@ -674,11 +675,11 @@
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
- "query_with_diagnostics_",
+ "TraitItems::query_with_diagnostics_",
"body_with_source_map_shim",
"attrs_shim",
"body_shim",
- "of_",
+ "ImplItems::of_",
"infer_shim",
"attrs_shim",
"trait_signature_with_source_map_shim",
@@ -697,7 +698,7 @@
"function_signature_with_source_map_shim",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
- "query_",
+ "VariantFields::query_",
"inherent_impls_in_crate_shim",
"impl_signature_with_source_map_shim",
"impl_signature_shim",
@@ -718,10 +719,23 @@
required: &[(&str, usize)],
expect: Expect,
) {
- let events = db.log_executed(f);
- for (event, count) in required {
- let n = events.iter().filter(|it| it.contains(event)).count();
- assert_eq!(n, *count, "Expected {event} to be executed {count} times, but only got {n}");
- }
- expect.assert_debug_eq(&events);
+ let (executed, events) = db.log_executed(f);
+ salsa::attach(db, || {
+ for (event, count) in required {
+ let n = executed.iter().filter(|it| it.contains(event)).count();
+ assert_eq!(
+ n,
+ *count,
+ "Expected {event} to be executed {count} times, but only got {n}:\n \
+ Executed: {executed:#?}\n \
+ Event log: {events:#?}",
+ events = events
+ .iter()
+ .filter(|event| !matches!(event.kind, EventKind::WillCheckCancellation))
+ .map(|event| { format!("{:?}", event.kind) })
+ .collect::<Vec<_>>(),
+ );
+ }
+ expect.assert_debug_eq(&executed);
+ });
}
diff --git a/crates/hir-ty/src/tests/never_type.rs b/crates/hir-ty/src/tests/never_type.rs
index 1ca4c9b..6a91356 100644
--- a/crates/hir-ty/src/tests/never_type.rs
+++ b/crates/hir-ty/src/tests/never_type.rs
@@ -785,3 +785,31 @@
"#]],
)
}
+
+#[test]
+fn diverging_destructuring_assignment() {
+ check_infer_with_mismatches(
+ r#"
+fn foo() {
+ let n = match 42 {
+ 0 => _ = loop {},
+ _ => 0,
+ };
+}
+ "#,
+ expect![[r#"
+ 9..84 '{ ... }; }': ()
+ 19..20 'n': i32
+ 23..81 'match ... }': i32
+ 29..31 '42': i32
+ 42..43 '0': i32
+ 42..43 '0': i32
+ 47..48 '_': !
+ 47..58 '_ = loop {}': i32
+ 51..58 'loop {}': !
+ 56..58 '{}': ()
+ 68..69 '_': i32
+ 73..74 '0': i32
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index 43e8f37..b154e59 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -3751,7 +3751,7 @@
}
let v: bool = true;
m!();
- // ^^^^ i32
+ // ^^ i32
}
"#,
);
@@ -3765,39 +3765,39 @@
let v: bool;
macro_rules! m { () => { v } }
m!();
- // ^^^^ bool
+ // ^^ bool
let v: char;
macro_rules! m { () => { v } }
m!();
- // ^^^^ char
+ // ^^ char
{
let v: u8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u8
+ // ^^ u8
let v: i8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i8
+ // ^^ i8
let v: i16;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i16
+ // ^^ i16
{
let v: u32;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u32
+ // ^^ u32
let v: u64;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u64
+ // ^^ u64
}
}
}
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index aba2e03..c1e814e 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -36,16 +36,16 @@
};
macro_rules! diagnostics {
- ($($diag:ident $(<$lt:lifetime>)?,)*) => {
+ ($AnyDiagnostic:ident <$db:lifetime> -> $($diag:ident $(<$lt:lifetime>)?,)*) => {
#[derive(Debug)]
- pub enum AnyDiagnostic<'db> {$(
+ pub enum $AnyDiagnostic<$db> {$(
$diag(Box<$diag $(<$lt>)?>),
)*}
$(
- impl<'db> From<$diag $(<$lt>)?> for AnyDiagnostic<'db> {
- fn from(d: $diag $(<$lt>)?) -> AnyDiagnostic<'db> {
- AnyDiagnostic::$diag(Box::new(d))
+ impl<$db> From<$diag $(<$lt>)?> for $AnyDiagnostic<$db> {
+ fn from(d: $diag $(<$lt>)?) -> $AnyDiagnostic<$db> {
+ $AnyDiagnostic::$diag(Box::new(d))
}
}
)*
@@ -66,7 +66,7 @@
// }, ...
// ]
-diagnostics![
+diagnostics![AnyDiagnostic<'db> ->
AwaitOutsideOfAsync,
BreakOutsideOfLoop,
CastToUnsized<'db>,
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index e8a1816..1b2b769 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -1260,7 +1260,9 @@
}
pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
- let ty = db.infer(self.owner).tuple_field_access_types[&self.tuple]
+ let ty = db
+ .infer(self.owner)
+ .tuple_field_access_type(self.tuple)
.as_slice(Interner)
.get(self.index as usize)
.and_then(|arg| arg.ty(Interner))
@@ -1927,7 +1929,7 @@
expr_store_diagnostics(db, acc, &source_map);
let infer = db.infer(self.into());
- for d in &infer.diagnostics {
+ for d in infer.diagnostics() {
acc.extend(AnyDiagnostic::inference_diagnostic(
db,
self.into(),
@@ -2034,7 +2036,7 @@
)
}
let mol = &borrowck_result.mutability_of_locals;
- for (binding_id, binding_data) in body.bindings.iter() {
+ for (binding_id, binding_data) in body.bindings() {
if binding_data.problems.is_some() {
// We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`.
continue;
@@ -3220,7 +3222,8 @@
}
}
- pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool {
+ /// Is this `asm!()`, or a variant of it (e.g. `global_asm!()`)?
+ pub fn is_asm_like(&self, db: &dyn HirDatabase) -> bool {
match self.id {
MacroId::Macro2Id(it) => {
matches!(it.lookup(db).expander, MacroExpander::BuiltIn(m) if m.is_asm())
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 247bb69..adba592 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -677,8 +677,7 @@
pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
let body = self.db.body(to_be_renamed.parent);
let resolver = to_be_renamed.parent.resolver(self.db);
- let starting_expr =
- body.binding_owners.get(&to_be_renamed.binding_id).copied().unwrap_or(body.body_expr);
+ let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr);
let mut visitor = RenameConflictsVisitor {
body: &body,
conflicts: FxHashSet::default(),
@@ -1776,7 +1775,7 @@
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
- if mac.is_asm_or_global_asm(self.db) {
+ if mac.is_asm_like(self.db) {
return true;
}
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index f18ca7c..ecc6e5f 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -242,11 +242,7 @@
fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> {
let pat_id = self.pat_id(&pat.clone().into())?;
- if let Pat::Bind { id, .. } = self.store()?.pats[pat_id.as_pat()?] {
- Some(id)
- } else {
- None
- }
+ if let Pat::Bind { id, .. } = self.store()?[pat_id.as_pat()?] { Some(id) } else { None }
}
pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> {
@@ -254,7 +250,7 @@
// expressions nor patterns).
let expr_id = self.expr_id(expr.clone())?.as_expr()?;
let infer = self.infer()?;
- infer.expr_adjustments.get(&expr_id).map(|v| &**v)
+ infer.expr_adjustment(expr_id)
}
pub(crate) fn type_of_type(
@@ -286,7 +282,7 @@
let infer = self.infer()?;
let coerced = expr_id
.as_expr()
- .and_then(|expr_id| infer.expr_adjustments.get(&expr_id))
+ .and_then(|expr_id| infer.expr_adjustment(expr_id))
.and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
let ty = infer[expr_id].clone();
let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
@@ -302,12 +298,11 @@
let infer = self.infer()?;
let coerced = match expr_or_pat_id {
ExprOrPatId::ExprId(idx) => infer
- .expr_adjustments
- .get(&idx)
+ .expr_adjustment(idx)
.and_then(|adjusts| adjusts.last().cloned())
.map(|adjust| adjust.target),
ExprOrPatId::PatId(idx) => {
- infer.pat_adjustments.get(&idx).and_then(|adjusts| adjusts.last().cloned())
+ infer.pat_adjustment(idx).and_then(|adjusts| adjusts.last().cloned())
}
};
@@ -345,7 +340,7 @@
) -> Option<BindingMode> {
let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer()?;
- infer.binding_modes.get(id.as_pat()?).map(|bm| match bm {
+ infer.binding_mode(id.as_pat()?).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -362,8 +357,7 @@
let infer = self.infer()?;
Some(
infer
- .pat_adjustments
- .get(&pat_id.as_pat()?)?
+ .pat_adjustment(pat_id.as_pat()?)?
.iter()
.map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
.collect(),
@@ -736,7 +730,7 @@
let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
let variant_data = variant.fields(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
- let (adt, subst) = self.infer()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
+ let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?;
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
Some((
@@ -765,7 +759,8 @@
},
};
- let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, Some(store))?;
+ let body_owner = self.resolver.body_owner();
+ let res = resolve_hir_value_path(db, &self.resolver, body_owner, path, HygieneId::ROOT)?;
match res {
PathResolution::Def(def) => Some(def),
_ => None,
@@ -996,7 +991,7 @@
let parent_hir_path = path
.parent_path()
.and_then(|p| collector.lower_path(p, &mut ExprCollector::impl_trait_error_allocator));
- let store = collector.store.finish();
+ let (store, _) = collector.store.finish();
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.
@@ -1205,7 +1200,7 @@
let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id);
let hir_path =
collector.lower_path(path.clone(), &mut ExprCollector::impl_trait_error_allocator)?;
- let store = collector.store.finish();
+ let (store, _) = collector.store.finish();
Some(resolve_hir_path_(
db,
&self.resolver,
@@ -1249,7 +1244,7 @@
let infer = self.infer()?;
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
- let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+ let substs = infer[pat_id].as_adt()?.1;
let (variant, missing_fields, _exhaustive) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
@@ -1440,9 +1435,11 @@
) -> Option<ScopeId> {
node.ancestors_with_macros(db)
.take_while(|it| {
- !ast::Item::can_cast(it.kind())
- || ast::MacroCall::can_cast(it.kind())
- || ast::Use::can_cast(it.kind())
+ let kind = it.kind();
+ !ast::Item::can_cast(kind)
+ || ast::MacroCall::can_cast(kind)
+ || ast::Use::can_cast(kind)
+ || ast::AsmExpr::can_cast(kind)
})
.filter_map(|it| it.map(ast::Expr::cast).transpose())
.filter_map(|it| source_map.node_expr(it.as_ref())?.as_expr())
@@ -1785,8 +1782,8 @@
}
fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
- match infer.expr_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
+ match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) {
Some(adjustment) => Some(&adjustment.target),
- None => infer.type_of_expr.get(id),
+ None => Some(&infer[id]),
}
}
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index 7566508..dca1019 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -125,6 +125,13 @@
}
ModuleDefId::AdtId(AdtId::EnumId(id)) => {
this.push_decl(id, name, false, None);
+ let enum_name = this.db.enum_signature(id).name.as_str().to_smolstr();
+ this.with_container_name(Some(enum_name), |this| {
+ let variants = id.enum_variants(this.db);
+ for (variant_id, variant_name, _) in &variants.variants {
+ this.push_decl(*variant_id, variant_name, true, None);
+ }
+ });
}
ModuleDefId::AdtId(AdtId::UnionId(id)) => {
this.push_decl(id, name, false, None);
diff --git a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
index efcbcef..9126e86 100644
--- a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
+++ b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
@@ -1,8 +1,8 @@
use ide_db::defs::{Definition, NameRefClass};
use syntax::{
AstNode, SyntaxNode,
- ast::{self, HasName, Name},
- ted,
+ ast::{self, HasName, Name, syntax_factory::SyntaxFactory},
+ syntax_editor::SyntaxEditor,
};
use crate::{
@@ -121,34 +121,36 @@
// Rename `extracted` with `binding` in `pat`.
fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> SyntaxNode {
- let syntax = pat.syntax().clone_for_update();
+ let syntax = pat.syntax().clone_subtree();
+ let mut editor = SyntaxEditor::new(syntax.clone());
+ let make = SyntaxFactory::with_mappings();
let extracted = extracted
.iter()
- .map(|e| syntax.covering_element(e.syntax().text_range()))
+ .map(|e| e.syntax().text_range() - pat.syntax().text_range().start())
+ .map(|r| syntax.covering_element(r))
.collect::<Vec<_>>();
for extracted_syntax in extracted {
// If `extracted` variable is a record field, we should rename it to `binding`,
// otherwise we just need to replace `extracted` with `binding`.
-
if let Some(record_pat_field) =
extracted_syntax.ancestors().find_map(ast::RecordPatField::cast)
{
if let Some(name_ref) = record_pat_field.field_name() {
- ted::replace(
+ editor.replace(
record_pat_field.syntax(),
- ast::make::record_pat_field(
- ast::make::name_ref(&name_ref.text()),
- binding.clone(),
+ make.record_pat_field(
+ make.name_ref(&name_ref.text()),
+ binding.clone_for_update(),
)
- .syntax()
- .clone_for_update(),
+ .syntax(),
);
}
} else {
- ted::replace(extracted_syntax, binding.clone().syntax().clone_for_update());
+ editor.replace(extracted_syntax, binding.syntax().clone_for_update());
}
}
- syntax
+ editor.add_mappings(make.finish_with_mappings());
+ editor.finish().new_root().clone()
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
index 32c4ae2..8d27574 100644
--- a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
@@ -4,7 +4,8 @@
use syntax::{
SyntaxKind,
ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
- match_ast, ted,
+ match_ast,
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
@@ -97,11 +98,14 @@
// Note that we don't need to consider macro files in this function because this is
// currently not triggered for struct definitions inside macro calls.
let tuple_fields = record_fields.fields().filter_map(|f| {
- let field = ast::make::tuple_field(f.visibility(), f.ty()?).clone_for_update();
- ted::insert_all(
- ted::Position::first_child_of(field.syntax()),
+ let field = ast::make::tuple_field(f.visibility(), f.ty()?);
+ let mut editor = SyntaxEditor::new(field.syntax().clone());
+ editor.insert_all(
+ Position::first_child_of(field.syntax()),
f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
);
+ let field_syntax = editor.finish().new_root().clone();
+ let field = ast::TupleField::cast(field_syntax)?;
Some(field)
});
let tuple_fields = ast::make::tuple_field_list(tuple_fields);
@@ -1086,8 +1090,7 @@
}
"#,
r#"
-pub struct Foo(#[my_custom_attr]
-u32);
+pub struct Foo(#[my_custom_attr]u32);
"#,
);
}
diff --git a/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index b71de5e..c80b78f 100644
--- a/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -175,7 +175,7 @@
// ast::TuplePat(it) => (),
// FIXME
// ast::SlicePat(it) => (),
- _ => return None,
+ _ => None,
}
}
}
diff --git a/crates/ide-assists/src/handlers/generate_default_from_new.rs b/crates/ide-assists/src/handlers/generate_default_from_new.rs
index 79a78ab..47233fb 100644
--- a/crates/ide-assists/src/handlers/generate_default_from_new.rs
+++ b/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -2,7 +2,7 @@
use stdx::format_to;
use syntax::{
AstNode,
- ast::{self, HasGenericParams, HasName, Impl, make},
+ ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl, make},
};
use crate::{
@@ -88,20 +88,19 @@
let generic_params = impl_.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
- let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
// remove defaults since they can't be specified in impls
- match param {
+ let param = match param {
ast::TypeOrConstParam::Type(param) => {
- let param = param.clone_for_update();
- param.remove_default();
+ let param = make::type_param(param.name()?, param.type_bound_list());
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
- let param = param.clone_for_update();
- param.remove_default();
+ let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
- }
+ };
+ Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
diff --git a/crates/ide-assists/src/handlers/generate_deref.rs b/crates/ide-assists/src/handlers/generate_deref.rs
index c7b97dc..55a09c5 100644
--- a/crates/ide-assists/src/handlers/generate_deref.rs
+++ b/crates/ide-assists/src/handlers/generate_deref.rs
@@ -10,7 +10,7 @@
use crate::{
AssistId,
assist_context::{AssistContext, Assists, SourceChangeBuilder},
- utils::generate_trait_impl_text,
+ utils::generate_trait_impl_text_intransitive,
};
// Assist: generate_deref
@@ -150,7 +150,7 @@
),
};
let strukt_adt = ast::Adt::Struct(strukt);
- let deref_impl = generate_trait_impl_text(
+ let deref_impl = generate_trait_impl_text_intransitive(
&strukt_adt,
&trait_path.display(db, edition).to_string(),
&impl_code,
@@ -228,6 +228,28 @@
}
#[test]
+ fn test_generate_record_deref_with_generic() {
+ check_assist(
+ generate_deref,
+ r#"
+//- minicore: deref
+struct A<T>($0T);
+"#,
+ r#"
+struct A<T>(T);
+
+impl<T> core::ops::Deref for A<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_generate_record_deref_short_path() {
check_assist(
generate_deref,
diff --git a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
index c7e5e41..20ee925 100644
--- a/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
+++ b/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
@@ -294,7 +294,7 @@
let self_expr = make::ext::expr_self();
let lhs = make::expr_field(self_expr, field_name);
let rhs = make::expr_path(make::ext::ident_path(field_name));
- let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs));
+ let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs).into());
let body = make::block_expr([assign_stmt.into()], None);
// Make the setter fn
diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs
index 2862e6d..14601ca 100644
--- a/crates/ide-assists/src/handlers/generate_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_impl.rs
@@ -1,14 +1,14 @@
use syntax::{
ast::{self, AstNode, HasName, edit_in_place::Indent, make},
- ted,
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists, utils};
-fn insert_impl(impl_: ast::Impl, nominal: &ast::Adt) {
+fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &ast::Adt) {
let indent = nominal.indent_level();
- ted::insert_all_raw(
- ted::Position::after(nominal.syntax()),
+ editor.insert_all(
+ Position::after(nominal.syntax()),
vec![
// Add a blank line after the ADT, and indentation for the impl to match the ADT
make::tokens::whitespace(&format!("\n\n{indent}")).into(),
@@ -51,14 +51,17 @@
// Generate the impl
let impl_ = utils::generate_impl(&nominal);
+ let mut editor = edit.make_editor(nominal.syntax());
// Add a tabstop after the left curly brace
if let Some(cap) = ctx.config.snippet_cap {
if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) {
- edit.add_tabstop_after_token(cap, l_curly);
+ let tabstop = edit.make_tabstop_after(cap);
+ editor.add_annotation(l_curly, tabstop);
}
}
- insert_impl(impl_, &edit.make_mut(nominal));
+ insert_impl(&mut editor, &impl_, &nominal);
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -97,18 +100,22 @@
// Generate the impl
let impl_ = utils::generate_trait_impl_intransitive(&nominal, make::ty_placeholder());
+ let mut editor = edit.make_editor(nominal.syntax());
// Make the trait type a placeholder snippet
if let Some(cap) = ctx.config.snippet_cap {
if let Some(trait_) = impl_.trait_() {
- edit.add_placeholder_snippet(cap, trait_);
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(trait_.syntax(), placeholder);
}
if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) {
- edit.add_tabstop_after_token(cap, l_curly);
+ let tabstop = edit.make_tabstop_after(cap);
+ editor.add_annotation(l_curly, tabstop);
}
}
- insert_impl(impl_, &edit.make_mut(nominal));
+ insert_impl(&mut editor, &impl_, &nominal);
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index bab2ccf..dc26ec7 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -1,6 +1,6 @@
-use ide_db::famous_defs::FamousDefs;
+use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
use syntax::{
- AstNode,
+ AstNode, T,
ast::{self, edit_in_place::Indent, make},
ted,
};
@@ -32,7 +32,7 @@
//
// $0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
// fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
-// &self[index as usize]
+// &mut self[index as usize]
// }
// }
//
@@ -48,36 +48,34 @@
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
let indent = impl_def.indent_level();
- let trait_ = impl_def.trait_()?;
- if let ast::Type::PathType(trait_path) = trait_ {
- let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
- let scope = ctx.sema.scope(trait_path.syntax())?;
- if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
- return None;
- }
- }
+ let ast::Type::PathType(path) = impl_def.trait_()? else {
+ return None;
+ };
+ let trait_name = path.path()?.segment()?.name_ref()?;
+
+ let scope = ctx.sema.scope(impl_def.trait_()?.syntax())?;
+ let famous = FamousDefs(&ctx.sema, scope.krate());
+
+ let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
+ let trait_new = get_trait_mut(&trait_, famous)?;
// Index -> IndexMut
- let index_trait = impl_def
- .syntax()
- .descendants()
- .filter_map(ast::NameRef::cast)
- .find(|it| it.text() == "Index")?;
- ted::replace(
- index_trait.syntax(),
- make::path_segment(make::name_ref("IndexMut")).clone_for_update().syntax(),
- );
+ ted::replace(trait_name.syntax(), make::name_ref(trait_new).clone_for_update().syntax());
// index -> index_mut
- let trait_method_name = impl_def
+ let (trait_method_name, new_trait_method_name) = impl_def
.syntax()
.descendants()
.filter_map(ast::Name::cast)
- .find(|it| it.text() == "index")?;
- ted::replace(trait_method_name.syntax(), make::name("index_mut").clone_for_update().syntax());
+ .find_map(process_method_name)?;
+ ted::replace(
+ trait_method_name.syntax(),
+ make::name(new_trait_method_name).clone_for_update().syntax(),
+ );
- let type_alias = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast)?;
- ted::remove(type_alias.syntax());
+ if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) {
+ ted::remove(type_alias.syntax());
+ }
// &self -> &mut self
let mut_self_param = make::mut_self_param();
@@ -87,15 +85,14 @@
// &Self::Output -> &mut Self::Output
let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?;
- ted::replace(
- ret_type.syntax(),
- make::ret_type(make::ty("&mut Self::Output")).clone_for_update().syntax(),
- );
+ let new_ret_type = process_ret_type(&ret_type)?;
+ ted::replace(ret_type.syntax(), make::ret_type(new_ret_type).clone_for_update().syntax());
let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it {
ast::AssocItem::Fn(f) => Some(f),
_ => None,
})?;
+ let _ = process_ref_mut(&fn_);
let assoc_list = make::assoc_item_list().clone_for_update();
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
@@ -104,7 +101,7 @@
let target = impl_def.syntax().text_range();
acc.add(
AssistId::generate("generate_mut_trait_impl"),
- "Generate `IndexMut` impl from this `Index` trait",
+ format!("Generate `{trait_new}` impl from this `{trait_name}` trait"),
target,
|edit| {
edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}"));
@@ -112,6 +109,56 @@
)
}
+fn process_ref_mut(fn_: &ast::Fn) -> Option<()> {
+ let expr = fn_.body()?.tail_expr()?;
+ match &expr {
+ ast::Expr::RefExpr(ref_expr) if ref_expr.mut_token().is_none() => {
+ ted::insert_all_raw(
+ ted::Position::after(ref_expr.amp_token()?),
+ vec![make::token(T![mut]).into(), make::tokens::whitespace(" ").into()],
+ );
+ }
+ _ => {}
+ }
+ None
+}
+
+fn get_trait_mut(apply_trait: &hir::Trait, famous: FamousDefs<'_, '_>) -> Option<&'static str> {
+ let trait_ = Some(apply_trait);
+ if trait_ == famous.core_convert_Index().as_ref() {
+ return Some("IndexMut");
+ }
+ if trait_ == famous.core_convert_AsRef().as_ref() {
+ return Some("AsMut");
+ }
+ if trait_ == famous.core_borrow_Borrow().as_ref() {
+ return Some("BorrowMut");
+ }
+ if trait_ == famous.core_ops_Deref().as_ref() {
+ return Some("DerefMut");
+ }
+ None
+}
+
+fn process_method_name(name: ast::Name) -> Option<(ast::Name, &'static str)> {
+ let new_name = match &*name.text() {
+ "index" => "index_mut",
+ "as_ref" => "as_mut",
+ "borrow" => "borrow_mut",
+ "deref" => "deref_mut",
+ _ => return None,
+ };
+ Some((name, new_name))
+}
+
+fn process_ret_type(ref_ty: &ast::RetType) -> Option<ast::Type> {
+ let ty = ref_ty.ty()?;
+ let ast::Type::RefType(ref_type) = ty else {
+ return None;
+ };
+ Some(make::ty_ref(ref_type.ty()?, true))
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -139,7 +186,7 @@
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
- &self[index as usize]
+ &mut self[index as usize]
}
}
@@ -188,6 +235,68 @@
}
"#,
);
+
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: as_ref
+struct Foo(i32);
+
+impl core::convert::AsRef$0<i32> for Foo {
+ fn as_ref(&self) -> &i32 {
+ &self.0
+ }
+}
+"#,
+ r#"
+struct Foo(i32);
+
+$0impl core::convert::AsMut<i32> for Foo {
+ fn as_mut(&mut self) -> &mut i32 {
+ &mut self.0
+ }
+}
+
+impl core::convert::AsRef<i32> for Foo {
+ fn as_ref(&self) -> &i32 {
+ &self.0
+ }
+}
+"#,
+ );
+
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: deref
+struct Foo(i32);
+
+impl core::ops::Deref$0 for Foo {
+ type Target = i32;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+"#,
+ r#"
+struct Foo(i32);
+
+$0impl core::ops::DerefMut for Foo {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+}
+
+impl core::ops::Deref for Foo {
+ type Target = i32;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+"#,
+ );
}
#[test]
@@ -287,5 +396,13 @@
impl<T> Index$0<i32> for [T; 3] {}
"#,
);
+ check_assist_not_applicable(
+ generate_mut_trait_impl,
+ r#"
+pub trait AsRef<T: ?Sized> {}
+
+impl AsRef$0<i32> for [T; 3] {}
+"#,
+ );
}
}
diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs
index 4837f92..51c2f65 100644
--- a/crates/ide-assists/src/handlers/generate_new.rs
+++ b/crates/ide-assists/src/handlers/generate_new.rs
@@ -1,5 +1,6 @@
use ide_db::{
- imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
+ imports::import_assets::item_for_path_search, syntax_helpers::suggest_name::NameGenerator,
+ use_trivial_constructor::use_trivial_constructor,
};
use syntax::{
ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make},
@@ -35,10 +36,30 @@
pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
- // We want to only apply this to non-union structs with named fields
let field_list = match strukt.kind() {
- StructKind::Record(named) => named,
- _ => return None,
+ StructKind::Record(named) => {
+ named.fields().filter_map(|f| Some((f.name()?, f.ty()?))).collect::<Vec<_>>()
+ }
+ StructKind::Tuple(tuple) => {
+ let mut name_generator = NameGenerator::default();
+ tuple
+ .fields()
+ .enumerate()
+ .filter_map(|(i, f)| {
+ let ty = f.ty()?;
+ let name = match name_generator.for_type(
+ &ctx.sema.resolve_type(&ty)?,
+ ctx.db(),
+ ctx.edition(),
+ ) {
+ Some(name) => name,
+ None => name_generator.suggest_name(&format!("_{i}")),
+ };
+ Some((make::name(name.as_str()), f.ty()?))
+ })
+ .collect::<Vec<_>>()
+ }
+ StructKind::Unit => return None,
};
// Return early if we've found an existing new fn
@@ -50,11 +71,9 @@
let target = strukt.syntax().text_range();
acc.add(AssistId::generate("generate_new"), "Generate `new`", target, |builder| {
let trivial_constructors = field_list
- .fields()
- .map(|f| {
- let name = f.name()?;
-
- let ty = ctx.sema.resolve_type(&f.ty()?)?;
+ .iter()
+ .map(|(name, ty)| {
+ let ty = ctx.sema.resolve_type(ty)?;
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
@@ -73,34 +92,44 @@
edition,
)?;
- Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr)))
+ Some((make::name_ref(&name.text()), Some(expr)))
})
.collect::<Vec<_>>();
- let params = field_list.fields().enumerate().filter_map(|(i, f)| {
+ let params = field_list.iter().enumerate().filter_map(|(i, (name, ty))| {
if trivial_constructors[i].is_none() {
- let name = f.name()?;
- let ty = f.ty()?;
-
- Some(make::param(make::ident_pat(false, false, name).into(), ty))
+ Some(make::param(make::ident_pat(false, false, name.clone()).into(), ty.clone()))
} else {
None
}
});
let params = make::param_list(None, params);
- let fields = field_list.fields().enumerate().filter_map(|(i, f)| {
- let constructor = trivial_constructors[i].clone();
- if constructor.is_some() {
+ let fields = field_list.iter().enumerate().map(|(i, (name, _))| {
+ if let Some(constructor) = trivial_constructors[i].clone() {
constructor
} else {
- Some(make::record_expr_field(make::name_ref(&f.name()?.text()), None))
+ (make::name_ref(&name.text()), None)
}
});
- let fields = make::record_expr_field_list(fields);
- let record_expr = make::record_expr(make::ext::ident_path("Self"), fields);
- let body = make::block_expr(None, Some(record_expr.into()));
+ let tail_expr: ast::Expr = match strukt.kind() {
+ StructKind::Record(_) => {
+ let fields = fields.map(|(name, expr)| make::record_expr_field(name, expr));
+ let fields = make::record_expr_field_list(fields);
+ make::record_expr(make::ext::ident_path("Self"), fields).into()
+ }
+ StructKind::Tuple(_) => {
+ let args = fields.map(|(arg, expr)| {
+ let arg = || make::expr_path(make::path_unqualified(make::path_segment(arg)));
+ expr.unwrap_or_else(arg)
+ });
+ let arg_list = make::arg_list(args);
+ make::expr_call(make::expr_path(make::ext::ident_path("Self")), arg_list).into()
+ }
+ StructKind::Unit => unreachable!(),
+ };
+ let body = make::block_expr(None, tail_expr.into());
let ret_type = make::ret_type(make::ty_path(make::ext::ident_path("Self")));
@@ -120,8 +149,35 @@
.clone_for_update();
fn_.indent(1.into());
- // Add a tabstop before the name
if let Some(cap) = ctx.config.snippet_cap {
+ match strukt.kind() {
+ StructKind::Tuple(_) => {
+ let struct_args = fn_
+ .body()
+ .unwrap()
+ .syntax()
+ .descendants()
+ .filter(|it| syntax::ast::ArgList::can_cast(it.kind()))
+ .flat_map(|args| args.children())
+ .filter(|it| syntax::ast::PathExpr::can_cast(it.kind()))
+ .enumerate()
+ .filter_map(|(i, node)| {
+ if trivial_constructors[i].is_none() { Some(node) } else { None }
+ });
+ if let Some(fn_params) = fn_.param_list() {
+ for (struct_arg, fn_param) in struct_args.zip(fn_params.params()) {
+ if let Some(fn_pat) = fn_param.pat() {
+ let fn_pat = fn_pat.syntax().clone();
+ builder
+ .add_placeholder_snippet_group(cap, vec![struct_arg, fn_pat]);
+ }
+ }
+ }
+ }
+ _ => {}
+ }
+
+ // Add a tabstop before the name
if let Some(name) = fn_.name() {
builder.add_tabstop_before(cap, name);
}
@@ -157,7 +213,7 @@
}
#[cfg(test)]
-mod tests {
+mod record_tests {
use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
use super::*;
@@ -695,3 +751,308 @@
);
}
}
+
+#[cfg(test)]
+mod tuple_tests {
+ use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_new_with_zst_fields() {
+ check_assist(
+ generate_new,
+ r#"
+struct Empty;
+
+struct Foo(Empty$0);
+"#,
+ r#"
+struct Empty;
+
+struct Foo(Empty);
+
+impl Foo {
+ fn $0new() -> Self {
+ Self(Empty)
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Empty;
+
+struct Foo(String, Empty$0);
+"#,
+ r#"
+struct Empty;
+
+struct Foo(String, Empty);
+
+impl Foo {
+ fn $0new(${1:_0}: String) -> Self {
+ Self(${1:_0}, Empty)
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+enum Empty { Bar }
+
+struct Foo(Empty$0);
+"#,
+ r#"
+enum Empty { Bar }
+
+struct Foo(Empty);
+
+impl Foo {
+ fn $0new() -> Self {
+ Self(Empty::Bar)
+ }
+}
+"#,
+ );
+
+ // make sure the assist only works on unit variants
+ check_assist(
+ generate_new,
+ r#"
+struct Empty {}
+
+struct Foo(Empty$0);
+"#,
+ r#"
+struct Empty {}
+
+struct Foo(Empty);
+
+impl Foo {
+ fn $0new(${1:empty}: Empty) -> Self {
+ Self(${1:empty})
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+enum Empty { Bar {} }
+
+struct Foo(Empty$0);
+"#,
+ r#"
+enum Empty { Bar {} }
+
+struct Foo(Empty);
+
+impl Foo {
+ fn $0new(${1:empty}: Empty) -> Self {
+ Self(${1:empty})
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_new() {
+ check_assist(
+ generate_new,
+ r#"
+struct Foo($0);
+"#,
+ r#"
+struct Foo();
+
+impl Foo {
+ fn $0new() -> Self {
+ Self()
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo<T: Clone>($0);
+"#,
+ r#"
+struct Foo<T: Clone>();
+
+impl<T: Clone> Foo<T> {
+ fn $0new() -> Self {
+ Self()
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo<'a, T: Foo<'a>>($0);
+"#,
+ r#"
+struct Foo<'a, T: Foo<'a>>();
+
+impl<'a, T: Foo<'a>> Foo<'a, T> {
+ fn $0new() -> Self {
+ Self()
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Foo(String$0);
+"#,
+ r#"
+struct Foo(String);
+
+impl Foo {
+ fn $0new(${1:_0}: String) -> Self {
+ Self(${1:_0})
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+struct Vec<T> { };
+struct Foo(String, Vec<i32>$0);
+"#,
+ r#"
+struct Vec<T> { };
+struct Foo(String, Vec<i32>);
+
+impl Foo {
+ fn $0new(${1:_0}: String, ${2:items}: Vec<i32>) -> Self {
+ Self(${1:_0}, ${2:items})
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn check_that_visibility_modifiers_dont_get_brought_in() {
+ check_assist(
+ generate_new,
+ r#"
+struct Vec<T> { };
+struct Foo(pub String, pub Vec<i32>$0);
+"#,
+ r#"
+struct Vec<T> { };
+struct Foo(pub String, pub Vec<i32>);
+
+impl Foo {
+ fn $0new(${1:_0}: String, ${2:items}: Vec<i32>) -> Self {
+ Self(${1:_0}, ${2:items})
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_new_not_applicable_if_fn_exists() {
+ check_assist_not_applicable(
+ generate_new,
+ r#"
+struct Foo($0);
+
+impl Foo {
+ fn new() -> Self {
+ Self
+ }
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ generate_new,
+ r#"
+struct Foo($0);
+
+impl Foo {
+ fn New() -> Self {
+ Self
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generate_new_target() {
+ check_assist_target(
+ generate_new,
+ r#"
+struct SomeThingIrrelevant;
+/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>>($0);
+struct EvenMoreIrrelevant;
+"#,
+ "/// Has a lifetime parameter
+struct Foo<'a, T: Foo<'a>>();",
+ );
+ }
+
+ #[test]
+ fn test_unrelated_new() {
+ check_assist(
+ generate_new,
+ r#"
+pub struct AstId<N: AstNode> {
+ file_id: HirFileId,
+ file_ast_id: FileAstId<N>,
+}
+
+impl<N: AstNode> AstId<N> {
+ pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
+ AstId { file_id, file_ast_id }
+ }
+}
+
+pub struct Source<T>(pub HirFileId,$0 pub T);
+
+impl<T> Source<T> {
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+ Source(self.file_id, f(self.ast))
+ }
+}
+"#,
+ r#"
+pub struct AstId<N: AstNode> {
+ file_id: HirFileId,
+ file_ast_id: FileAstId<N>,
+}
+
+impl<N: AstNode> AstId<N> {
+ pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
+ AstId { file_id, file_ast_id }
+ }
+}
+
+pub struct Source<T>(pub HirFileId, pub T);
+
+impl<T> Source<T> {
+ pub fn $0new(${1:_0}: HirFileId, ${2:_1}: T) -> Self {
+ Self(${1:_0}, ${2:_1})
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+ Source(self.file_id, f(self.ast))
+ }
+}
+"#,
+ );
+ }
+}
diff --git a/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs b/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
new file mode 100644
index 0000000..4e95ceb
--- /dev/null
+++ b/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs
@@ -0,0 +1,1000 @@
+use ast::make;
+use hir::{HasCrate, ModuleDef, Semantics};
+use ide_db::{
+ RootDatabase, famous_defs::FamousDefs, helpers::mod_path_to_ast,
+ imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
+};
+use syntax::{
+ TokenText,
+ ast::{self, AstNode, HasGenericParams, HasName, edit, edit_in_place::Indent},
+};
+
+use crate::{
+ AssistId,
+ assist_context::{AssistContext, Assists},
+ utils::add_cfg_attrs_to,
+};
+
+// Assist: generate_single_field_struct_from
+//
+// Implement From for a single field structure, ignore trivial types.
+//
+// ```
+// # //- minicore: from, phantom_data
+// use core::marker::PhantomData;
+// struct $0Foo<T> {
+// id: i32,
+// _phantom_data: PhantomData<T>,
+// }
+// ```
+// ->
+// ```
+// use core::marker::PhantomData;
+// struct Foo<T> {
+// id: i32,
+// _phantom_data: PhantomData<T>,
+// }
+//
+// impl<T> From<i32> for Foo<T> {
+// fn from(id: i32) -> Self {
+// Self { id, _phantom_data: PhantomData }
+// }
+// }
+// ```
+pub(crate) fn generate_single_field_struct_from(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let strukt_name = ctx.find_node_at_offset::<ast::Name>()?;
+ let adt = ast::Adt::cast(strukt_name.syntax().parent()?)?;
+ let ast::Adt::Struct(strukt) = adt else {
+ return None;
+ };
+
+ let sema = &ctx.sema;
+ let (names, types) = get_fields(&strukt)?;
+
+ let module = sema.scope(strukt.syntax())?.module();
+ let constructors = make_constructors(ctx, module, &types);
+
+ if constructors.iter().filter(|expr| expr.is_none()).count() != 1 {
+ return None;
+ }
+ let main_field_i = constructors.iter().position(Option::is_none)?;
+ if from_impl_exists(&strukt, main_field_i, &ctx.sema).is_some() {
+ return None;
+ }
+
+ let main_field_name =
+ names.as_ref().map_or(TokenText::borrowed("value"), |names| names[main_field_i].text());
+ let main_field_ty = types[main_field_i].clone();
+
+ acc.add(
+ AssistId::generate("generate_single_field_struct_from"),
+ "Generate single field `From`",
+ strukt.syntax().text_range(),
+ |builder| {
+ let indent = strukt.indent_level();
+ let ty_where_clause = strukt.where_clause();
+ let type_gen_params = strukt.generic_param_list();
+ let type_gen_args = type_gen_params.as_ref().map(|params| params.to_generic_args());
+ let trait_gen_args = Some(make::generic_arg_list([ast::GenericArg::TypeArg(
+ make::type_arg(main_field_ty.clone()),
+ )]));
+
+ let ty = make::ty(&strukt_name.text());
+
+ let constructor =
+ make_adt_constructor(names.as_deref(), constructors, &main_field_name);
+ let body = make::block_expr([], Some(constructor));
+
+ let fn_ = make::fn_(
+ None,
+ make::name("from"),
+ None,
+ None,
+ make::param_list(
+ None,
+ [make::param(
+ make::path_pat(make::path_from_text(&main_field_name)),
+ main_field_ty,
+ )],
+ ),
+ body,
+ Some(make::ret_type(make::ty("Self"))),
+ false,
+ false,
+ false,
+ false,
+ )
+ .clone_for_update();
+
+ fn_.indent(1.into());
+
+ let impl_ = make::impl_trait(
+ false,
+ None,
+ trait_gen_args,
+ type_gen_params,
+ type_gen_args,
+ false,
+ make::ty("From"),
+ ty.clone(),
+ None,
+ ty_where_clause.map(|wc| edit::AstNodeEdit::reset_indent(&wc)),
+ None,
+ )
+ .clone_for_update();
+
+ impl_.get_or_create_assoc_item_list().add_item(fn_.into());
+
+ add_cfg_attrs_to(&strukt, &impl_);
+
+ impl_.reindent_to(indent);
+
+ builder.insert(strukt.syntax().text_range().end(), format!("\n\n{indent}{impl_}"));
+ },
+ )
+}
+
+fn make_adt_constructor(
+ names: Option<&[ast::Name]>,
+ constructors: Vec<Option<ast::Expr>>,
+ main_field_name: &TokenText<'_>,
+) -> ast::Expr {
+ if let Some(names) = names {
+ let fields = make::record_expr_field_list(names.iter().zip(constructors).map(
+ |(name, initializer)| {
+ make::record_expr_field(make::name_ref(&name.text()), initializer)
+ },
+ ));
+ make::record_expr(make::path_from_text("Self"), fields).into()
+ } else {
+ let arg_list = make::arg_list(constructors.into_iter().map(|expr| {
+ expr.unwrap_or_else(|| make::expr_path(make::path_from_text(main_field_name)))
+ }));
+ make::expr_call(make::expr_path(make::path_from_text("Self")), arg_list).into()
+ }
+}
+
+fn make_constructors(
+ ctx: &AssistContext<'_>,
+ module: hir::Module,
+ types: &[ast::Type],
+) -> Vec<Option<ast::Expr>> {
+ let (db, sema) = (ctx.db(), &ctx.sema);
+ types
+ .iter()
+ .map(|ty| {
+ let ty = sema.resolve_type(ty)?;
+ if ty.is_unit() {
+ return Some(make::expr_tuple([]).into());
+ }
+ let item_in_ns = ModuleDef::Adt(ty.as_adt()?).into();
+ let edition = module.krate().edition(db);
+
+ let ty_path = module.find_path(
+ db,
+ item_for_path_search(db, item_in_ns)?,
+ ctx.config.import_path_config(),
+ )?;
+
+ use_trivial_constructor(db, mod_path_to_ast(&ty_path, edition), &ty, edition)
+ })
+ .collect()
+}
+
+fn get_fields(strukt: &ast::Struct) -> Option<(Option<Vec<ast::Name>>, Vec<ast::Type>)> {
+ Some(match strukt.kind() {
+ ast::StructKind::Unit => return None,
+ ast::StructKind::Record(fields) => {
+ let names = fields.fields().map(|field| field.name()).collect::<Option<_>>()?;
+ let types = fields.fields().map(|field| field.ty()).collect::<Option<_>>()?;
+ (Some(names), types)
+ }
+ ast::StructKind::Tuple(fields) => {
+ (None, fields.fields().map(|field| field.ty()).collect::<Option<_>>()?)
+ }
+ })
+}
+
+fn from_impl_exists(
+ strukt: &ast::Struct,
+ main_field_i: usize,
+ sema: &Semantics<'_, RootDatabase>,
+) -> Option<()> {
+ let db = sema.db;
+ let strukt = sema.to_def(strukt)?;
+ let krate = strukt.krate(db);
+ let from_trait = FamousDefs(sema, krate).core_convert_From()?;
+ let ty = strukt.fields(db).get(main_field_i)?.ty(db);
+
+ strukt.ty(db).impls_trait(db, from_trait, &[ty]).then_some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::generate_single_field_struct_from;
+
+ #[test]
+ fn works() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo {
+ foo: i32,
+ }
+ "#,
+ r#"
+ struct Foo {
+ foo: i32,
+ }
+
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ struct $0Foo {
+ b1: (),
+ b2: core::marker::PhantomData,
+ foo: i32,
+ a1: (),
+ a2: core::marker::PhantomData,
+ }
+ "#,
+ r#"
+ struct Foo {
+ b1: (),
+ b2: core::marker::PhantomData,
+ foo: i32,
+ a1: (),
+ a2: core::marker::PhantomData,
+ }
+
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { b1: (), b2: core::marker::PhantomData, foo, a1: (), a2: core::marker::PhantomData }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn cfgs() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ #[cfg(feature = "foo")]
+ #[cfg(test)]
+ struct $0Foo {
+ foo: i32,
+ }
+ "#,
+ r#"
+ #[cfg(feature = "foo")]
+ #[cfg(test)]
+ struct Foo {
+ foo: i32,
+ }
+
+ #[cfg(feature = "foo")]
+ #[cfg(test)]
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn indent() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ mod foo {
+ struct $0Foo {
+ foo: i32,
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ struct Foo {
+ foo: i32,
+ }
+
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { foo }
+ }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ mod foo {
+ mod bar {
+ struct $0Foo {
+ foo: i32,
+ }
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ mod bar {
+ struct Foo {
+ foo: i32,
+ }
+
+ impl From<i32> for Foo {
+ fn from(foo: i32) -> Self {
+ Self { foo }
+ }
+ }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn where_clause_indent() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ mod foo {
+ mod bar {
+ trait Trait {}
+ struct $0Foo<T>
+ where
+ T: Trait,
+ {
+ foo: T,
+ }
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ mod bar {
+ trait Trait {}
+ struct Foo<T>
+ where
+ T: Trait,
+ {
+ foo: T,
+ }
+
+ impl<T> From<T> for Foo<T>
+ where
+ T: Trait,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ mod foo {
+ mod bar {
+ trait Trait<const B: bool> {}
+ struct $0Foo<T>
+ where
+ T: Trait<{
+ true
+ }>
+ {
+ foo: T,
+ }
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ mod bar {
+ trait Trait<const B: bool> {}
+ struct Foo<T>
+ where
+ T: Trait<{
+ true
+ }>
+ {
+ foo: T,
+ }
+
+ impl<T> From<T> for Foo<T>
+ where
+ T: Trait<{
+ true
+ }>
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn generics() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T> {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T> {
+ foo: T,
+ }
+
+ impl<T> From<T> for Foo<T> {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send> {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send> {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T> {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send> where T: Sync,{
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send> where T: Sync,{
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send> where T: Sync {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send> where T: Sync {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send> where T: Sync, Self: Send {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send> where T: Sync, Self: Send {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync, Self: Send
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send>
+ where T: Sync, Self: Send
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send>
+ where T: Sync, Self: Send
+ {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync, Self: Send
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send>
+ where T: Sync, Self: Send,
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send>
+ where T: Sync, Self: Send,
+ {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync, Self: Send,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send>
+ where T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send>
+ where T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where T: Sync,
+ Self: Send,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+
+ impl<T: Send> From<T> for Foo<T>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T: Send + Sync>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+ "#,
+ r#"
+ struct Foo<T: Send + Sync>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ foo: T,
+ }
+
+ impl<T: Send + Sync> From<T> for Foo<T>
+ where
+ T: Sync,
+ Self: Send,
+ {
+ fn from(foo: T) -> Self {
+ Self { foo }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn tuple() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+ "#,
+ r#"
+ struct Foo(i32);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value)
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T>(T);
+ "#,
+ r#"
+ struct Foo<T>(T);
+
+ impl<T> From<T> for Foo<T> {
+ fn from(value: T) -> Self {
+ Self(value)
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn trivial() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ use core::marker::PhantomData;
+ struct $0Foo(i32, PhantomData<i32>);
+ "#,
+ r#"
+ use core::marker::PhantomData;
+ struct Foo(i32, PhantomData<i32>);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value, PhantomData)
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ use core::marker::PhantomData;
+ struct $0Foo(i32, PhantomData<()>);
+ "#,
+ r#"
+ use core::marker::PhantomData;
+ struct Foo(i32, PhantomData<()>);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value, PhantomData)
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ use core::marker::PhantomData;
+ struct $0Foo(PhantomData<()>, i32, PhantomData<()>);
+ "#,
+ r#"
+ use core::marker::PhantomData;
+ struct Foo(PhantomData<()>, i32, PhantomData<()>);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(PhantomData, value, PhantomData)
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from, phantom_data
+ use core::marker::PhantomData;
+ struct $0Foo<T>(PhantomData<T>, i32, PhantomData<()>);
+ "#,
+ r#"
+ use core::marker::PhantomData;
+ struct Foo<T>(PhantomData<T>, i32, PhantomData<()>);
+
+ impl<T> From<i32> for Foo<T> {
+ fn from(value: i32) -> Self {
+ Self(PhantomData, value, PhantomData)
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn unit() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32, ());
+ "#,
+ r#"
+ struct Foo(i32, ());
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value, ())
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo((), i32, ());
+ "#,
+ r#"
+ struct Foo((), i32, ());
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self((), value, ())
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo((), (), i32, ());
+ "#,
+ r#"
+ struct Foo((), (), i32, ());
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self((), (), value, ())
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn invalid_multiple_main_field() {
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32, i32);
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T>(i32, T);
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T>(T, T);
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo<T> { foo: T, bar: i32 }
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo { foo: i32, bar: i64 }
+ "#,
+ );
+ }
+
+ #[test]
+ fn exists_other_from() {
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+
+ impl From<&i32> for Foo {
+ fn from(value: &i32) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ r#"
+ struct Foo(i32);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value)
+ }
+ }
+
+ impl From<&i32> for Foo {
+ fn from(value: &i32) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+
+ type X = i32;
+
+ impl From<&X> for Foo {
+ fn from(value: &X) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ r#"
+ struct Foo(i32);
+
+ impl From<i32> for Foo {
+ fn from(value: i32) -> Self {
+ Self(value)
+ }
+ }
+
+ type X = i32;
+
+ impl From<&X> for Foo {
+ fn from(value: &X) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn exists_from() {
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+
+ impl From<i32> for Foo {
+ fn from(_: i32) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ );
+ check_assist_not_applicable(
+ generate_single_field_struct_from,
+ r#"
+ //- minicore: from
+ struct $0Foo(i32);
+
+ type X = i32;
+
+ impl From<X> for Foo {
+ fn from(_: X) -> Self {
+ todo!()
+ }
+ }
+ "#,
+ );
+ }
+}
diff --git a/crates/ide-assists/src/handlers/promote_local_to_const.rs b/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 6316a8f..603be4d 100644
--- a/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -3,8 +3,7 @@
use stdx::to_upper_snake_case;
use syntax::{
AstNode,
- ast::{self, HasName, make},
- ted,
+ ast::{self, HasName, syntax_factory::SyntaxFactory},
};
use crate::{
@@ -69,15 +68,18 @@
"Promote local to constant",
let_stmt.syntax().text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(let_stmt.syntax());
let name = to_upper_snake_case(&name.to_string());
let usages = Definition::Local(local).usages(&ctx.sema).all();
if let Some(usages) = usages.references.get(&ctx.file_id()) {
- let name_ref = make::name_ref(&name);
+ let name_ref = make.name_ref(&name);
for usage in usages {
let Some(usage_name) = usage.name.as_name_ref().cloned() else { continue };
if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage_name) {
- let name_expr = make::expr_path(make::path_from_text(&name));
+ let path = make.ident_path(&name);
+ let name_expr = make.expr_path(path);
utils::replace_record_field_expr(ctx, edit, record_field, name_expr);
} else {
let usage_range = usage.range;
@@ -86,15 +88,17 @@
}
}
- let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer)
- .clone_for_update();
- let let_stmt = edit.make_mut(let_stmt);
+ let item = make.item_const(None, make.name(&name), make.ty(&ty), initializer);
if let Some((cap, name)) = ctx.config.snippet_cap.zip(item.name()) {
- edit.add_tabstop_before(cap, name);
+ let tabstop = edit.make_tabstop_before(cap);
+ editor.add_annotation(name.syntax().clone(), tabstop);
}
- ted::replace(let_stmt.syntax(), item.syntax());
+ editor.replace(let_stmt.syntax(), item.syntax());
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/pull_assignment_up.rs b/crates/ide-assists/src/handlers/pull_assignment_up.rs
index 5f626d2..1b0c313 100644
--- a/crates/ide-assists/src/handlers/pull_assignment_up.rs
+++ b/crates/ide-assists/src/handlers/pull_assignment_up.rs
@@ -1,7 +1,8 @@
use syntax::{
AstNode,
- ast::{self, make},
- ted,
+ algo::find_node_at_range,
+ ast::{self, syntax_factory::SyntaxFactory},
+ syntax_editor::SyntaxEditor,
};
use crate::{
@@ -66,33 +67,51 @@
return None;
}
}
+ let target = tgt.syntax().text_range();
+ let edit_tgt = tgt.syntax().clone_subtree();
+ let assignments: Vec<_> = collector
+ .assignments
+ .into_iter()
+ .filter_map(|(stmt, rhs)| {
+ Some((
+ find_node_at_range::<ast::BinExpr>(
+ &edit_tgt,
+ stmt.syntax().text_range() - target.start(),
+ )?,
+ find_node_at_range::<ast::Expr>(
+ &edit_tgt,
+ rhs.syntax().text_range() - target.start(),
+ )?,
+ ))
+ })
+ .collect();
+
+ let mut editor = SyntaxEditor::new(edit_tgt);
+ for (stmt, rhs) in assignments {
+ let mut stmt = stmt.syntax().clone();
+ if let Some(parent) = stmt.parent() {
+ if ast::ExprStmt::cast(parent.clone()).is_some() {
+ stmt = parent.clone();
+ }
+ }
+ editor.replace(stmt, rhs.syntax());
+ }
+ let new_tgt_root = editor.finish().new_root().clone();
+ let new_tgt = ast::Expr::cast(new_tgt_root)?;
acc.add(
AssistId::refactor_extract("pull_assignment_up"),
"Pull assignment up",
- tgt.syntax().text_range(),
+ target,
move |edit| {
- let assignments: Vec<_> = collector
- .assignments
- .into_iter()
- .map(|(stmt, rhs)| (edit.make_mut(stmt), rhs.clone_for_update()))
- .collect();
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(tgt.syntax());
+ let assign_expr = make.expr_assignment(collector.common_lhs, new_tgt.clone());
+ let assign_stmt = make.expr_stmt(assign_expr.into());
- let tgt = edit.make_mut(tgt);
-
- for (stmt, rhs) in assignments {
- let mut stmt = stmt.syntax().clone();
- if let Some(parent) = stmt.parent() {
- if ast::ExprStmt::cast(parent.clone()).is_some() {
- stmt = parent.clone();
- }
- }
- ted::replace(stmt, rhs.syntax());
- }
- let assign_expr = make::expr_assignment(collector.common_lhs, tgt.clone());
- let assign_stmt = make::expr_stmt(assign_expr);
-
- ted::replace(tgt.syntax(), assign_stmt.syntax().clone_for_update());
+ editor.replace(tgt.syntax(), assign_stmt.syntax());
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/remove_dbg.rs b/crates/ide-assists/src/handlers/remove_dbg.rs
index 52ace03..9356d02 100644
--- a/crates/ide-assists/src/handlers/remove_dbg.rs
+++ b/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -1,8 +1,9 @@
use itertools::Itertools;
use syntax::{
- Edition, NodeOrToken, SyntaxElement, T, TextRange, TextSize,
- ast::{self, AstNode, AstToken, make},
- match_ast, ted,
+ Edition, NodeOrToken, SyntaxNode, SyntaxToken, T,
+ ast::{self, AstNode, make},
+ match_ast,
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists};
@@ -40,21 +41,23 @@
let replacements =
macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>();
-
- acc.add(
- AssistId::quick_fix("remove_dbg"),
- "Remove dbg!()",
- replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?,
- |builder| {
- for (range, expr) in replacements {
- if let Some(expr) = expr {
- builder.replace(range, expr.to_string());
- } else {
- builder.delete(range);
- }
+ let target = replacements
+ .iter()
+ .flat_map(|(node_or_token, _)| node_or_token.iter())
+ .map(|t| t.text_range())
+ .reduce(|acc, range| acc.cover(range))?;
+ acc.add(AssistId::quick_fix("remove_dbg"), "Remove dbg!()", target, |builder| {
+ let mut editor = builder.make_editor(ctx.source_file().syntax());
+ for (range, expr) in replacements {
+ if let Some(expr) = expr {
+ editor.insert(Position::before(range[0].clone()), expr.syntax().clone_for_update());
}
- },
- )
+ for node_or_token in range {
+ editor.delete(node_or_token);
+ }
+ }
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
+ })
}
/// Returns `None` when either
@@ -63,7 +66,9 @@
/// - (`macro_expr` has no parent - is that possible?)
///
/// Returns `Some(_, None)` when the macro call should just be removed.
-fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Option<ast::Expr>)> {
+fn compute_dbg_replacement(
+ macro_expr: ast::MacroExpr,
+) -> Option<(Vec<NodeOrToken<SyntaxNode, SyntaxToken>>, Option<ast::Expr>)> {
let macro_call = macro_expr.macro_call()?;
let tt = macro_call.token_tree()?;
let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?);
@@ -88,22 +93,22 @@
match_ast! {
match parent {
ast::StmtList(_) => {
- let range = macro_expr.syntax().text_range();
- let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) {
- Some(start) => range.cover_offset(start),
- None => range,
- };
- (range, None)
+ let mut replace = vec![macro_expr.syntax().clone().into()];
+ if let Some(prev_sibling) = macro_expr.syntax().prev_sibling_or_token()
+ && prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE {
+ replace.push(prev_sibling);
+ }
+ (replace, None)
},
ast::ExprStmt(it) => {
- let range = it.syntax().text_range();
- let range = match whitespace_start(it.syntax().prev_sibling_or_token()) {
- Some(start) => range.cover_offset(start),
- None => range,
- };
- (range, None)
+ let mut replace = vec![it.syntax().clone().into()];
+ if let Some(prev_sibling) = it.syntax().prev_sibling_or_token()
+ && prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE {
+ replace.push(prev_sibling);
+ }
+ (replace, None)
},
- _ => (macro_call.syntax().text_range(), Some(make::ext::expr_unit())),
+ _ => (vec![macro_call.syntax().clone().into()], Some(make::ext::expr_unit())),
}
}
}
@@ -147,13 +152,13 @@
};
let expr = replace_nested_dbgs(expr.clone());
let expr = if wrap { make::expr_paren(expr).into() } else { expr.clone_subtree() };
- (macro_call.syntax().text_range(), Some(expr))
+ (vec![macro_call.syntax().clone().into()], Some(expr))
}
// dbg!(expr0, expr1, ...)
exprs => {
let exprs = exprs.iter().cloned().map(replace_nested_dbgs);
let expr = make::expr_tuple(exprs);
- (macro_call.syntax().text_range(), Some(expr.into()))
+ (vec![macro_call.syntax().clone().into()], Some(expr.into()))
}
})
}
@@ -178,8 +183,8 @@
return replaced;
}
- let expanded = expanded.clone_for_update();
-
+ let expanded = expanded.clone_subtree();
+ let mut editor = SyntaxEditor::new(expanded.syntax().clone());
// We need to collect to avoid mutation during traversal.
let macro_exprs: Vec<_> =
expanded.syntax().descendants().filter_map(ast::MacroExpr::cast).collect();
@@ -191,17 +196,13 @@
};
if let Some(expr) = expr_opt {
- ted::replace(mac.syntax(), expr.syntax().clone_for_update());
+ editor.replace(mac.syntax(), expr.syntax().clone_for_update());
} else {
- ted::remove(mac.syntax());
+ editor.delete(mac.syntax());
}
}
-
- expanded
-}
-
-fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> {
- Some(it?.into_token().and_then(ast::Whitespace::cast)?.syntax().text_range().start())
+ let expanded_syntax = editor.finish().new_root().clone();
+ ast::Expr::cast(expanded_syntax).unwrap()
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
index e933bcc..5ef8ba4 100644
--- a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
+++ b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -1,8 +1,5 @@
use ide_db::syntax_helpers::suggest_name;
-use syntax::{
- ast::{self, AstNode, make},
- ted,
-};
+use syntax::ast::{self, AstNode, syntax_factory::SyntaxFactory};
use crate::{AssistContext, AssistId, Assists};
@@ -60,21 +57,24 @@
message,
call_expr.syntax().text_range(),
|edit| {
- let call_expr = edit.make_mut(call_expr);
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(call_expr.syntax());
- let var_pat = make::ident_pat(false, false, make::name(&var_name));
- let pat = make::tuple_struct_pat(make::ext::ident_path(text), [var_pat.into()]);
- let let_expr = make::expr_let(pat.into(), receiver).clone_for_update();
+ let var_pat = make.ident_pat(false, false, make.name(&var_name));
+ let pat = make.tuple_struct_pat(make.ident_path(text), [var_pat.into()]);
+ let let_expr = make.expr_let(pat.into(), receiver);
- if let Some(cap) = ctx.config.snippet_cap {
- if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() {
- if let Some(first_var) = pat.fields().next() {
- edit.add_placeholder_snippet(cap, first_var);
- }
- }
+ if let Some(cap) = ctx.config.snippet_cap
+ && let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat()
+ && let Some(first_var) = pat.fields().next()
+ {
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(first_var.syntax(), placeholder);
}
- ted::replace(call_expr.syntax(), let_expr.syntax());
+ editor.replace(call_expr.syntax(), let_expr.syntax());
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
index 109269b..504e12f 100644
--- a/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
+++ b/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs
@@ -1,8 +1,7 @@
use ide_db::assists::AssistId;
use syntax::{
AstNode, T,
- ast::{self, make},
- ted,
+ ast::{self, syntax_factory::SyntaxFactory},
};
use crate::{AssistContext, Assists};
@@ -37,8 +36,7 @@
RCur,
}
- let makro = ctx.find_node_at_offset::<ast::MacroCall>()?.clone_for_update();
- let makro_text_range = makro.syntax().text_range();
+ let makro = ctx.find_node_at_offset::<ast::MacroCall>()?;
let cursor_offset = ctx.offset();
let semicolon = makro.semicolon_token();
@@ -71,24 +69,28 @@
},
token_tree.syntax().text_range(),
|builder| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = builder.make_editor(makro.syntax());
+
match token {
MacroDelims::LPar | MacroDelims::RPar => {
- ted::replace(ltoken, make::token(T!['{']));
- ted::replace(rtoken, make::token(T!['}']));
+ editor.replace(ltoken, make.token(T!['{']));
+ editor.replace(rtoken, make.token(T!['}']));
if let Some(sc) = semicolon {
- ted::remove(sc);
+ editor.delete(sc);
}
}
MacroDelims::LBra | MacroDelims::RBra => {
- ted::replace(ltoken, make::token(T!['(']));
- ted::replace(rtoken, make::token(T![')']));
+ editor.replace(ltoken, make.token(T!['(']));
+ editor.replace(rtoken, make.token(T![')']));
}
MacroDelims::LCur | MacroDelims::RCur => {
- ted::replace(ltoken, make::token(T!['[']));
- ted::replace(rtoken, make::token(T![']']));
+ editor.replace(ltoken, make.token(T!['[']));
+ editor.replace(rtoken, make.token(T![']']));
}
}
- builder.replace(makro_text_range, makro.syntax().text());
+ editor.add_mappings(make.finish_with_mappings());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
index 5aedff5..7b0f2dc 100644
--- a/crates/ide-assists/src/handlers/unmerge_match_arm.rs
+++ b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -1,8 +1,7 @@
use syntax::{
Direction, SyntaxKind, T,
- algo::neighbor,
- ast::{self, AstNode, edit::IndentLevel, make},
- ted::{self, Position},
+ ast::{self, AstNode, edit::IndentLevel, syntax_factory::SyntaxFactory},
+ syntax_editor::{Element, Position},
};
use crate::{AssistContext, AssistId, Assists};
@@ -33,7 +32,7 @@
// ```
pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let pipe_token = ctx.find_token_syntax_at_offset(T![|])?;
- let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update();
+ let or_pat = ast::OrPat::cast(pipe_token.parent()?)?;
if or_pat.leading_pipe().is_some_and(|it| it == pipe_token) {
return None;
}
@@ -44,13 +43,14 @@
// without `OrPat`.
let new_parent = match_arm.syntax().parent()?;
- let old_parent_range = new_parent.text_range();
acc.add(
AssistId::refactor_rewrite("unmerge_match_arm"),
"Unmerge match arm",
pipe_token.text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(&new_parent);
let pats_after = pipe_token
.siblings_with_tokens(Direction::Next)
.filter_map(|it| ast::Pat::cast(it.into_node()?))
@@ -59,11 +59,9 @@
let new_pat = if pats_after.len() == 1 {
pats_after[0].clone()
} else {
- make::or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
+ make.or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
};
- let new_match_arm =
- make::match_arm(new_pat, match_arm.guard(), match_arm_body).clone_for_update();
-
+ let new_match_arm = make.match_arm(new_pat, match_arm.guard(), match_arm_body);
let mut pipe_index = pipe_token.index();
if pipe_token
.prev_sibling_or_token()
@@ -71,10 +69,13 @@
{
pipe_index -= 1;
}
- or_pat.syntax().splice_children(
- pipe_index..or_pat.syntax().children_with_tokens().count(),
- Vec::new(),
- );
+ for child in or_pat
+ .syntax()
+ .children_with_tokens()
+ .skip_while(|child| child.index() < pipe_index)
+ {
+ editor.delete(child.syntax_element());
+ }
let mut insert_after_old_arm = Vec::new();
@@ -86,33 +87,19 @@
// body is a block, but we don't bother to check that.
// - Missing after the arm with arms after, if the arm body is a block. In this case
// we don't want to insert a comma at all.
- let has_comma_after =
- std::iter::successors(match_arm.syntax().last_child_or_token(), |it| {
- it.prev_sibling_or_token()
- })
- .map(|it| it.kind())
- .find(|it| !it.is_trivia())
- == Some(T![,]);
- let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
- if !has_comma_after && !has_arms_after {
- insert_after_old_arm.push(make::token(T![,]).into());
+ let has_comma_after = match_arm.comma_token().is_some();
+ if !has_comma_after && !match_arm.expr().unwrap().is_block_like() {
+ insert_after_old_arm.push(make.token(T![,]).into());
}
let indent = IndentLevel::from_node(match_arm.syntax());
- insert_after_old_arm.push(make::tokens::whitespace(&format!("\n{indent}")).into());
+ insert_after_old_arm.push(make.whitespace(&format!("\n{indent}")).into());
insert_after_old_arm.push(new_match_arm.syntax().clone().into());
- ted::insert_all_raw(Position::after(match_arm.syntax()), insert_after_old_arm);
-
- if has_comma_after {
- ted::insert_raw(
- Position::last_child_of(new_match_arm.syntax()),
- make::token(T![,]),
- );
- }
-
- edit.replace(old_parent_range, new_parent.to_string());
+ editor.insert_all(Position::after(match_arm.syntax()), insert_after_old_arm);
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -258,7 +245,7 @@
let x = X::A;
let y = match x {
X::A => 1i32,
- X::B => 1i32
+ X::B => 1i32,
};
}
"#,
@@ -276,7 +263,7 @@
fn main() {
let x = X::A;
match x {
- X::A $0| X::B => {},
+ X::A $0| X::B => {}
}
}
"#,
@@ -287,8 +274,8 @@
fn main() {
let x = X::A;
match x {
- X::A => {},
- X::B => {},
+ X::A => {}
+ X::B => {}
}
}
"#,
diff --git a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
index e1b9467..5183566 100644
--- a/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
+++ b/crates/ide-assists/src/handlers/wrap_unwrap_cfg_attr.rs
@@ -2,8 +2,7 @@
use itertools::Itertools;
use syntax::{
NodeOrToken, SyntaxToken, T, TextRange, algo,
- ast::{self, AstNode, make},
- ted::{self, Position},
+ ast::{self, AstNode, make, syntax_factory::SyntaxFactory},
};
use crate::{AssistContext, AssistId, Assists};
@@ -173,40 +172,45 @@
}
}
let handle_source_change = |edit: &mut SourceChangeBuilder| {
- let new_derive = make::attr_outer(make::meta_token_tree(
- make::ext::ident_path("derive"),
- make::token_tree(T!['('], new_derive),
- ))
- .clone_for_update();
- let meta = make::meta_token_tree(
- make::ext::ident_path("cfg_attr"),
- make::token_tree(
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(attr.syntax());
+ let new_derive = make.attr_outer(
+ make.meta_token_tree(make.ident_path("derive"), make.token_tree(T!['('], new_derive)),
+ );
+ let meta = make.meta_token_tree(
+ make.ident_path("cfg_attr"),
+ make.token_tree(
T!['('],
vec![
- NodeOrToken::Token(make::token(T![,])),
- NodeOrToken::Token(make::tokens::whitespace(" ")),
- NodeOrToken::Token(make::tokens::ident("derive")),
- NodeOrToken::Node(make::token_tree(T!['('], cfg_derive_tokens)),
+ NodeOrToken::Token(make.token(T![,])),
+ NodeOrToken::Token(make.whitespace(" ")),
+ NodeOrToken::Token(make.ident("derive")),
+ NodeOrToken::Node(make.token_tree(T!['('], cfg_derive_tokens)),
],
),
);
- // Remove the derive attribute
- let edit_attr = edit.make_syntax_mut(attr.syntax().clone());
- ted::replace(edit_attr, new_derive.syntax().clone());
- let cfg_attr = make::attr_outer(meta).clone_for_update();
-
- ted::insert_all_raw(
- Position::after(new_derive.syntax().clone()),
- vec![make::tokens::whitespace("\n").into(), cfg_attr.syntax().clone().into()],
+ let cfg_attr = make.attr_outer(meta);
+ editor.replace_with_many(
+ attr.syntax(),
+ vec![
+ new_derive.syntax().clone().into(),
+ make.whitespace("\n").into(),
+ cfg_attr.syntax().clone().into(),
+ ],
);
+
if let Some(snippet_cap) = ctx.config.snippet_cap {
if let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{
- edit.add_tabstop_after_token(snippet_cap, first_meta)
+ let tabstop = edit.make_tabstop_after(snippet_cap);
+ editor.add_annotation(first_meta, tabstop);
}
}
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
};
acc.add(
@@ -221,10 +225,10 @@
let range = attr.syntax().text_range();
let path = attr.path()?;
let handle_source_change = |edit: &mut SourceChangeBuilder| {
- let mut raw_tokens = vec![
- NodeOrToken::Token(make::token(T![,])),
- NodeOrToken::Token(make::tokens::whitespace(" ")),
- ];
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(attr.syntax());
+ let mut raw_tokens =
+ vec![NodeOrToken::Token(make.token(T![,])), NodeOrToken::Token(make.whitespace(" "))];
path.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
raw_tokens.push(NodeOrToken::Token(token));
@@ -232,9 +236,9 @@
});
if let Some(meta) = attr.meta() {
if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) {
- raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
raw_tokens.push(NodeOrToken::Token(eq));
- raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
+ raw_tokens.push(NodeOrToken::Token(make.whitespace(" ")));
expr.syntax().descendants_with_tokens().for_each(|it| {
if let NodeOrToken::Token(token) = it {
@@ -245,26 +249,24 @@
raw_tokens.extend(tt.token_trees_and_tokens());
}
}
- let meta = make::meta_token_tree(
- make::ext::ident_path("cfg_attr"),
- make::token_tree(T!['('], raw_tokens),
- );
- let cfg_attr = if attr.excl_token().is_some() {
- make::attr_inner(meta)
- } else {
- make::attr_outer(meta)
- }
- .clone_for_update();
- let attr_syntax = edit.make_syntax_mut(attr.syntax().clone());
- ted::replace(attr_syntax, cfg_attr.syntax());
+ let meta =
+ make.meta_token_tree(make.ident_path("cfg_attr"), make.token_tree(T!['('], raw_tokens));
+ let cfg_attr =
+ if attr.excl_token().is_some() { make.attr_inner(meta) } else { make.attr_outer(meta) };
+
+ editor.replace(attr.syntax(), cfg_attr.syntax());
if let Some(snippet_cap) = ctx.config.snippet_cap {
if let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{
- edit.add_tabstop_after_token(snippet_cap, first_meta)
+ let tabstop = edit.make_tabstop_after(snippet_cap);
+ editor.add_annotation(first_meta, tabstop);
}
}
+
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
};
acc.add(
AssistId::refactor("wrap_unwrap_cfg_attr"),
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index c260443..cde0d87 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -172,6 +172,7 @@
mod generate_is_empty_from_len;
mod generate_mut_trait_impl;
mod generate_new;
+ mod generate_single_field_struct_from;
mod generate_trait_from_impl;
mod inline_call;
mod inline_const_as_literal;
@@ -305,6 +306,7 @@
generate_mut_trait_impl::generate_mut_trait_impl,
generate_new::generate_new,
generate_trait_from_impl::generate_trait_from_impl,
+ generate_single_field_struct_from::generate_single_field_struct_from,
inline_call::inline_call,
inline_call::inline_into_callers,
inline_const_as_literal::inline_const_as_literal,
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 72f7195..fc1c692 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -1933,7 +1933,7 @@
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
- &self[index as usize]
+ &mut self[index as usize]
}
}
@@ -1995,6 +1995,34 @@
}
#[test]
+fn doctest_generate_single_field_struct_from() {
+ check_doc_test(
+ "generate_single_field_struct_from",
+ r#####"
+//- minicore: from, phantom_data
+use core::marker::PhantomData;
+struct $0Foo<T> {
+ id: i32,
+ _phantom_data: PhantomData<T>,
+}
+"#####,
+ r#####"
+use core::marker::PhantomData;
+struct Foo<T> {
+ id: i32,
+ _phantom_data: PhantomData<T>,
+}
+
+impl<T> From<i32> for Foo<T> {
+ fn from(id: i32) -> Self {
+ Self { id, _phantom_data: PhantomData }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_trait_from_impl() {
check_doc_test(
"generate_trait_from_impl",
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 1a91053..fbce1d3 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -1,5 +1,7 @@
//! Assorted functions shared by several assists.
+use std::slice;
+
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
use hir::{
DisplayTarget, HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution,
@@ -565,6 +567,7 @@
///
/// This is useful for traits like `PartialEq`, since `impl<T> PartialEq for U<T>` often requires `T: PartialEq`.
// FIXME: migrate remaining uses to `generate_trait_impl`
+#[allow(dead_code)]
pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &str) -> String {
generate_impl_text_inner(adt, Some(trait_text), true, code)
}
@@ -592,12 +595,10 @@
let generic_params = adt.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
- let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
- match param {
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+ let param = match param {
ast::TypeOrConstParam::Type(param) => {
- let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
- param.remove_default();
let mut bounds =
param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect());
if let Some(trait_) = trait_text {
@@ -608,17 +609,16 @@
}
};
// `{ty_param}: {bounds}`
- let param =
- make::type_param(param.name().unwrap(), make::type_bound_list(bounds));
+ let param = make::type_param(param.name()?, make::type_bound_list(bounds));
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
- let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
- param.remove_default();
+ let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
- }
+ };
+ Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
@@ -693,12 +693,10 @@
let generic_params = adt.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
- let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
- match param {
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+ let param = match param {
ast::TypeOrConstParam::Type(param) => {
- let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
- param.remove_default();
let mut bounds =
param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect());
if let Some(trait_) = &trait_ {
@@ -709,17 +707,16 @@
}
};
// `{ty_param}: {bounds}`
- let param =
- make::type_param(param.name().unwrap(), make::type_bound_list(bounds));
+ let param = make::type_param(param.name()?, make::type_bound_list(bounds));
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
- let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
- param.remove_default();
+ let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
- }
+ };
+ Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
@@ -747,16 +744,23 @@
.clone_for_update();
// Copy any cfg attrs from the original adt
- let cfg_attrs = adt
- .attrs()
- .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false));
- for attr in cfg_attrs {
- impl_.add_attr(attr.clone_for_update());
- }
+ add_cfg_attrs_to(adt, &impl_);
impl_
}
+pub(crate) fn add_cfg_attrs_to<T, U>(from: &T, to: &U)
+where
+ T: HasAttrs,
+ U: AttrsOwnerEdit,
+{
+ let cfg_attrs =
+ from.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg"));
+ for attr in cfg_attrs {
+ to.add_attr(attr.clone_for_update());
+ }
+}
+
pub(crate) fn add_method_to_adt(
builder: &mut SourceChangeBuilder,
adt: &ast::Adt,
@@ -912,7 +916,7 @@
) -> Option<(ReferenceConversionType, bool)> {
let str_type = hir::BuiltinType::str().ty(db);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type.clone()])
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&str_type))
.then_some((ReferenceConversionType::AsRefStr, could_deref_to_target(ty, &str_type, db)))
}
@@ -924,7 +928,7 @@
let type_argument = ty.type_arguments().next()?;
let slice_type = hir::Type::new_slice(type_argument);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type.clone()]).then_some((
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&slice_type)).then_some((
ReferenceConversionType::AsRefSlice,
could_deref_to_target(ty, &slice_type, db),
))
@@ -937,10 +941,11 @@
) -> Option<(ReferenceConversionType, bool)> {
let type_argument = ty.type_arguments().next()?;
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument.clone()]).then_some((
- ReferenceConversionType::Dereferenced,
- could_deref_to_target(ty, &type_argument, db),
- ))
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&type_argument))
+ .then_some((
+ ReferenceConversionType::Dereferenced,
+ could_deref_to_target(ty, &type_argument, db),
+ ))
}
fn handle_option_as_ref(
diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 092219a..975c2f0 100644
--- a/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -37,6 +37,7 @@
SymbolKind, documentation::HasDocs, path_transform::PathTransform,
syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items,
};
+use syntax::ast::HasGenericParams;
use syntax::{
AstNode, SmolStr, SyntaxElement, SyntaxKind, T, TextRange, ToSmolStr,
ast::{self, HasGenericArgs, HasTypeBounds, edit_in_place::AttrsOwnerEdit, make},
@@ -390,6 +391,12 @@
} else if let Some(end) = transformed_ty.eq_token().map(|tok| tok.text_range().start())
{
end
+ } else if let Some(end) = transformed_ty
+ .where_clause()
+ .and_then(|wc| wc.where_token())
+ .map(|tok| tok.text_range().start())
+ {
+ end
} else if let Some(end) =
transformed_ty.semicolon_token().map(|tok| tok.text_range().start())
{
@@ -400,17 +407,29 @@
let len = end - start;
let mut decl = transformed_ty.syntax().text().slice(..len).to_string();
- if !decl.ends_with(' ') {
- decl.push(' ');
- }
- decl.push_str("= ");
+ decl.truncate(decl.trim_end().len());
+ decl.push_str(" = ");
+
+ let wc = transformed_ty
+ .where_clause()
+ .map(|wc| {
+ let ws = wc
+ .where_token()
+ .and_then(|it| it.prev_token())
+ .filter(|token| token.kind() == SyntaxKind::WHITESPACE)
+ .map(|token| token.to_string())
+ .unwrap_or_else(|| " ".into());
+ format!("{ws}{wc}")
+ })
+ .unwrap_or_default();
match ctx.config.snippet_cap {
Some(cap) => {
- let snippet = format!("{decl}$0;");
+ let snippet = format!("{decl}$0{wc};");
item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
}
None => {
+ decl.push_str(&wc);
item.text_edit(TextEdit::replace(replacement_range, decl));
}
};
@@ -1440,6 +1459,30 @@
"#,
);
}
+ #[test]
+ fn includes_where_clause() {
+ check_edit(
+ "type Ty",
+ r#"
+trait Tr {
+ type Ty where Self: Copy;
+}
+
+impl Tr for () {
+ $0
+}
+"#,
+ r#"
+trait Tr {
+ type Ty where Self: Copy;
+}
+
+impl Tr for () {
+ type Ty = $0 where Self: Copy;
+}
+"#,
+ );
+ }
#[test]
fn strips_comments() {
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs
index 411902f..46a3630 100644
--- a/crates/ide-completion/src/tests/attribute.rs
+++ b/crates/ide-completion/src/tests/attribute.rs
@@ -878,6 +878,7 @@
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialEq macro PartialEq
de PartialEq, Eq
@@ -900,6 +901,7 @@
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -921,6 +923,7 @@
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -942,6 +945,7 @@
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialOrd
de PartialOrd, Ord
diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs
index 179d669..ac32649 100644
--- a/crates/ide-completion/src/tests/item_list.rs
+++ b/crates/ide-completion/src/tests/item_list.rs
@@ -458,6 +458,33 @@
r"
struct A;
trait B {
+type O<'a>
+where
+Self: 'a;
+}
+impl B for A {
+$0
+}
+",
+ r#"
+struct A;
+trait B {
+type O<'a>
+where
+Self: 'a;
+}
+impl B for A {
+type O<'a> = $0
+where
+Self: 'a;
+}
+"#,
+ );
+ check_edit(
+ "type O",
+ r"
+struct A;
+trait B {
type O: ?Sized = u32;
}
impl B for A {
diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs
index 994150b..8e68738 100644
--- a/crates/ide-db/src/famous_defs.rs
+++ b/crates/ide-db/src/famous_defs.rs
@@ -106,6 +106,18 @@
self.find_trait("core:convert:AsRef")
}
+ pub fn core_convert_AsMut(&self) -> Option<Trait> {
+ self.find_trait("core:convert:AsMut")
+ }
+
+ pub fn core_borrow_Borrow(&self) -> Option<Trait> {
+ self.find_trait("core:borrow:Borrow")
+ }
+
+ pub fn core_borrow_BorrowMut(&self) -> Option<Trait> {
+ self.find_trait("core:borrow:BorrowMut")
+ }
+
pub fn core_ops_ControlFlow(&self) -> Option<Enum> {
self.find_enum("core:ops:ControlFlow")
}
diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs
index de8a429..f9eb44d 100644
--- a/crates/ide-db/src/generated/lints.rs
+++ b/crates/ide-db/src/generated/lints.rs
@@ -4711,9 +4711,9 @@
label: "const_trait_impl",
description: r##"# `const_trait_impl`
-The tracking issue for this feature is: [#67792]
+The tracking issue for this feature is: [#143874]
-[#67792]: https://github.com/rust-lang/rust/issues/67792
+[#143874]: https://github.com/rust-lang/rust/issues/143874
------------------------
"##,
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 5356614..e661857 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -272,5 +272,5 @@
.display_name
.as_deref()
.cloned()
- .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize))
+ .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).index() as usize))
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 7d460f7..9cf0bcf 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -317,7 +317,7 @@
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -332,7 +332,7 @@
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -341,7 +341,7 @@
if let Definition::SelfType(impl_) = self {
return match impl_.source(db).map(|src| src.syntax().cloned()) {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -360,7 +360,7 @@
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -531,7 +531,7 @@
node.token_at_offset(offset)
.find(|it| {
// `name` is stripped of raw ident prefix. See the comment on name retrieval below.
- it.text().trim_start_matches("r#") == name
+ it.text().trim_start_matches('\'').trim_start_matches("r#") == name
})
.into_iter()
.flat_map(move |token| {
@@ -938,7 +938,12 @@
})
};
// We need to search without the `r#`, hence `as_str` access.
- self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.as_str().to_smolstr())
+ // We strip `'` from lifetimes and labels as otherwise they may not match with raw-escaped ones,
+ // e.g. if we search `'foo` we won't find `'r#foo`.
+ self.def
+ .name(sema.db)
+ .or_else(self_kw_refs)
+ .map(|it| it.as_str().trim_start_matches('\'').to_smolstr())
}
};
let name = match &name {
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index de046e7..973256c 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -11,6 +11,40 @@
},
[
FileSymbol {
+ name: "A",
+ def: Variant(
+ Variant {
+ id: EnumVariantId(
+ 7800,
+ ),
+ },
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: VARIANT,
+ range: 201..202,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 201..202,
+ },
+ ),
+ },
+ container_name: Some(
+ "Enum",
+ ),
+ is_alias: false,
+ is_assoc: true,
+ is_import: false,
+ do_not_complete: Yes,
+ },
+ FileSymbol {
name: "Alias",
def: TypeAlias(
TypeAlias {
@@ -43,6 +77,40 @@
do_not_complete: Yes,
},
FileSymbol {
+ name: "B",
+ def: Variant(
+ Variant {
+ id: EnumVariantId(
+ 7801,
+ ),
+ },
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ EditionedFileId(
+ Id(2000),
+ ),
+ ),
+ ptr: SyntaxNodePtr {
+ kind: VARIANT,
+ range: 204..205,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 204..205,
+ },
+ ),
+ },
+ container_name: Some(
+ "Enum",
+ ),
+ is_alias: false,
+ is_assoc: true,
+ is_import: false,
+ do_not_complete: Yes,
+ },
+ FileSymbol {
name: "CONST",
def: Const(
Const {
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs b/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
index 06f3575..7402133 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
@@ -183,4 +183,28 @@
"#,
);
}
+
+ #[test]
+ fn generic_assoc_type_infer_lifetime_in_expr_position() {
+ check_diagnostics(
+ r#"
+//- minicore: sized
+struct Player;
+
+struct Foo<'c, C> {
+ _v: &'c C,
+}
+trait WithSignals: Sized {
+ type SignalCollection<'c, C>;
+ fn __signals_from_external(&self) -> Self::SignalCollection<'_, Self>;
+}
+impl WithSignals for Player {
+ type SignalCollection<'c, C> = Foo<'c, C>;
+ fn __signals_from_external(&self) -> Self::SignalCollection<'_, Self> {
+ Self::SignalCollection { _v: self }
+ }
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index 546512a..c39e00e 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -242,8 +242,8 @@
fn f() {
outer!();
-} //^^^^^^^^ error: leftover tokens
- //^^^^^^^^ error: Syntax Error in Expansion: expected expression
+} //^^^^^^ error: leftover tokens
+ //^^^^^^ error: Syntax Error in Expansion: expected expression
"#,
)
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 8a5d82b..7da799e 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -66,7 +66,7 @@
let current_module =
ctx.sema.scope(d.field_list_parent.to_node(&root).syntax()).map(|it| it.module());
let range = InFile::new(d.file, d.field_list_parent.text_range())
- .original_node_file_range_rooted(ctx.sema.db);
+ .original_node_file_range_rooted_opt(ctx.sema.db)?;
let build_text_edit = |new_syntax: &SyntaxNode, old_syntax| {
let edit = {
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index d8f6e81..17caf63 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -983,4 +983,19 @@
"#,
);
}
+
+ #[test]
+ fn naked_asm_is_safe() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! naked_asm { () => {} }
+
+#[unsafe(naked)]
+extern "C" fn naked() {
+ naked_asm!("");
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 0928262..1e80d02 100644
--- a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -239,4 +239,22 @@
"#,
)
}
+
+ #[test]
+ fn regression_20155() {
+ check_diagnostics(
+ r#"
+//- minicore: copy, option
+struct Box(i32);
+fn test() {
+ let b = Some(Box(0));
+ || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 4327b12..fc2648e 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -77,6 +77,7 @@
redundant_item_def: String,
range: TextRange,
) -> Option<Vec<Assist>> {
+ let file_id = d.file_id.file_id()?;
let add_assoc_item_def = |builder: &mut SourceChangeBuilder| -> Option<()> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.file_id);
@@ -90,12 +91,14 @@
let trait_def = d.trait_.source(db)?.value;
let l_curly = trait_def.assoc_item_list()?.l_curly_token()?.text_range();
let where_to_insert =
- hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted(db).range;
+ hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted_opt(db)?;
+ if where_to_insert.file_id != file_id {
+ return None;
+ }
- builder.insert(where_to_insert.end(), redundant_item_def);
+ builder.insert(where_to_insert.range.end(), redundant_item_def);
Some(())
};
- let file_id = d.file_id.file_id()?;
let mut source_change_builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
add_assoc_item_def(&mut source_change_builder)?;
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 1f2d671..dcca85d 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -120,8 +120,7 @@
let call = ast::MethodCallExpr::cast(expr.syntax().clone())?;
let range = InFile::new(expr_ptr.file_id, call.syntax().text_range())
- .original_node_file_range_rooted(db)
- .range;
+ .original_node_file_range_rooted_opt(db)?;
let receiver = call.receiver()?;
let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original;
@@ -174,18 +173,16 @@
let assoc_func_call_expr_string = make::expr_call(assoc_func_path, args).to_string();
- let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;
-
Some(Assist {
id: AssistId::quick_fix("method_call_to_assoc_func_call_fix"),
label: Label::new(format!(
"Use associated func call instead: `{assoc_func_call_expr_string}`"
)),
group: None,
- target: range,
+ target: range.range,
source_change: Some(SourceChange::from_text_edit(
- file_id.file_id(ctx.sema.db),
- TextEdit::replace(range, assoc_func_call_expr_string),
+ range.file_id.file_id(ctx.sema.db),
+ TextEdit::replace(range.range, assoc_func_call_expr_string),
)),
command: None,
})
@@ -300,7 +297,7 @@
}
fn main() {
m!(());
- // ^^^^^^ error: no method `foo` on type `()`
+ // ^^ error: no method `foo` on type `()`
}
"#,
);
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index 4b8d07a..7a04059 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -592,7 +592,7 @@
"#,
expect!["callee Function FileId(0) 22..37 30..36"],
expect![[r#"
- caller Function FileId(0) 38..52 : FileId(0):44..50
+ caller Function FileId(0) 38..43 : FileId(0):44..50
caller Function FileId(1) 130..136 130..136 : FileId(0):44..50
callee Function FileId(0) 38..52 44..50 : FileId(0):44..50"#]],
expect![[]],
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 2c98328..a5d9a10 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -60,7 +60,7 @@
let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
.into_offset_iter();
- let doc = map_links(doc, |target, title, range| {
+ let doc = map_links(doc, |target, title, range, link_type| {
// This check is imperfect, there's some overlap between valid intra-doc links
// and valid URLs so we choose to be too eager to try to resolve what might be
// a URL.
@@ -78,7 +78,7 @@
.map(|(_, attr_id)| attr_id.is_inner_attr())
.unwrap_or(false);
if let Some((target, title)) =
- rewrite_intra_doc_link(db, definition, target, title, is_inner_doc)
+ rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type)
{
(None, target, title)
} else if let Some(target) = rewrite_url_link(db, definition, target) {
@@ -417,6 +417,7 @@
target: &str,
title: &str,
is_inner_doc: bool,
+ link_type: LinkType,
) -> Option<(String, String)> {
let (link, ns) = parse_intra_doc_link(target);
@@ -438,7 +439,21 @@
url = url.join(&file).ok()?;
url.set_fragment(frag);
- Some((url.into(), strip_prefixes_suffixes(title).to_owned()))
+ // We want to strip the keyword prefix from the title, but only if the target is implicitly the same
+ // as the title.
+ let title = match link_type {
+ LinkType::Email
+ | LinkType::Autolink
+ | LinkType::Shortcut
+ | LinkType::Collapsed
+ | LinkType::Reference
+ | LinkType::Inline => title.to_owned(),
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown | LinkType::ReferenceUnknown => {
+ strip_prefixes_suffixes(title).to_owned()
+ }
+ };
+
+ Some((url.into(), title))
}
/// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`).
@@ -470,7 +485,7 @@
/// Rewrites a markdown document, applying 'callback' to each link.
fn map_links<'e>(
events: impl Iterator<Item = (Event<'e>, Range<usize>)>,
- callback: impl Fn(&str, &str, Range<usize>) -> (Option<LinkType>, String, String),
+ callback: impl Fn(&str, &str, Range<usize>, LinkType) -> (Option<LinkType>, String, String),
) -> impl Iterator<Item = Event<'e>> {
let mut in_link = false;
// holds the origin link target on start event and the rewritten one on end event
@@ -490,25 +505,25 @@
Event::End(Tag::Link(link_type, target, _)) => {
in_link = false;
Event::End(Tag::Link(
- end_link_type.unwrap_or(link_type),
+ end_link_type.take().unwrap_or(link_type),
end_link_target.take().unwrap_or(target),
CowStr::Borrowed(""),
))
}
Event::Text(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
- if !matches!(end_link_type, Some(LinkType::Autolink)) {
+ if !matches!(end_link_type, Some(LinkType::Autolink)) && link_type.is_some() {
end_link_type = link_type;
}
Event::Text(CowStr::Boxed(link_name.into()))
}
Event::Code(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
- if !matches!(end_link_type, Some(LinkType::Autolink)) {
+ if !matches!(end_link_type, Some(LinkType::Autolink)) && link_type.is_some() {
end_link_type = link_type;
}
Event::Code(CowStr::Boxed(link_name.into()))
diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs
index 347da4e..6820f99 100644
--- a/crates/ide/src/file_structure.rs
+++ b/crates/ide/src/file_structure.rs
@@ -329,7 +329,7 @@
#[deprecated]
fn obsolete() {}
-#[deprecated(note = "for awhile")]
+#[deprecated(note = "for a while")]
fn very_obsolete() {}
// region: Some region name
@@ -608,8 +608,8 @@
StructureNode {
parent: None,
label: "very_obsolete",
- navigation_range: 511..524,
- node_range: 473..529,
+ navigation_range: 512..525,
+ node_range: 473..530,
kind: SymbolKind(
Function,
),
@@ -621,8 +621,8 @@
StructureNode {
parent: None,
label: "Some region name",
- navigation_range: 531..558,
- node_range: 531..558,
+ navigation_range: 532..559,
+ node_range: 532..559,
kind: Region,
detail: None,
deprecated: false,
@@ -630,8 +630,8 @@
StructureNode {
parent: None,
label: "m",
- navigation_range: 598..599,
- node_range: 573..636,
+ navigation_range: 599..600,
+ node_range: 574..637,
kind: SymbolKind(
Module,
),
@@ -643,8 +643,8 @@
22,
),
label: "dontpanic",
- navigation_range: 573..593,
- node_range: 573..593,
+ navigation_range: 574..594,
+ node_range: 574..594,
kind: Region,
detail: None,
deprecated: false,
@@ -654,8 +654,8 @@
22,
),
label: "f",
- navigation_range: 605..606,
- node_range: 602..611,
+ navigation_range: 606..607,
+ node_range: 603..612,
kind: SymbolKind(
Function,
),
@@ -669,8 +669,8 @@
22,
),
label: "g",
- navigation_range: 628..629,
- node_range: 612..634,
+ navigation_range: 629..630,
+ node_range: 613..635,
kind: SymbolKind(
Function,
),
@@ -682,8 +682,8 @@
StructureNode {
parent: None,
label: "extern \"C\"",
- navigation_range: 638..648,
- node_range: 638..651,
+ navigation_range: 639..649,
+ node_range: 639..652,
kind: ExternBlock,
detail: None,
deprecated: false,
@@ -691,8 +691,8 @@
StructureNode {
parent: None,
label: "let_statements",
- navigation_range: 656..670,
- node_range: 653..813,
+ navigation_range: 657..671,
+ node_range: 654..814,
kind: SymbolKind(
Function,
),
@@ -706,8 +706,8 @@
27,
),
label: "x",
- navigation_range: 683..684,
- node_range: 679..690,
+ navigation_range: 684..685,
+ node_range: 680..691,
kind: SymbolKind(
Local,
),
@@ -719,8 +719,8 @@
27,
),
label: "mut y",
- navigation_range: 699..704,
- node_range: 695..709,
+ navigation_range: 700..705,
+ node_range: 696..710,
kind: SymbolKind(
Local,
),
@@ -732,8 +732,8 @@
27,
),
label: "Foo { .. }",
- navigation_range: 718..740,
- node_range: 714..753,
+ navigation_range: 719..741,
+ node_range: 715..754,
kind: SymbolKind(
Local,
),
@@ -745,8 +745,8 @@
27,
),
label: "_",
- navigation_range: 803..804,
- node_range: 799..811,
+ navigation_range: 804..805,
+ node_range: 800..812,
kind: SymbolKind(
Local,
),
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 9bd8504..698fd14 100755
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -23,6 +23,7 @@
WhereClause,
ReturnType,
MatchArm,
+ Function,
// region: item runs
Modules,
Consts,
@@ -59,6 +60,31 @@
NodeOrToken::Token(token) => token.text().contains('\n'),
};
if is_multiline {
+ // for the func with multiline param list
+ if matches!(element.kind(), FN) {
+ if let NodeOrToken::Node(node) = &element {
+ if let Some(fn_node) = ast::Fn::cast(node.clone()) {
+ if !fn_node
+ .param_list()
+ .map(|param_list| param_list.syntax().text().contains_char('\n'))
+ .unwrap_or(false)
+ {
+ continue;
+ }
+
+ if fn_node.body().is_some() {
+ res.push(Fold {
+ range: TextRange::new(
+ node.text_range().start(),
+ node.text_range().end(),
+ ),
+ kind: FoldKind::Function,
+ });
+ continue;
+ }
+ }
+ }
+ }
res.push(Fold { range: element.text_range(), kind });
continue;
}
@@ -152,6 +178,7 @@
ARG_LIST | PARAM_LIST | GENERIC_ARG_LIST | GENERIC_PARAM_LIST => Some(FoldKind::ArgList),
ARRAY_EXPR => Some(FoldKind::Array),
RET_TYPE => Some(FoldKind::ReturnType),
+ FN => Some(FoldKind::Function),
WHERE_CLAUSE => Some(FoldKind::WhereClause),
ASSOC_ITEM_LIST
| RECORD_FIELD_LIST
@@ -291,6 +318,7 @@
use super::*;
+ #[track_caller]
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
let (ranges, text) = extract_tags(ra_fixture, "fold");
@@ -322,6 +350,7 @@
FoldKind::WhereClause => "whereclause",
FoldKind::ReturnType => "returntype",
FoldKind::MatchArm => "matcharm",
+ FoldKind::Function => "function",
FoldKind::TraitAliases => "traitaliases",
FoldKind::ExternCrates => "externcrates",
};
@@ -330,6 +359,23 @@
}
#[test]
+ fn test_fold_func_with_multiline_param_list() {
+ check(
+ r#"
+<fold function>fn func<fold arglist>(
+ a: i32,
+ b: i32,
+ c: i32,
+)</fold> <fold block>{
+
+
+
+}</fold></fold>
+"#,
+ );
+ }
+
+ #[test]
fn test_fold_comments() {
check(
r#"
@@ -541,10 +587,10 @@
fn fold_multiline_params() {
check(
r#"
-fn foo<fold arglist>(
+<fold function>fn foo<fold arglist>(
x: i32,
y: String,
-)</fold> {}
+)</fold> {}</fold>
"#,
)
}
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index fd465f3..29fc68b 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -1082,7 +1082,7 @@
}
define_fn!();
-//^^^^^^^^^^^^^
+//^^^^^^^^^^
fn bar() {
$0foo();
}
@@ -3228,7 +3228,7 @@
use crate::m;
m!();
- // ^^^^^
+ // ^^
fn qux() {
Foo$0;
@@ -3851,4 +3851,76 @@
"#,
);
}
+
+ #[test]
+ fn goto_const_from_match_pat_with_tuple_struct() {
+ check(
+ r#"
+struct Tag(u8);
+struct Path {}
+
+const Path: u8 = 0;
+ // ^^^^
+fn main() {
+ match Tag(Path) {
+ Tag(Path$0) => {}
+ _ => {}
+ }
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_const_from_match_pat() {
+ check(
+ r#"
+type T1 = u8;
+const T1: u8 = 0;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_struct_from_match_pat() {
+ check(
+ r#"
+struct T1;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_goto_trait_from_match_pat() {
+ check(
+ r#"
+trait T1 {}
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ // ^^
+ _ => {}
+ }
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index a281a49..c548021 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -10927,3 +10927,99 @@
"#]],
);
}
+
+#[test]
+fn keyword_inside_link() {
+ check(
+ r#"
+enum Foo {
+ MacroExpansion,
+}
+
+/// I return a [macro expansion](Foo::MacroExpansion).
+fn bar$0() -> Foo {
+ Foo::MacroExpansion
+}
+ "#,
+ expect.
+ "#]],
+ );
+}
+
+#[test]
+fn regression_20190() {
+ check(
+ r#"
+struct Foo;
+
+/// [`foo` bar](Foo).
+fn has_docs$0() {}
+ "#,
+ expect.
+ "#]],
+ );
+}
+
+#[test]
+fn regression_20225() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+trait Trait {
+ type Type<'a, T: ?Sized + 'a>;
+}
+
+enum Borrowed {}
+
+impl Trait for Borrowed {
+ type Type<'a, T: ?Sized + 'a> = &'a T;
+}
+
+enum Enum<'a, T: Trait + 'a> {
+ Variant1(T::Type<'a, [Enum<'a, T>]>),
+ Variant2,
+}
+
+impl Enum<'_, Borrowed> {
+ const CONSTANT$0: Self = Self::Variant1(&[Self::Variant2]);
+}
+ "#,
+ expect![[r#"
+ *CONSTANT*
+
+ ```rust
+ ra_test_fixture::Enum
+ ```
+
+ ```rust
+ const CONSTANT: Self = Variant1(&[Variant2])
+ ```
+ "#]],
+ );
+}
diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs
index f2844a2..49b43fc 100644
--- a/crates/ide/src/inlay_hints/adjustment.rs
+++ b/crates/ide/src/inlay_hints/adjustment.rs
@@ -109,50 +109,90 @@
}
has_adjustments = true;
- // FIXME: Add some nicer tooltips to each of these
- let (text, coercion) = match kind {
+ let (text, coercion, detailed_tooltip) = match kind {
Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => {
allow_edit = false;
- ("<never-to-any>", "never to any")
+ (
+ "<never-to-any>",
+ "never to any",
+ "Coerces the never type `!` into any other type. This happens in code paths that never return, like after `panic!()` or `return`.",
+ )
}
- Adjust::Deref(None) => ("*", "dereference"),
- Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => {
- ("*", "`Deref` dereference")
- }
- Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => {
- ("*", "`DerefMut` dereference")
- }
- Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => ("&", "borrow"),
- Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => ("&mut ", "unique borrow"),
- Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => {
- ("&raw const ", "const pointer borrow")
- }
- Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => {
- ("&raw mut ", "mut pointer borrow")
- }
+ Adjust::Deref(None) => (
+ "*",
+ "dereference",
+ "Built-in dereference of a reference to access the underlying value. The compiler inserts `*` to get the value from `&T`.",
+ ),
+ Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => (
+ "*",
+ "`Deref` dereference",
+ "Dereference via the `Deref` trait. Used for types like `Box<T>` or `Rc<T>` so they act like plain `T`.",
+ ),
+ Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => (
+ "*",
+ "`DerefMut` dereference",
+ "Mutable dereference using the `DerefMut` trait. Enables smart pointers to give mutable access to their inner values.",
+ ),
+ Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => (
+ "&",
+ "shared borrow",
+ "Inserts `&` to create a shared reference. Lets you use a value without moving or cloning it.",
+ ),
+ Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => (
+ "&mut ",
+ "mutable borrow",
+ "Inserts `&mut` to create a unique, mutable reference. Lets you modify a value without taking ownership.",
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => (
+ "&raw const ",
+ "const raw pointer",
+ "Converts a reference to a raw const pointer `*const T`. Often used when working with FFI or unsafe code.",
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => (
+ "&raw mut ",
+ "mut raw pointer",
+ "Converts a mutable reference to a raw mutable pointer `*mut T`. Allows mutation in unsafe contexts.",
+ ),
// some of these could be represented via `as` casts, but that's not too nice and
// handling everything as a prefix expr makes the `(` and `)` insertion easier
Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => {
allow_edit = false;
match cast {
- PointerCast::ReifyFnPointer => {
- ("<fn-item-to-fn-pointer>", "fn item to fn pointer")
- }
+ PointerCast::ReifyFnPointer => (
+ "<fn-item-to-fn-pointer>",
+ "fn item to fn pointer",
+ "Converts a named function to a function pointer `fn()`. Useful when passing functions as values.",
+ ),
PointerCast::UnsafeFnPointer => (
"<safe-fn-pointer-to-unsafe-fn-pointer>",
"safe fn pointer to unsafe fn pointer",
+ "Coerces a safe function pointer to an unsafe one. Allows calling it in an unsafe context.",
),
- PointerCast::ClosureFnPointer(Safety::Unsafe) => {
- ("<closure-to-unsafe-fn-pointer>", "closure to unsafe fn pointer")
- }
- PointerCast::ClosureFnPointer(Safety::Safe) => {
- ("<closure-to-fn-pointer>", "closure to fn pointer")
- }
- PointerCast::MutToConstPointer => {
- ("<mut-ptr-to-const-ptr>", "mut ptr to const ptr")
- }
- PointerCast::ArrayToPointer => ("<array-ptr-to-element-ptr>", ""),
- PointerCast::Unsize => ("<unsize>", "unsize"),
+ PointerCast::ClosureFnPointer(Safety::Unsafe) => (
+ "<closure-to-unsafe-fn-pointer>",
+ "closure to unsafe fn pointer",
+ "Converts a non-capturing closure to an unsafe function pointer. Required for use in `extern` or unsafe APIs.",
+ ),
+ PointerCast::ClosureFnPointer(Safety::Safe) => (
+ "<closure-to-fn-pointer>",
+ "closure to fn pointer",
+ "Converts a non-capturing closure to a function pointer. Lets closures behave like plain functions.",
+ ),
+ PointerCast::MutToConstPointer => (
+ "<mut-ptr-to-const-ptr>",
+ "mut ptr to const ptr",
+ "Coerces `*mut T` to `*const T`. Safe because const pointers restrict what you can do.",
+ ),
+ PointerCast::ArrayToPointer => (
+ "<array-ptr-to-element-ptr>",
+ "array to pointer",
+ "Converts an array to a pointer to its first element. Similar to how arrays decay to pointers in C.",
+ ),
+ PointerCast::Unsize => (
+ "<unsize>",
+ "unsize coercion",
+ "Converts a sized type to an unsized one. Used for things like turning arrays into slices or concrete types into trait objects.",
+ ),
}
}
_ => continue,
@@ -162,9 +202,11 @@
linked_location: None,
tooltip: Some(config.lazy_tooltip(|| {
InlayTooltip::Markdown(format!(
- "`{}` → `{}` ({coercion} coercion)",
+ "`{}` → `{}`\n\n**{}**\n\n{}",
source.display(sema.db, display_target),
target.display(sema.db, display_target),
+ coercion,
+ detailed_tooltip
))
})),
};
diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs
index d2216e6..05253b6 100644
--- a/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/crates/ide/src/inlay_hints/closing_brace.rs
@@ -91,8 +91,6 @@
match_ast! {
match parent {
ast::Fn(it) => {
- // FIXME: this could include parameters, but `HirDisplay` prints too much info
- // and doesn't respect the max length either, so the hints end up way too long
(format!("fn {}", it.name()?), it.name().map(name))
},
ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs
index bf4688e..d0539ab 100644
--- a/crates/ide/src/inlay_hints/implicit_drop.rs
+++ b/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -92,7 +92,7 @@
},
MirSpan::Unknown => continue,
};
- let binding = &hir.bindings[binding_idx];
+ let binding = &hir[binding_idx];
let name = binding.name.display_no_db(display_target.edition).to_smolstr();
if name.starts_with("<ra@") {
continue; // Ignore desugared variables
diff --git a/crates/ide/src/inlay_hints/implied_dyn_trait.rs b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
index cd01c07..0da1785 100644
--- a/crates/ide/src/inlay_hints/implied_dyn_trait.rs
+++ b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
@@ -17,8 +17,12 @@
let parent = path.syntax().parent()?;
let range = match path {
Either::Left(path) => {
- let paren =
- parent.ancestors().take_while(|it| ast::ParenType::can_cast(it.kind())).last();
+ let paren = parent
+ .ancestors()
+ .take_while(|it| {
+ ast::ParenType::can_cast(it.kind()) || ast::ForType::can_cast(it.kind())
+ })
+ .last();
let parent = paren.as_ref().and_then(|it| it.parent()).unwrap_or(parent);
if ast::TypeBound::can_cast(parent.kind())
|| ast::TypeAnchor::can_cast(parent.kind())
@@ -34,7 +38,7 @@
return None;
}
sema.resolve_trait(&path.path()?)?;
- paren.map_or_else(|| path.syntax().text_range(), |it| it.text_range())
+ path.syntax().text_range()
}
Either::Right(dyn_) => {
if dyn_.dyn_token().is_some() {
@@ -89,7 +93,7 @@
impl T {}
// ^ dyn
impl T for (T) {}
- // ^^^ dyn
+ // ^ dyn
impl T
"#,
);
@@ -112,7 +116,7 @@
_: &mut (T + T)
// ^^^^^ dyn
_: *mut (T),
- // ^^^ dyn
+ // ^ dyn
) {}
"#,
);
@@ -136,4 +140,26 @@
"#]],
);
}
+
+ #[test]
+ fn hrtb_bound_does_not_add_dyn() {
+ check(
+ r#"
+//- minicore: fn
+fn test<F>(f: F) where F: for<'a> FnOnce(&'a i32) {}
+ // ^: Sized
+ "#,
+ );
+ }
+
+ #[test]
+ fn with_parentheses() {
+ check(
+ r#"
+trait T {}
+fn foo(v: &(T)) {}
+ // ^ dyn
+ "#,
+ );
+ }
}
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 4c7c597..7dc1814 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -844,7 +844,7 @@
// *should* contain the name
_ => {
let kind = call_kind();
- let range = kind.clone().original_call_range_with_body(db);
+ let range = kind.clone().original_call_range_with_input(db);
//If the focus range is in the attribute/derive body, we
// need to point the call site to the entire body, if not, fall back
// to the name range of the attribute/derive call
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index fe874bc..86b88a1 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -3088,4 +3088,42 @@
"#]],
);
}
+
+ #[test]
+ fn raw_labels_and_lifetimes() {
+ check(
+ r#"
+fn foo<'r#fn>(s: &'r#fn str) {
+ let _a: &'r#fn str = s;
+ let _b: &'r#fn str;
+ 'r#break$0: {
+ break 'r#break;
+ }
+}
+ "#,
+ expect![[r#"
+ 'r#break Label FileId(0) 87..96 87..95
+
+ FileId(0) 113..121
+ "#]],
+ );
+ check(
+ r#"
+fn foo<'r#fn$0>(s: &'r#fn str) {
+ let _a: &'r#fn str = s;
+ let _b: &'r#fn str;
+ 'r#break: {
+ break 'r#break;
+ }
+}
+ "#,
+ expect![[r#"
+ 'r#fn LifetimeParam FileId(0) 7..12
+
+ FileId(0) 18..23
+ FileId(0) 44..49
+ FileId(0) 72..77
+ "#]],
+ );
+ }
}
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index f48150b..9d1a5ba 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -351,7 +351,7 @@
)
.call_site();
- let file_range = fn_source.syntax().original_file_range_with_macro_call_body(sema.db);
+ let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &fn_source.file_syntax(sema.db), file_range);
@@ -425,7 +425,7 @@
let impl_source = sema.source(*def)?;
let impl_syntax = impl_source.syntax();
- let file_range = impl_syntax.original_file_range_with_macro_call_body(sema.db);
+ let file_range = impl_syntax.original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &impl_syntax.file_syntax(sema.db), file_range);
@@ -1241,10 +1241,10 @@
[
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..310, name: \"foo_test\", kind: Function })",
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"foo_test2\", kind: Function }, true)",
- "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..344, name: \"main\", kind: Function })",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"foo_test2\", kind: Function }, true)",
+ "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..341, name: \"main\", kind: Function })",
]
"#]],
);
@@ -1272,10 +1272,10 @@
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo0\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo1\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo2\", kind: Function }, true)",
]
"#]],
);
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index 7985279..25deffe 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -79,7 +79,7 @@
}
fn node_id(&'a self, n: &Crate) -> Id<'a> {
- let id = n.as_id().as_u32();
+ let id = n.as_id().index();
Id::new(format!("_{id:?}")).unwrap()
}
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index 1ccd20c..4780743 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -156,6 +156,7 @@
cfg_attr,
cfg_eval,
cfg,
+ cfg_select,
char,
clone,
Clone,
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 52f5967..26ee698 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -11,7 +11,7 @@
};
use ide_db::{
ChangeWithProcMacros, FxHashMap, RootDatabase,
- base_db::{CrateGraphBuilder, Env, SourceRoot, SourceRootId},
+ base_db::{CrateGraphBuilder, Env, ProcMacroLoadingError, SourceRoot, SourceRootId},
prime_caches,
};
use itertools::Itertools;
@@ -69,6 +69,23 @@
extra_env: &FxHashMap<String, Option<String>>,
load_config: &LoadCargoConfig,
) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroClient>)> {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
+ let mut db = RootDatabase::new(lru_cap);
+
+ let (vfs, proc_macro_server) = load_workspace_into_db(ws, extra_env, load_config, &mut db)?;
+
+ Ok((db, vfs, proc_macro_server))
+}
+
+// This variant of `load_workspace` loads the workspace into an existing database,
+// deferring database creation to the caller, which is useful in certain third-party
+// scenarios, now that `salsa` supports extending foreign databases (e.g. `RootDatabase`).
+pub fn load_workspace_into_db(
+ ws: ProjectWorkspace,
+ extra_env: &FxHashMap<String, Option<String>>,
+ load_config: &LoadCargoConfig,
+ db: &mut RootDatabase,
+) -> anyhow::Result<(vfs::Vfs, Option<ProcMacroClient>)> {
let (sender, receiver) = unbounded();
let mut vfs = vfs::Vfs::default();
let mut loader = {
@@ -78,23 +95,27 @@
tracing::debug!(?load_config, "LoadCargoConfig");
let proc_macro_server = match &load_config.with_proc_macro_server {
- ProcMacroServerChoice::Sysroot => ws
- .find_sysroot_proc_macro_srv()
- .and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into))
- .map_err(|e| (e, true)),
+ ProcMacroServerChoice::Sysroot => ws.find_sysroot_proc_macro_srv().map(|it| {
+ it.and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into)).map_err(
+ |e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()),
+ )
+ }),
ProcMacroServerChoice::Explicit(path) => {
- ProcMacroClient::spawn(path, extra_env).map_err(Into::into).map_err(|e| (e, true))
+ Some(ProcMacroClient::spawn(path, extra_env).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())
+ }))
}
- ProcMacroServerChoice::None => {
- Err((anyhow::format_err!("proc macro server disabled"), false))
- }
+ ProcMacroServerChoice::None => Some(Err(ProcMacroLoadingError::Disabled)),
};
match &proc_macro_server {
- Ok(server) => {
- tracing::info!(path=%server.server_path(), "Proc-macro server started")
+ Some(Ok(server)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), path=%server.server_path(), "Proc-macro server started")
}
- Err((e, _)) => {
- tracing::info!(%e, "Failed to start proc-macro server")
+ Some(Err(e)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), %e, "Failed to start proc-macro server")
+ }
+ None => {
+ tracing::info!(manifest=%ws.manifest_or_root(), "No proc-macro server started")
}
}
@@ -111,22 +132,24 @@
);
let proc_macros = {
let proc_macro_server = match &proc_macro_server {
- Ok(it) => Ok(it),
- Err((e, hard_err)) => Err((e.to_string(), *hard_err)),
+ Some(Ok(it)) => Ok(it),
+ Some(Err(e)) => {
+ Err(ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()))
+ }
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running, workspace is missing a sysroot".into(),
+ )),
};
proc_macros
.into_iter()
.map(|(crate_id, path)| {
(
crate_id,
- path.map_or_else(
- |e| Err((e, true)),
- |(_, path)| {
- proc_macro_server.as_ref().map_err(Clone::clone).and_then(
- |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
- )
- },
- ),
+ path.map_or_else(Err, |(_, path)| {
+ proc_macro_server.as_ref().map_err(Clone::clone).and_then(
+ |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
+ )
+ }),
)
})
.collect()
@@ -139,18 +162,20 @@
version: 0,
});
- let db = load_crate_graph(
+ load_crate_graph_into_db(
crate_graph,
proc_macros,
project_folders.source_root_config,
&mut vfs,
&receiver,
+ db,
);
if load_config.prefill_caches {
- prime_caches::parallel_prime_caches(&db, 1, &|_| ());
+ prime_caches::parallel_prime_caches(db, 1, &|_| ());
}
- Ok((db, vfs, proc_macro_server.ok()))
+
+ Ok((vfs, proc_macro_server.and_then(Result::ok)))
}
#[derive(Default)]
@@ -391,11 +416,13 @@
path: &AbsPath,
ignored_macros: &[Box<str>],
) -> ProcMacroLoadResult {
- let res: Result<Vec<_>, String> = (|| {
+ let res: Result<Vec<_>, _> = (|| {
let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ let vec = server.load_dylib(dylib).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
+ })?;
if vec.is_empty() {
- return Err("proc macro library returned no proc macros".to_owned());
+ return Err(ProcMacroLoadingError::NoProcMacros);
}
Ok(vec
.into_iter()
@@ -412,20 +439,19 @@
}
Err(e) => {
tracing::warn!("proc-macro loading for {path} failed: {e}");
- Err((e, true))
+ Err(e)
}
}
}
-fn load_crate_graph(
+fn load_crate_graph_into_db(
crate_graph: CrateGraphBuilder,
proc_macros: ProcMacrosBuilder,
source_root_config: SourceRootConfig,
vfs: &mut vfs::Vfs,
receiver: &Receiver<vfs::loader::Message>,
-) -> RootDatabase {
- let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
- let mut db = RootDatabase::new(lru_cap);
+ db: &mut RootDatabase,
+) {
let mut analysis_change = ChangeWithProcMacros::default();
db.enable_proc_attr_macros();
@@ -462,7 +488,6 @@
analysis_change.set_proc_macros(proc_macros);
db.apply_change(analysis_change);
- db
}
fn expander_to_proc_macro(
diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs
index 0ac25da..2b4151e 100644
--- a/crates/parser/src/grammar/expressions.rs
+++ b/crates/parser/src/grammar/expressions.rs
@@ -4,7 +4,7 @@
use super::*;
-pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal};
+pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal, parse_asm_expr};
pub(crate) use atom::{block_expr, match_arm_list};
#[derive(PartialEq, Eq)]
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index 8ed0fc6..7665656 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -253,8 +253,7 @@
let m = p.start();
p.bump_remap(T![builtin]);
p.bump(T![#]);
- if p.at_contextual_kw(T![offset_of]) {
- p.bump_remap(T![offset_of]);
+ if p.eat_contextual_kw(T![offset_of]) {
p.expect(T!['(']);
type_(p);
p.expect(T![,]);
@@ -278,8 +277,7 @@
p.expect(T![')']);
}
Some(m.complete(p, OFFSET_OF_EXPR))
- } else if p.at_contextual_kw(T![format_args]) {
- p.bump_remap(T![format_args]);
+ } else if p.eat_contextual_kw(T![format_args]) {
p.expect(T!['(']);
expr(p);
if p.eat(T![,]) {
@@ -302,7 +300,16 @@
}
p.expect(T![')']);
Some(m.complete(p, FORMAT_ARGS_EXPR))
- } else if p.at_contextual_kw(T![asm]) {
+ } else if p.eat_contextual_kw(T![asm])
+ || p.eat_contextual_kw(T![global_asm])
+ || p.eat_contextual_kw(T![naked_asm])
+ {
+ // test asm_kinds
+ // fn foo() {
+ // builtin#asm("");
+ // builtin#global_asm("");
+ // builtin#naked_asm("");
+ // }
parse_asm_expr(p, m)
} else {
m.abandon(p);
@@ -321,8 +328,7 @@
// tmp = out(reg) _,
// );
// }
-fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
- p.bump_remap(T![asm]);
+pub(crate) fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
p.expect(T!['(']);
if expr(p).is_none() {
p.err_and_bump("expected asm template");
@@ -411,11 +417,10 @@
dir_spec.abandon(p);
op.abandon(p);
op_n.abandon(p);
- p.err_and_bump("expected asm operand");
- // improves error recovery and handles err_and_bump recovering from `{` which gets
- // the parser stuck here
+ // improves error recovery
if p.at(T!['{']) {
+ p.error("expected asm operand");
// test_err bad_asm_expr
// fn foo() {
// builtin#asm(
@@ -423,6 +428,8 @@
// );
// }
expr(p);
+ } else {
+ p.err_and_bump("expected asm operand");
}
if p.at(T!['}']) {
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
index b9f4866..8e551b0 100644
--- a/crates/parser/src/grammar/items.rs
+++ b/crates/parser/src/grammar/items.rs
@@ -261,6 +261,19 @@
T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m),
T![static] if (la == IDENT || la == T![_] || la == T![mut]) => consts::static_(p, m),
+ IDENT
+ if p.at_contextual_kw(T![builtin])
+ && p.nth_at(1, T![#])
+ && p.nth_at_contextual_kw(2, T![global_asm]) =>
+ {
+ p.bump_remap(T![builtin]);
+ p.bump(T![#]);
+ p.bump_remap(T![global_asm]);
+ // test global_asm
+ // builtin#global_asm("")
+ expressions::parse_asm_expr(p, m);
+ }
+
_ => return Err(m),
};
Ok(())
diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs
index e6c92de..8fff1c3 100644
--- a/crates/parser/src/lexed_str.rs
+++ b/crates/parser/src/lexed_str.rs
@@ -44,7 +44,9 @@
// Re-create the tokenizer from scratch every token because `GuardedStrPrefix` is one token in the lexer
// but we want to split it to two in edition <2024.
- while let Some(token) = rustc_lexer::tokenize(&text[conv.offset..]).next() {
+ while let Some(token) =
+ rustc_lexer::tokenize(&text[conv.offset..], rustc_lexer::FrontmatterAllowed::No).next()
+ {
let token_text = &text[conv.offset..][..token.len as usize];
conv.extend_token(&token.kind, token_text);
@@ -58,7 +60,7 @@
return None;
}
- let token = rustc_lexer::tokenize(text).next()?;
+ let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next()?;
if token.len as usize != text.len() {
return None;
}
diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs
index 36a363a..ca02d9f 100644
--- a/crates/parser/src/parser.rs
+++ b/crates/parser/src/parser.rs
@@ -29,7 +29,7 @@
edition: Edition,
}
-const PARSER_STEP_LIMIT: usize = 15_000_000;
+const PARSER_STEP_LIMIT: usize = if cfg!(debug_assertions) { 150_000 } else { 15_000_000 };
impl<'t> Parser<'t> {
pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
@@ -254,7 +254,10 @@
/// Create an error node and consume the next token.
pub(crate) fn err_and_bump(&mut self, message: &str) {
- self.err_recover(message, TokenSet::EMPTY);
+ let m = self.start();
+ self.error(message);
+ self.bump_any();
+ m.complete(self, ERROR);
}
/// Create an error node and consume the next token unless it is in the recovery set.
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index f534546..12a13ca 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -120,12 +120,14 @@
DYN_KW,
FORMAT_ARGS_KW,
GEN_KW,
+ GLOBAL_ASM_KW,
INLATEOUT_KW,
INOUT_KW,
LABEL_KW,
LATEOUT_KW,
MACRO_RULES_KW,
MAY_UNWIND_KW,
+ NAKED_ASM_KW,
NOMEM_KW,
NORETURN_KW,
NOSTACK_KW,
@@ -599,12 +601,14 @@
DEFAULT_KW => "default",
DYN_KW => "dyn",
FORMAT_ARGS_KW => "format_args",
+ GLOBAL_ASM_KW => "global_asm",
INLATEOUT_KW => "inlateout",
INOUT_KW => "inout",
LABEL_KW => "label",
LATEOUT_KW => "lateout",
MACRO_RULES_KW => "macro_rules",
MAY_UNWIND_KW => "may_unwind",
+ NAKED_ASM_KW => "naked_asm",
NOMEM_KW => "nomem",
NORETURN_KW => "noreturn",
NOSTACK_KW => "nostack",
@@ -699,12 +703,14 @@
DEFAULT_KW => true,
DYN_KW if edition < Edition::Edition2018 => true,
FORMAT_ARGS_KW => true,
+ GLOBAL_ASM_KW => true,
INLATEOUT_KW => true,
INOUT_KW => true,
LABEL_KW => true,
LATEOUT_KW => true,
MACRO_RULES_KW => true,
MAY_UNWIND_KW => true,
+ NAKED_ASM_KW => true,
NOMEM_KW => true,
NORETURN_KW => true,
NOSTACK_KW => true,
@@ -787,12 +793,14 @@
DEFAULT_KW => true,
DYN_KW if edition < Edition::Edition2018 => true,
FORMAT_ARGS_KW => true,
+ GLOBAL_ASM_KW => true,
INLATEOUT_KW => true,
INOUT_KW => true,
LABEL_KW => true,
LATEOUT_KW => true,
MACRO_RULES_KW => true,
MAY_UNWIND_KW => true,
+ NAKED_ASM_KW => true,
NOMEM_KW => true,
NORETURN_KW => true,
NOSTACK_KW => true,
@@ -938,12 +946,14 @@
"default" => DEFAULT_KW,
"dyn" if edition < Edition::Edition2018 => DYN_KW,
"format_args" => FORMAT_ARGS_KW,
+ "global_asm" => GLOBAL_ASM_KW,
"inlateout" => INLATEOUT_KW,
"inout" => INOUT_KW,
"label" => LABEL_KW,
"lateout" => LATEOUT_KW,
"macro_rules" => MACRO_RULES_KW,
"may_unwind" => MAY_UNWIND_KW,
+ "naked_asm" => NAKED_ASM_KW,
"nomem" => NOMEM_KW,
"noreturn" => NORETURN_KW,
"nostack" => NOSTACK_KW,
@@ -998,7 +1008,7 @@
}
}
#[macro_export]
-macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; 
[macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; 
[may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; }
+macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; 
[macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [global_asm] => { $ crate :: SyntaxKind :: GLOBAL_ASM_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] 
=> { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [naked_asm] => { $ crate :: SyntaxKind :: NAKED_ASM_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; }
impl ::core::marker::Copy for SyntaxKind {}
impl ::core::clone::Clone for SyntaxKind {
#[inline]
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index 6ec4192..cef7b0e 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -21,6 +21,8 @@
#[test]
fn asm_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_expr.rs"); }
#[test]
+ fn asm_kinds() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_kinds.rs"); }
+ #[test]
fn asm_label() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_label.rs"); }
#[test]
fn assoc_const_eq() {
@@ -298,6 +300,8 @@
run_and_expect_no_errors("test_data/parser/inline/ok/generic_param_list.rs");
}
#[test]
+ fn global_asm() { run_and_expect_no_errors("test_data/parser/inline/ok/global_asm.rs"); }
+ #[test]
fn half_open_range_pat() {
run_and_expect_no_errors("test_data/parser/inline/ok/half_open_range_pat.rs");
}
diff --git a/crates/parser/test_data/parser/inline/ok/asm_kinds.rast b/crates/parser/test_data/parser/inline/ok/asm_kinds.rast
new file mode 100644
index 0000000..c337d89
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/asm_kinds.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ ASM_KW "asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ GLOBAL_ASM_KW "global_asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ NAKED_ASM_KW "naked_asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/asm_kinds.rs b/crates/parser/test_data/parser/inline/ok/asm_kinds.rs
new file mode 100644
index 0000000..9c03e9d
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/asm_kinds.rs
@@ -0,0 +1,5 @@
+fn foo() {
+ builtin#asm("");
+ builtin#global_asm("");
+ builtin#naked_asm("");
+}
diff --git a/crates/parser/test_data/parser/inline/ok/global_asm.rast b/crates/parser/test_data/parser/inline/ok/global_asm.rast
new file mode 100644
index 0000000..5337c56
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/global_asm.rast
@@ -0,0 +1,10 @@
+SOURCE_FILE
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ GLOBAL_ASM_KW "global_asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ R_PAREN ")"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/global_asm.rs b/crates/parser/test_data/parser/inline/ok/global_asm.rs
new file mode 100644
index 0000000..967ce1f
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/global_asm.rs
@@ -0,0 +1 @@
+builtin#global_asm("")
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index dd576f2..662f625 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -121,7 +121,7 @@
use proc_macro::bridge::LitKind;
use rustc_lexer::{LiteralKind, Token, TokenKind};
- let mut tokens = rustc_lexer::tokenize(s);
+ let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
let lit = if minus_or_lit.kind == TokenKind::Minus {
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index bbaa8f4..499caa6 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -312,7 +312,9 @@
match message {
Message::BuildScriptExecuted(mut message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("running build-script: {name}"));
+ progress(format!(
+ "building compile-time-deps: build script {name} run"
+ ));
let cfgs = {
let mut acc = Vec::new();
for cfg in &message.cfgs {
@@ -343,7 +345,9 @@
}
Message::CompilerArtifact(message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("building proc-macros: {name}"));
+ progress(format!(
+ "building compile-time-deps: proc-macro {name} built"
+ ));
if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
{
// Skip rmeta file
@@ -409,13 +413,6 @@
cmd.arg("--target-dir").arg(target_dir);
}
- // --all-targets includes tests, benches and examples in addition to the
- // default lib and bins. This is an independent concept from the --target
- // flag below.
- if config.all_targets {
- cmd.arg("--all-targets");
- }
-
if let Some(target) = &config.target {
cmd.args(["--target", target]);
}
@@ -463,14 +460,26 @@
cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
cmd.arg("-Zunstable-options");
cmd.arg("--compile-time-deps");
- } else if config.wrap_rustc_in_build_scripts {
- // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
- // that to compile only proc macros and build scripts during the initial
- // `cargo check`.
- // We don't need this if we are using `--compile-time-deps` flag.
- let myself = std::env::current_exe()?;
- cmd.env("RUSTC_WRAPPER", myself);
- cmd.env("RA_RUSTC_WRAPPER", "1");
+ // we can pass this unconditionally, because we won't actually build the
+ // binaries, and as such, this will succeed even on targets without libtest
+ cmd.arg("--all-targets");
+ } else {
+ // --all-targets includes tests, benches and examples in addition to the
+ // default lib and bins. This is an independent concept from the --target
+ // flag below.
+ if config.all_targets {
+ cmd.arg("--all-targets");
+ }
+
+ if config.wrap_rustc_in_build_scripts {
+ // Set up RUSTC_WRAPPER to point to the `rust-analyzer` binary itself. We use
+ // that to compile only proc macros and build scripts during the initial
+ // `cargo check`.
+ // We don't need this if we are using `--compile-time-deps` flag.
+ let myself = std::env::current_exe()?;
+ cmd.env("RUSTC_WRAPPER", myself);
+ cmd.env("RA_RUSTC_WRAPPER", "1");
+ }
}
Ok(cmd)
}
diff --git a/crates/project-model/src/cargo_config_file.rs b/crates/project-model/src/cargo_config_file.rs
new file mode 100644
index 0000000..7966f74
--- /dev/null
+++ b/crates/project-model/src/cargo_config_file.rs
@@ -0,0 +1,34 @@
+//! Read `.cargo/config.toml` as a JSON object
+use rustc_hash::FxHashMap;
+use toolchain::Tool;
+
+use crate::{ManifestPath, Sysroot, utf8_stdout};
+
+pub(crate) type CargoConfigFile = serde_json::Map<String, serde_json::Value>;
+
+pub(crate) fn read(
+ manifest: &ManifestPath,
+ extra_env: &FxHashMap<String, Option<String>>,
+ sysroot: &Sysroot,
+) -> Option<CargoConfigFile> {
+ let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
+ cargo_config
+ .args(["-Z", "unstable-options", "config", "get", "--format", "json"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ if manifest.is_rust_manifest() {
+ cargo_config.arg("-Zscript");
+ }
+
+ tracing::debug!("Discovering cargo config by {:?}", cargo_config);
+ let json: serde_json::Map<String, serde_json::Value> = utf8_stdout(&mut cargo_config)
+ .inspect(|json| {
+ tracing::debug!("Discovered cargo config: {:?}", json);
+ })
+ .inspect_err(|err| {
+ tracing::debug!("Failed to discover cargo config: {:?}", err);
+ })
+ .ok()
+ .and_then(|stdout| serde_json::from_str(&stdout).ok())?;
+
+ Some(json)
+}
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 4bacc90..daadcd9 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -300,8 +300,6 @@
pub extra_args: Vec<String>,
/// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, Option<String>>,
- /// The target dir for this workspace load.
- pub target_dir: Utf8PathBuf,
/// What kind of metadata are we fetching: workspace, rustc, or sysroot.
pub kind: &'static str,
/// The toolchain version, if known.
@@ -317,188 +315,6 @@
}
impl CargoWorkspace {
- /// Fetches the metadata for the given `cargo_toml` manifest.
- /// A successful result may contain another metadata error if the initial fetching failed but
- /// the `--no-deps` retry succeeded.
- ///
- /// The sysroot is used to set the `RUSTUP_TOOLCHAIN` env var when invoking cargo
- /// to ensure that the rustup proxy uses the correct toolchain.
- pub fn fetch_metadata(
- cargo_toml: &ManifestPath,
- current_dir: &AbsPath,
- config: &CargoMetadataConfig,
- sysroot: &Sysroot,
- no_deps: bool,
- locked: bool,
- progress: &dyn Fn(String),
- ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
- let res = Self::fetch_metadata_(
- cargo_toml,
- current_dir,
- config,
- sysroot,
- no_deps,
- locked,
- progress,
- );
- if let Ok((_, Some(ref e))) = res {
- tracing::warn!(
- %cargo_toml,
- ?e,
- "`cargo metadata` failed, but retry with `--no-deps` succeeded"
- );
- }
- res
- }
-
- fn fetch_metadata_(
- cargo_toml: &ManifestPath,
- current_dir: &AbsPath,
- config: &CargoMetadataConfig,
- sysroot: &Sysroot,
- no_deps: bool,
- locked: bool,
- progress: &dyn Fn(String),
- ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
- let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
- let mut meta = MetadataCommand::new();
- meta.cargo_path(cargo.get_program());
- cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default()));
- meta.manifest_path(cargo_toml.to_path_buf());
- match &config.features {
- CargoFeatures::All => {
- meta.features(CargoOpt::AllFeatures);
- }
- CargoFeatures::Selected { features, no_default_features } => {
- if *no_default_features {
- meta.features(CargoOpt::NoDefaultFeatures);
- }
- if !features.is_empty() {
- meta.features(CargoOpt::SomeFeatures(features.clone()));
- }
- }
- }
- meta.current_dir(current_dir);
-
- let mut other_options = vec![];
- // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually
- // the only relevant flags for metadata here are unstable ones, so we pass those along
- // but nothing else
- let mut extra_args = config.extra_args.iter();
- while let Some(arg) = extra_args.next() {
- if arg == "-Z" {
- if let Some(arg) = extra_args.next() {
- other_options.push("-Z".to_owned());
- other_options.push(arg.to_owned());
- }
- }
- }
-
- if !config.targets.is_empty() {
- other_options.extend(
- config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]),
- );
- }
- if no_deps {
- other_options.push("--no-deps".to_owned());
- }
-
- let mut using_lockfile_copy = false;
- // The manifest is a rust file, so this means its a script manifest
- if cargo_toml.is_rust_manifest() {
- other_options.push("-Zscript".to_owned());
- } else if config
- .toolchain_version
- .as_ref()
- .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
- {
- let lockfile = <_ as AsRef<Utf8Path>>::as_ref(cargo_toml).with_extension("lock");
- let target_lockfile = config
- .target_dir
- .join("rust-analyzer")
- .join("metadata")
- .join(config.kind)
- .join("Cargo.lock");
- match std::fs::copy(&lockfile, &target_lockfile) {
- Ok(_) => {
- using_lockfile_copy = true;
- other_options.push("--lockfile-path".to_owned());
- other_options.push(target_lockfile.to_string());
- }
- Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
- // There exists no lockfile yet
- using_lockfile_copy = true;
- other_options.push("--lockfile-path".to_owned());
- other_options.push(target_lockfile.to_string());
- }
- Err(e) => {
- tracing::warn!(
- "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
- );
- }
- }
- }
- if using_lockfile_copy {
- other_options.push("-Zunstable-options".to_owned());
- meta.env("RUSTC_BOOTSTRAP", "1");
- }
- // No need to lock it if we copied the lockfile, we won't modify the original after all/
- // This way cargo cannot error out on us if the lockfile requires updating.
- if !using_lockfile_copy && locked {
- other_options.push("--locked".to_owned());
- }
- meta.other_options(other_options);
-
- // FIXME: Fetching metadata is a slow process, as it might require
- // calling crates.io. We should be reporting progress here, but it's
- // unclear whether cargo itself supports it.
- progress("cargo metadata: started".to_owned());
-
- let res = (|| -> anyhow::Result<(_, _)> {
- let mut errored = false;
- let output =
- spawn_with_streaming_output(meta.cargo_command(), &mut |_| (), &mut |line| {
- errored = errored || line.starts_with("error") || line.starts_with("warning");
- if errored {
- progress("cargo metadata: ?".to_owned());
- return;
- }
- progress(format!("cargo metadata: {line}"));
- })?;
- if !output.status.success() {
- progress(format!("cargo metadata: failed {}", output.status));
- let error = cargo_metadata::Error::CargoMetadata {
- stderr: String::from_utf8(output.stderr)?,
- }
- .into();
- if !no_deps {
- // If we failed to fetch metadata with deps, try again without them.
- // This makes r-a still work partially when offline.
- if let Ok((metadata, _)) = Self::fetch_metadata_(
- cargo_toml,
- current_dir,
- config,
- sysroot,
- true,
- locked,
- progress,
- ) {
- return Ok((metadata, Some(error)));
- }
- }
- return Err(error);
- }
- let stdout = from_utf8(&output.stdout)?
- .lines()
- .find(|line| line.starts_with('{'))
- .ok_or(cargo_metadata::Error::NoJson)?;
- Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
- })()
- .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()));
- progress("cargo metadata: finished".to_owned());
- res
- }
-
pub fn new(
mut meta: cargo_metadata::Metadata,
ws_manifest_path: ManifestPath,
@@ -733,3 +549,214 @@
self.requires_rustc_private
}
}
+
+pub(crate) struct FetchMetadata {
+ command: cargo_metadata::MetadataCommand,
+ lockfile_path: Option<Utf8PathBuf>,
+ kind: &'static str,
+ no_deps: bool,
+ no_deps_result: anyhow::Result<cargo_metadata::Metadata>,
+ other_options: Vec<String>,
+}
+
+impl FetchMetadata {
+ /// Builds a command to fetch metadata for the given `cargo_toml` manifest.
+ ///
+ /// Performs a lightweight pre-fetch using the `--no-deps` option,
+ /// available via [`FetchMetadata::no_deps_metadata`], to gather basic
+ /// information such as the `target-dir`.
+ ///
+ /// The provided sysroot is used to set the `RUSTUP_TOOLCHAIN`
+ /// environment variable when invoking Cargo, ensuring that the
+ /// rustup proxy selects the correct toolchain.
+ pub(crate) fn new(
+ cargo_toml: &ManifestPath,
+ current_dir: &AbsPath,
+ config: &CargoMetadataConfig,
+ sysroot: &Sysroot,
+ no_deps: bool,
+ ) -> Self {
+ let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
+ let mut command = MetadataCommand::new();
+ command.cargo_path(cargo.get_program());
+ cargo.get_envs().for_each(|(var, val)| _ = command.env(var, val.unwrap_or_default()));
+ command.manifest_path(cargo_toml.to_path_buf());
+ match &config.features {
+ CargoFeatures::All => {
+ command.features(CargoOpt::AllFeatures);
+ }
+ CargoFeatures::Selected { features, no_default_features } => {
+ if *no_default_features {
+ command.features(CargoOpt::NoDefaultFeatures);
+ }
+ if !features.is_empty() {
+ command.features(CargoOpt::SomeFeatures(features.clone()));
+ }
+ }
+ }
+ command.current_dir(current_dir);
+
+ let mut needs_nightly = false;
+ let mut other_options = vec![];
+ // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually
+ // the only relevant flags for metadata here are unstable ones, so we pass those along
+ // but nothing else
+ let mut extra_args = config.extra_args.iter();
+ while let Some(arg) = extra_args.next() {
+ if arg == "-Z" {
+ if let Some(arg) = extra_args.next() {
+ needs_nightly = true;
+ other_options.push("-Z".to_owned());
+ other_options.push(arg.to_owned());
+ }
+ }
+ }
+
+ let mut lockfile_path = None;
+ if cargo_toml.is_rust_manifest() {
+ needs_nightly = true;
+ other_options.push("-Zscript".to_owned());
+ } else if config
+ .toolchain_version
+ .as_ref()
+ .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
+ {
+ lockfile_path = Some(<_ as AsRef<Utf8Path>>::as_ref(cargo_toml).with_extension("lock"));
+ }
+
+ if !config.targets.is_empty() {
+ other_options.extend(
+ config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]),
+ );
+ }
+
+ command.other_options(other_options.clone());
+
+ if needs_nightly {
+ command.env("RUSTC_BOOTSTRAP", "1");
+ }
+
+ // Pre-fetch basic metadata using `--no-deps`, which:
+ // - avoids fetching registries like crates.io,
+ // - skips dependency resolution and does not modify lockfiles,
+ // - and thus doesn't require progress reporting or copying lockfiles.
+ //
+ // Useful as a fast fallback to extract info like `target-dir`.
+ let cargo_command;
+ let no_deps_result = if no_deps {
+ command.no_deps();
+ cargo_command = command.cargo_command();
+ command.exec()
+ } else {
+ let mut no_deps_command = command.clone();
+ no_deps_command.no_deps();
+ cargo_command = no_deps_command.cargo_command();
+ no_deps_command.exec()
+ }
+ .with_context(|| format!("Failed to run `{cargo_command:?}`"));
+
+ Self { command, lockfile_path, kind: config.kind, no_deps, no_deps_result, other_options }
+ }
+
+ pub(crate) fn no_deps_metadata(&self) -> Option<&cargo_metadata::Metadata> {
+ self.no_deps_result.as_ref().ok()
+ }
+
+ /// Executes the metadata-fetching command.
+ ///
+ /// A successful result may still contain a metadata error if the full fetch failed,
+ /// but the fallback `--no-deps` pre-fetch succeeded during command construction.
+ pub(crate) fn exec(
+ self,
+ target_dir: &Utf8Path,
+ locked: bool,
+ progress: &dyn Fn(String),
+ ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
+ let Self { mut command, lockfile_path, kind, no_deps, no_deps_result, mut other_options } =
+ self;
+
+ if no_deps {
+ return no_deps_result.map(|m| (m, None));
+ }
+
+ let mut using_lockfile_copy = false;
+        // If a lockfile path was computed (non-script manifest on a new enough toolchain), work on a copy so cargo never touches the original lockfile
+ if let Some(lockfile) = lockfile_path {
+ let target_lockfile =
+ target_dir.join("rust-analyzer").join("metadata").join(kind).join("Cargo.lock");
+ match std::fs::copy(&lockfile, &target_lockfile) {
+ Ok(_) => {
+ using_lockfile_copy = true;
+ other_options.push("--lockfile-path".to_owned());
+ other_options.push(target_lockfile.to_string());
+ }
+ Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
+ // There exists no lockfile yet
+ using_lockfile_copy = true;
+ other_options.push("--lockfile-path".to_owned());
+ other_options.push(target_lockfile.to_string());
+ }
+ Err(e) => {
+ tracing::warn!(
+ "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
+ );
+ }
+ }
+ }
+ if using_lockfile_copy {
+ other_options.push("-Zunstable-options".to_owned());
+ command.env("RUSTC_BOOTSTRAP", "1");
+ }
+        // No need to lock it if we copied the lockfile, we won't modify the original after all.
+ // This way cargo cannot error out on us if the lockfile requires updating.
+ if !using_lockfile_copy && locked {
+ other_options.push("--locked".to_owned());
+ }
+ command.other_options(other_options);
+
+ // FIXME: Fetching metadata is a slow process, as it might require
+ // calling crates.io. We should be reporting progress here, but it's
+ // unclear whether cargo itself supports it.
+ progress("cargo metadata: started".to_owned());
+
+ let res = (|| -> anyhow::Result<(_, _)> {
+ let mut errored = false;
+ let output =
+ spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| {
+ errored = errored || line.starts_with("error") || line.starts_with("warning");
+ if errored {
+ progress("cargo metadata: ?".to_owned());
+ return;
+ }
+ progress(format!("cargo metadata: {line}"));
+ })?;
+ if !output.status.success() {
+ progress(format!("cargo metadata: failed {}", output.status));
+ let error = cargo_metadata::Error::CargoMetadata {
+ stderr: String::from_utf8(output.stderr)?,
+ }
+ .into();
+ if !no_deps {
+ // If we failed to fetch metadata with deps, return pre-fetched result without them.
+ // This makes r-a still work partially when offline.
+ if let Ok(metadata) = no_deps_result {
+ tracing::warn!(
+ ?error,
+ "`cargo metadata` failed and returning succeeded result with `--no-deps`"
+ );
+ return Ok((metadata, Some(error)));
+ }
+ }
+ return Err(error);
+ }
+ let stdout = from_utf8(&output.stdout)?
+ .lines()
+ .find(|line| line.starts_with('{'))
+ .ok_or(cargo_metadata::Error::NoJson)?;
+ Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
+ })()
+ .with_context(|| format!("Failed to run `{:?}`", command.cargo_command()));
+ progress("cargo metadata: finished".to_owned());
+ res
+ }
+}
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
index 9e0415c..d281492 100644
--- a/crates/project-model/src/env.rs
+++ b/crates/project-model/src/env.rs
@@ -1,10 +1,9 @@
//! Cargo-like environment variables injection.
use base_db::Env;
-use paths::{Utf8Path, Utf8PathBuf};
-use rustc_hash::FxHashMap;
+use paths::Utf8Path;
use toolchain::Tool;
-use crate::{ManifestPath, PackageData, Sysroot, TargetKind, utf8_stdout};
+use crate::{ManifestPath, PackageData, TargetKind, cargo_config_file::CargoConfigFile};
/// Recreates the compile-time environment variables that Cargo sets.
///
@@ -61,104 +60,68 @@
env.set("CARGO_CRATE_NAME", cargo_name.replace('-', "_"));
}
-pub(crate) fn cargo_config_env(
- manifest: &ManifestPath,
- extra_env: &FxHashMap<String, Option<String>>,
- sysroot: &Sysroot,
-) -> Env {
- let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
- cargo_config
- .args(["-Z", "unstable-options", "config", "get", "env"])
- .env("RUSTC_BOOTSTRAP", "1");
- if manifest.is_rust_manifest() {
- cargo_config.arg("-Zscript");
- }
- // if successful we receive `env.key.value = "value" per entry
- tracing::debug!("Discovering cargo config env by {:?}", cargo_config);
- utf8_stdout(&mut cargo_config)
- .map(|stdout| parse_output_cargo_config_env(manifest, &stdout))
- .inspect(|env| {
- tracing::debug!("Discovered cargo config env: {:?}", env);
- })
- .inspect_err(|err| {
- tracing::debug!("Failed to discover cargo config env: {:?}", err);
- })
- .unwrap_or_default()
-}
-
-fn parse_output_cargo_config_env(manifest: &ManifestPath, stdout: &str) -> Env {
+pub(crate) fn cargo_config_env(manifest: &ManifestPath, config: &Option<CargoConfigFile>) -> Env {
let mut env = Env::default();
- let mut relatives = vec![];
- for (key, val) in
- stdout.lines().filter_map(|l| l.strip_prefix("env.")).filter_map(|l| l.split_once(" = "))
- {
- let val = val.trim_matches('"').to_owned();
- if let Some((key, modifier)) = key.split_once('.') {
- match modifier {
- "relative" => relatives.push((key, val)),
- "value" => _ = env.insert(key, val),
- _ => {
- tracing::warn!(
- "Unknown modifier in cargo config env: {}, expected `relative` or `value`",
- modifier
- );
- continue;
- }
- }
- } else {
- env.insert(key, val);
- }
- }
+ let Some(serde_json::Value::Object(env_json)) = config.as_ref().and_then(|c| c.get("env"))
+ else {
+ return env;
+ };
+
// FIXME: The base here should be the parent of the `.cargo/config` file, not the manifest.
// But cargo does not provide this information.
let base = <_ as AsRef<Utf8Path>>::as_ref(manifest.parent());
- for (key, relative) in relatives {
- if relative != "true" {
- continue;
- }
- if let Some(suffix) = env.get(key) {
- env.insert(key, base.join(suffix).to_string());
- }
- }
- env
-}
-pub(crate) fn cargo_config_build_target_dir(
- manifest: &ManifestPath,
- extra_env: &FxHashMap<String, Option<String>>,
- sysroot: &Sysroot,
-) -> Option<Utf8PathBuf> {
- let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
- cargo_config
- .args(["-Z", "unstable-options", "config", "get", "build.target-dir"])
- .env("RUSTC_BOOTSTRAP", "1");
- if manifest.is_rust_manifest() {
- cargo_config.arg("-Zscript");
+ for (key, entry) in env_json {
+ let serde_json::Value::Object(entry) = entry else {
+ continue;
+ };
+ let Some(value) = entry.get("value").and_then(|v| v.as_str()) else {
+ continue;
+ };
+
+ let value = if entry
+ .get("relative")
+ .and_then(|v| v.as_bool())
+ .is_some_and(std::convert::identity)
+ {
+ base.join(value).to_string()
+ } else {
+ value.to_owned()
+ };
+ env.insert(key, value);
}
- utf8_stdout(&mut cargo_config)
- .map(|stdout| {
- Utf8Path::new(stdout.trim_start_matches("build.target-dir = ").trim_matches('"'))
- .to_owned()
- })
- .ok()
+
+ env
}
#[test]
fn parse_output_cargo_config_env_works() {
- let stdout = r#"
-env.CARGO_WORKSPACE_DIR.relative = true
-env.CARGO_WORKSPACE_DIR.value = ""
-env.RELATIVE.relative = true
-env.RELATIVE.value = "../relative"
-env.INVALID.relative = invalidbool
-env.INVALID.value = "../relative"
-env.TEST.value = "test"
-"#
- .trim();
+ let raw = r#"
+{
+ "env": {
+ "CARGO_WORKSPACE_DIR": {
+ "relative": true,
+ "value": ""
+ },
+ "INVALID": {
+ "relative": "invalidbool",
+ "value": "../relative"
+ },
+ "RELATIVE": {
+ "relative": true,
+ "value": "../relative"
+ },
+ "TEST": {
+ "value": "test"
+ }
+ }
+}
+"#;
+ let config: CargoConfigFile = serde_json::from_str(raw).unwrap();
let cwd = paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap();
let manifest = paths::AbsPathBuf::assert(cwd.join("Cargo.toml"));
let manifest = ManifestPath::try_from(manifest).unwrap();
- let env = parse_output_cargo_config_env(&manifest, stdout);
+ let env = cargo_config_env(&manifest, &Some(config));
assert_eq!(env.get("CARGO_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str()));
assert_eq!(env.get("RELATIVE").as_deref(), Some(cwd.join("../relative").as_str()));
assert_eq!(env.get("INVALID").as_deref(), Some("../relative"));
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs
index 436af64..3bf3d06 100644
--- a/crates/project-model/src/lib.rs
+++ b/crates/project-model/src/lib.rs
@@ -24,7 +24,7 @@
use std::path::Path;
- use crate::{ManifestPath, Sysroot};
+ use crate::{ManifestPath, Sysroot, cargo_config_file::CargoConfigFile};
#[derive(Copy, Clone)]
pub enum QueryConfig<'a> {
@@ -32,11 +32,12 @@
Rustc(&'a Sysroot, &'a Path),
/// Attempt to use cargo to query the desired information, honoring cargo configurations.
/// If this fails, falls back to invoking `rustc` directly.
- Cargo(&'a Sysroot, &'a ManifestPath),
+ Cargo(&'a Sysroot, &'a ManifestPath, &'a Option<CargoConfigFile>),
}
}
mod build_dependencies;
+mod cargo_config_file;
mod cargo_workspace;
mod env;
mod manifest_path;
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 4b34fc0..9781c46 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -9,14 +9,15 @@
use anyhow::{Result, format_err};
use itertools::Itertools;
-use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
use rustc_hash::FxHashMap;
use stdx::format_to;
use toolchain::{Tool, probe_for_binary};
use crate::{
CargoWorkspace, ManifestPath, ProjectJson, RustSourceWorkspaceConfig,
- cargo_workspace::CargoMetadataConfig, utf8_stdout,
+ cargo_workspace::{CargoMetadataConfig, FetchMetadata},
+ utf8_stdout,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -163,18 +164,18 @@
}
}
- pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
- let Some(root) = self.root() else {
- return Err(anyhow::format_err!("no sysroot",));
- };
- ["libexec", "lib"]
- .into_iter()
- .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
- .find_map(|server_path| probe_for_binary(server_path.into()))
- .map(AbsPathBuf::assert)
- .ok_or_else(|| {
- anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
- })
+ pub fn discover_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
+ let root = self.root()?;
+ Some(
+ ["libexec", "lib"]
+ .into_iter()
+ .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
+ .find_map(|server_path| probe_for_binary(server_path.into()))
+ .map(AbsPathBuf::assert)
+ .ok_or_else(|| {
+ anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
+ }),
+ )
}
fn assemble(
@@ -209,7 +210,9 @@
pub fn load_workspace(
&self,
sysroot_source_config: &RustSourceWorkspaceConfig,
+ no_deps: bool,
current_dir: &AbsPath,
+ target_dir: &Utf8Path,
progress: &dyn Fn(String),
) -> Option<RustLibSrcWorkspace> {
assert!(matches!(self.workspace, RustLibSrcWorkspace::Empty), "workspace already loaded");
@@ -223,7 +226,9 @@
match self.load_library_via_cargo(
&library_manifest,
current_dir,
+ target_dir,
cargo_config,
+ no_deps,
progress,
) {
Ok(loaded) => return Some(loaded),
@@ -317,7 +322,9 @@
&self,
library_manifest: &ManifestPath,
current_dir: &AbsPath,
+ target_dir: &Utf8Path,
cargo_config: &CargoMetadataConfig,
+ no_deps: bool,
progress: &dyn Fn(String),
) -> Result<RustLibSrcWorkspace> {
tracing::debug!("Loading library metadata: {library_manifest}");
@@ -328,16 +335,11 @@
Some("nightly".to_owned()),
);
- let (mut res, _) = CargoWorkspace::fetch_metadata(
- library_manifest,
- current_dir,
- &cargo_config,
- self,
- false,
- // Make sure we never attempt to write to the sysroot
- true,
- progress,
- )?;
+ // Make sure we never attempt to write to the sysroot
+ let locked = true;
+ let (mut res, _) =
+ FetchMetadata::new(library_manifest, current_dir, &cargo_config, self, no_deps)
+ .exec(target_dir, locked, progress)?;
// Patch out `rustc-std-workspace-*` crates to point to the real crates.
// This is done prior to `CrateGraph` construction to prevent de-duplication logic from failing.
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index 4f11af2..ed72520 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -239,8 +239,13 @@
);
let cwd = AbsPathBuf::assert_utf8(temp_dir().join("smoke_test_real_sysroot_cargo"));
std::fs::create_dir_all(&cwd).unwrap();
- let loaded_sysroot =
- sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), &cwd, &|_| ());
+ let loaded_sysroot = sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::default_cargo(),
+ false,
+ &cwd,
+ &Utf8PathBuf::default(),
+ &|_| (),
+ );
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
diff --git a/crates/project-model/src/toolchain_info/rustc_cfg.rs b/crates/project-model/src/toolchain_info/rustc_cfg.rs
index a77f767..6e06e88 100644
--- a/crates/project-model/src/toolchain_info/rustc_cfg.rs
+++ b/crates/project-model/src/toolchain_info/rustc_cfg.rs
@@ -63,7 +63,7 @@
) -> anyhow::Result<String> {
const RUSTC_ARGS: [&str; 2] = ["--print", "cfg"];
let (sysroot, current_dir) = match config {
- QueryConfig::Cargo(sysroot, cargo_toml) => {
+ QueryConfig::Cargo(sysroot, cargo_toml, _) => {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS);
if let Some(target) = target {
@@ -109,7 +109,7 @@
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
- let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
+ let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert_ne!(get(cfg, None, &FxHashMap::default()), vec![]);
}
diff --git a/crates/project-model/src/toolchain_info/target_data_layout.rs b/crates/project-model/src/toolchain_info/target_data_layout.rs
index a4d0ec6..a28f468 100644
--- a/crates/project-model/src/toolchain_info/target_data_layout.rs
+++ b/crates/project-model/src/toolchain_info/target_data_layout.rs
@@ -20,7 +20,7 @@
})
};
let (sysroot, current_dir) = match config {
- QueryConfig::Cargo(sysroot, cargo_toml) => {
+ QueryConfig::Cargo(sysroot, cargo_toml, _) => {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
cmd.env("RUSTC_BOOTSTRAP", "1");
cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS).args([
@@ -66,7 +66,7 @@
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
- let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
+ let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, None, &FxHashMap::default()).is_ok());
}
diff --git a/crates/project-model/src/toolchain_info/target_tuple.rs b/crates/project-model/src/toolchain_info/target_tuple.rs
index f6ab853..9f12ede 100644
--- a/crates/project-model/src/toolchain_info/target_tuple.rs
+++ b/crates/project-model/src/toolchain_info/target_tuple.rs
@@ -5,7 +5,9 @@
use rustc_hash::FxHashMap;
use toolchain::Tool;
-use crate::{ManifestPath, Sysroot, toolchain_info::QueryConfig, utf8_stdout};
+use crate::{
+ Sysroot, cargo_config_file::CargoConfigFile, toolchain_info::QueryConfig, utf8_stdout,
+};
/// For cargo, runs `cargo -Zunstable-options config get build.target` to get the configured project target(s).
/// For rustc, runs `rustc --print -vV` to get the host target.
@@ -20,8 +22,8 @@
}
let (sysroot, current_dir) = match config {
- QueryConfig::Cargo(sysroot, cargo_toml) => {
- match cargo_config_build_target(cargo_toml, extra_env, sysroot) {
+ QueryConfig::Cargo(sysroot, cargo_toml, config_file) => {
+ match config_file.as_ref().and_then(cargo_config_build_target) {
Some(it) => return Ok(it),
None => (sysroot, cargo_toml.parent().as_ref()),
}
@@ -50,30 +52,30 @@
}
}
-fn cargo_config_build_target(
- cargo_toml: &ManifestPath,
- extra_env: &FxHashMap<String, Option<String>>,
- sysroot: &Sysroot,
-) -> Option<Vec<String>> {
- let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
- cmd.current_dir(cargo_toml.parent()).env("RUSTC_BOOTSTRAP", "1");
- cmd.args(["-Z", "unstable-options", "config", "get", "build.target"]);
- // if successful we receive `build.target = "target-tuple"`
- // or `build.target = ["<target 1>", ..]`
- // this might be `error: config value `build.target` is not set` in which case we
- // don't wanna log the error
- utf8_stdout(&mut cmd).and_then(parse_output_cargo_config_build_target).ok()
+fn cargo_config_build_target(config: &CargoConfigFile) -> Option<Vec<String>> {
+ match parse_json_cargo_config_build_target(config) {
+ Ok(v) => v,
+ Err(e) => {
+ tracing::debug!("Failed to discover cargo config build target {e:?}");
+ None
+ }
+ }
}
// Parses `"build.target = [target-tuple, target-tuple, ...]"` or `"build.target = "target-tuple"`
-fn parse_output_cargo_config_build_target(stdout: String) -> anyhow::Result<Vec<String>> {
- let trimmed = stdout.trim_start_matches("build.target = ").trim_matches('"');
-
- if !trimmed.starts_with('[') {
- return Ok([trimmed.to_owned()].to_vec());
+fn parse_json_cargo_config_build_target(
+ config: &CargoConfigFile,
+) -> anyhow::Result<Option<Vec<String>>> {
+ let target = config.get("build").and_then(|v| v.as_object()).and_then(|m| m.get("target"));
+ match target {
+ Some(serde_json::Value::String(s)) => Ok(Some(vec![s.to_owned()])),
+ Some(v) => serde_json::from_value(v.clone())
+ .map(Option::Some)
+ .context("Failed to parse `build.target` as an array of target"),
+        // The key may be absent (`error: config value `build.target` is not set`),
+        // in which case we don't want to log an error
+ None => Ok(None),
}
-
- serde_json::from_str(trimmed).context("Failed to parse `build.target` as an array of target")
}
#[cfg(test)]
@@ -90,7 +92,7 @@
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
- let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
+ let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, None, &FxHashMap::default()).is_ok());
}
diff --git a/crates/project-model/src/toolchain_info/version.rs b/crates/project-model/src/toolchain_info/version.rs
index 91ba859..357053d 100644
--- a/crates/project-model/src/toolchain_info/version.rs
+++ b/crates/project-model/src/toolchain_info/version.rs
@@ -12,7 +12,7 @@
extra_env: &FxHashMap<String, Option<String>>,
) -> Result<Option<Version>, anyhow::Error> {
let (mut cmd, prefix) = match config {
- QueryConfig::Cargo(sysroot, cargo_toml) => {
+ QueryConfig::Cargo(sysroot, cargo_toml, _) => {
(sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env), "cargo ")
}
QueryConfig::Rustc(sysroot, current_dir) => {
@@ -44,7 +44,7 @@
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
- let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
+ let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, &FxHashMap::default()).is_ok());
}
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 5bc64df..677f29e 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -7,8 +7,8 @@
use anyhow::Context;
use base_db::{
CrateBuilderId, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin,
- CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroPaths,
- TargetLayoutLoadResult,
+ CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, TargetLayoutLoadResult,
};
use cfg::{CfgAtom, CfgDiff, CfgOptions};
use intern::{Symbol, sym};
@@ -25,11 +25,9 @@
ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
WorkspaceBuildScripts,
build_dependencies::BuildScriptOutput,
- cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource},
- env::{
- cargo_config_build_target_dir, cargo_config_env, inject_cargo_env,
- inject_cargo_package_env, inject_rustc_tool_env,
- },
+ cargo_config_file,
+ cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource},
+ env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
project_json::{Crate, CrateArrayIdx},
sysroot::RustLibSrcWorkspace,
toolchain_info::{QueryConfig, rustc_cfg, target_data_layout, target_tuple, version},
@@ -270,7 +268,9 @@
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
progress("querying project metadata".to_owned());
- let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml);
+ let config_file = cargo_config_file::read(cargo_toml, extra_env, &sysroot);
+ let config_file_ = config_file.clone();
+ let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml, &config_file_);
let targets =
target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default();
let toolchain = version::get(toolchain_config, extra_env)
@@ -282,10 +282,24 @@
.ok()
.flatten();
+ let fetch_metadata = FetchMetadata::new(
+ cargo_toml,
+ workspace_dir,
+ &CargoMetadataConfig {
+ features: features.clone(),
+ targets: targets.clone(),
+ extra_args: extra_args.clone(),
+ extra_env: extra_env.clone(),
+ toolchain_version: toolchain.clone(),
+ kind: "workspace",
+ },
+ &sysroot,
+ *no_deps,
+ );
let target_dir = config
.target_dir
.clone()
- .or_else(|| cargo_config_build_target_dir(cargo_toml, extra_env, &sysroot))
+ .or_else(|| fetch_metadata.no_deps_metadata().map(|m| m.target_directory.clone()))
.unwrap_or_else(|| workspace_dir.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
@@ -319,7 +333,7 @@
};
rustc_dir.and_then(|rustc_dir| {
info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
- match CargoWorkspace::fetch_metadata(
+ match FetchMetadata::new(
&rustc_dir,
workspace_dir,
&CargoMetadataConfig {
@@ -327,15 +341,12 @@
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
- target_dir: target_dir.clone(),
toolchain_version: toolchain.clone(),
kind: "rustc-dev"
},
&sysroot,
*no_deps,
- true,
- progress,
- ) {
+ ).exec(&target_dir, true, progress) {
Ok((meta, _error)) => {
let workspace = CargoWorkspace::new(
meta,
@@ -364,39 +375,22 @@
})
});
- let cargo_metadata = s.spawn(|| {
- CargoWorkspace::fetch_metadata(
- cargo_toml,
- workspace_dir,
- &CargoMetadataConfig {
- features: features.clone(),
- targets: targets.clone(),
- extra_args: extra_args.clone(),
- extra_env: extra_env.clone(),
- target_dir: target_dir.clone(),
- toolchain_version: toolchain.clone(),
- kind: "workspace",
- },
- &sysroot,
- *no_deps,
- false,
- progress,
- )
- });
+ let cargo_metadata = s.spawn(|| fetch_metadata.exec(&target_dir, false, progress));
let loaded_sysroot = s.spawn(|| {
sysroot.load_workspace(
&RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
config,
&targets,
toolchain.clone(),
- target_dir.clone(),
)),
+ config.no_deps,
workspace_dir,
+ &target_dir,
progress,
)
});
let cargo_config_extra_env =
- s.spawn(|| cargo_config_env(cargo_toml, extra_env, &sysroot));
+ s.spawn(move || cargo_config_env(cargo_toml, &config_file));
thread::Result::Ok((
rustc_cfg.join()?,
data_layout.join()?,
@@ -475,9 +469,7 @@
let target_dir = config
.target_dir
.clone()
- .or_else(|| {
- cargo_config_build_target_dir(project_json.manifest()?, &config.extra_env, &sysroot)
- })
+ .or_else(|| cargo_target_dir(project_json.manifest()?, &config.extra_env, &sysroot))
.unwrap_or_else(|| project_root.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
@@ -499,7 +491,9 @@
if let Some(sysroot_project) = sysroot_project {
sysroot.load_workspace(
&RustSourceWorkspaceConfig::Json(*sysroot_project),
+ config.no_deps,
project_root,
+ &target_dir,
progress,
)
} else {
@@ -508,9 +502,10 @@
config,
&targets,
toolchain.clone(),
- target_dir,
)),
+ config.no_deps,
project_root,
+ &target_dir,
progress,
)
}
@@ -551,7 +546,8 @@
None => Sysroot::empty(),
};
- let query_config = QueryConfig::Cargo(&sysroot, detached_file);
+ let config_file = cargo_config_file::read(detached_file, &config.extra_env, &sysroot);
+ let query_config = QueryConfig::Cargo(&sysroot, detached_file, &config_file);
let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
.unwrap_or_default();
@@ -560,7 +556,7 @@
let target_dir = config
.target_dir
.clone()
- .or_else(|| cargo_config_build_target_dir(detached_file, &config.extra_env, &sysroot))
+ .or_else(|| cargo_target_dir(detached_file, &config.extra_env, &sysroot))
.unwrap_or_else(|| dir.join("target").into());
let loaded_sysroot = sysroot.load_workspace(
@@ -568,16 +564,17 @@
config,
&targets,
toolchain.clone(),
- target_dir.clone(),
)),
+ config.no_deps,
dir,
+ &target_dir,
&|_| (),
);
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
- let cargo_script = CargoWorkspace::fetch_metadata(
+ let fetch_metadata = FetchMetadata::new(
detached_file,
dir,
&CargoMetadataConfig {
@@ -585,25 +582,26 @@
targets,
extra_args: config.extra_args.clone(),
extra_env: config.extra_env.clone(),
- target_dir,
toolchain_version: toolchain.clone(),
kind: "detached-file",
},
&sysroot,
config.no_deps,
- false,
- &|_| (),
- )
- .ok()
- .map(|(ws, error)| {
- let cargo_config_extra_env =
- cargo_config_env(detached_file, &config.extra_env, &sysroot);
- (
- CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false),
- WorkspaceBuildScripts::default(),
- error.map(Arc::new),
- )
- });
+ );
+ let target_dir = config
+ .target_dir
+ .clone()
+ .or_else(|| fetch_metadata.no_deps_metadata().map(|m| m.target_directory.clone()))
+ .unwrap_or_else(|| dir.join("target").into());
+ let cargo_script =
+ fetch_metadata.exec(&target_dir, false, &|_| ()).ok().map(|(ws, error)| {
+ let cargo_config_extra_env = cargo_config_env(detached_file, &config_file);
+ (
+ CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false),
+ WorkspaceBuildScripts::default(),
+ error.map(Arc::new),
+ )
+ });
Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::DetachedFile {
@@ -744,7 +742,7 @@
}
}
- pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
+ pub fn find_sysroot_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
self.sysroot.discover_proc_macro_srv()
}
@@ -1641,11 +1639,11 @@
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
Some(path) => Ok((cargo_name.to_owned(), path.clone())),
- None if has_errors => Err("failed to build proc-macro".to_owned()),
- None => Err("proc-macro crate build data is missing dylib path".to_owned()),
+ None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
+ None => Err(ProcMacroLoadingError::MissingDylibPath),
}
}
- None => Err("build scripts have not been built".to_owned()),
+ None => Err(ProcMacroLoadingError::NotYetBuilt),
};
proc_macros.insert(crate_id, proc_macro);
}
@@ -1885,15 +1883,33 @@
config: &CargoConfig,
targets: &[String],
toolchain_version: Option<Version>,
- target_dir: Utf8PathBuf,
) -> CargoMetadataConfig {
CargoMetadataConfig {
features: Default::default(),
targets: targets.to_vec(),
extra_args: Default::default(),
extra_env: config.extra_env.clone(),
- target_dir,
toolchain_version,
kind: "sysroot",
}
}
+
+fn cargo_target_dir(
+ manifest: &ManifestPath,
+ extra_env: &FxHashMap<String, Option<String>>,
+ sysroot: &Sysroot,
+) -> Option<Utf8PathBuf> {
+ let cargo = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
+ let mut meta = cargo_metadata::MetadataCommand::new();
+ meta.cargo_path(cargo.get_program());
+ meta.manifest_path(manifest);
+ // `--no-deps` doesn't (over)write lockfiles as it doesn't do any package resolve.
+ // So we can use it to get `target_directory` before copying lockfiles
+ let mut other_options = vec!["--no-deps".to_owned()];
+ if manifest.is_rust_manifest() {
+ meta.env("RUSTC_BOOTSTRAP", "1");
+ other_options.push("-Zscript".to_owned());
+ }
+ meta.other_options(other_options);
+ meta.exec().map(|m| m.target_directory).ok()
+}
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 0ee0198..fc89f48 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -796,7 +796,7 @@
// region:expressions
let (previous_exprs, previous_unknown, previous_partially_unknown) =
(num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
- for (expr_id, _) in body.exprs.iter() {
+ for (expr_id, _) in body.exprs() {
let ty = &inference_result[expr_id];
num_exprs += 1;
let unknown_or_partial = if ty.is_unknown() {
@@ -901,7 +901,7 @@
// region:patterns
let (previous_pats, previous_unknown, previous_partially_unknown) =
(num_pats, num_pats_unknown, num_pats_partially_unknown);
- for (pat_id, _) in body.pats.iter() {
+ for (pat_id, _) in body.pats() {
let ty = &inference_result[pat_id];
num_pats += 1;
let unknown_or_partial = if ty.is_unknown() {
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 740fcd8..30ac93f 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -9,6 +9,7 @@
use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
use ide_db::base_db;
use itertools::Either;
+use paths::Utf8PathBuf;
use profile::StopWatch;
use project_model::toolchain_info::{QueryConfig, target_data_layout};
use project_model::{
@@ -75,8 +76,13 @@
};
let mut sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env);
- let loaded_sysroot =
- sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), &path, &|_| ());
+ let loaded_sysroot = sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::default_cargo(),
+ false,
+ &path,
+ &Utf8PathBuf::default(),
+ &|_| (),
+ );
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index d258c5d..37f83f6 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -25,7 +25,7 @@
eprintln!("Generating SCIP start...");
let now = Instant::now();
- let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
+ let no_progress = &|s| eprintln!("rust-analyzer: Loading {s}");
let root =
vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize();
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index e716d14..51d4c29 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1526,7 +1526,7 @@
CompletionConfig {
enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
- && self.caps.completion_item_edit_resolve(),
+ && self.caps.has_completion_item_resolve_additionalTextEdits(),
enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
enable_auto_iter: *self.completion_autoIter_enable(source_root),
enable_auto_await: *self.completion_autoAwait_enable(source_root),
@@ -2355,10 +2355,6 @@
.and_then(|it| it.version.as_ref())
}
- pub fn client_is_helix(&self) -> bool {
- self.client_info.as_ref().map(|it| it.name == "helix").unwrap_or_default()
- }
-
pub fn client_is_neovim(&self) -> bool {
self.client_info.as_ref().map(|it| it.name == "Neovim").unwrap_or_default()
}
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index 0e41824..91d37bd 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -6,6 +6,7 @@
use cargo_metadata::PackageId;
use crossbeam_channel::{Receiver, Sender, select_biased, unbounded};
use ide_db::FxHashSet;
+use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
use serde::Deserialize as _;
@@ -379,7 +380,11 @@
package_id = msg.package_id.repr,
"artifact received"
);
- self.report_progress(Progress::DidCheckCrate(msg.target.name));
+ self.report_progress(Progress::DidCheckCrate(format!(
+ "{} ({})",
+ msg.target.name,
+ msg.target.kind.iter().format_with(", ", |kind, f| f(&kind)),
+ )));
let package_id = Arc::new(msg.package_id);
if self.diagnostics_cleared_for.insert(package_id.clone()) {
tracing::trace!(
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index a870232..62a28a1 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -101,7 +101,7 @@
pub(crate) last_reported_status: lsp_ext::ServerStatusParams,
// proc macros
- pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroClient>]>,
+ pub(crate) proc_macro_clients: Arc<[Option<anyhow::Result<ProcMacroClient>>]>,
pub(crate) build_deps_changed: bool,
// Flycheck
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index 40d0556..aea116e 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -6,7 +6,7 @@
use ide_db::base_db::{
DbPanicContext,
- salsa::{self, Cancelled, UnexpectedCycle},
+ salsa::{self, Cancelled},
};
use lsp_server::{ExtractError, Response, ResponseError};
use serde::{Serialize, de::DeserializeOwned};
@@ -350,9 +350,6 @@
if let Some(panic_message) = panic_message {
message.push_str(": ");
message.push_str(panic_message);
- } else if let Some(cycle) = panic.downcast_ref::<UnexpectedCycle>() {
- tracing::error!("{cycle}");
- message.push_str(": unexpected cycle");
} else if let Ok(cancelled) = panic.downcast::<Cancelled>() {
tracing::error!("Cancellation propagated out of salsa! This is a bug");
return Err(HandlerCancelledError::Inner(*cancelled));
diff --git a/crates/rust-analyzer/src/lsp/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs
index 04e31f3..f94e748 100644
--- a/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -42,7 +42,7 @@
hover_provider: Some(HoverProviderCapability::Simple(true)),
completion_provider: Some(CompletionOptions {
resolve_provider: if config.client_is_neovim() {
- config.completion_item_edit_resolve().then_some(true)
+ config.has_completion_item_resolve_additionalTextEdits().then_some(true)
} else {
Some(config.caps().completions_resolve_provider())
},
@@ -207,8 +207,8 @@
serde_json::from_value(self.0.experimental.as_ref()?.get(index)?.clone()).ok()
}
- /// Parses client capabilities and returns all completion resolve capabilities rust-analyzer supports.
- pub fn completion_item_edit_resolve(&self) -> bool {
+ #[allow(non_snake_case)]
+ pub fn has_completion_item_resolve_additionalTextEdits(&self) -> bool {
(|| {
Some(
self.0
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 8a848fb..292be1d 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -911,7 +911,8 @@
| FoldKind::Array
| FoldKind::TraitAliases
| FoldKind::ExternCrates
- | FoldKind::MatchArm => None,
+ | FoldKind::MatchArm
+ | FoldKind::Function => None,
};
let range = range(line_index, fold.range);
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 0c0438c..00cf890 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -783,9 +783,14 @@
DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it),
};
- let handle =
- discover.spawn(arg, &std::env::current_dir().unwrap()).unwrap();
- self.discover_handle = Some(handle);
+ let handle = discover.spawn(
+ arg,
+ &std::env::current_dir()
+ .expect("Failed to get cwd during project discovery"),
+ );
+ self.discover_handle = Some(handle.unwrap_or_else(|e| {
+ panic!("Failed to spawn project discovery command: {e}")
+ }));
}
}
}
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 133d5a6..e798aa6 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -18,7 +18,7 @@
use hir::{ChangeWithProcMacros, ProcMacrosBuilder, db::DefDatabase};
use ide_db::{
FxHashMap,
- base_db::{CrateGraphBuilder, ProcMacroPaths, salsa::Durability},
+ base_db::{CrateGraphBuilder, ProcMacroLoadingError, ProcMacroPaths, salsa::Durability},
};
use itertools::Itertools;
use load_cargo::{ProjectFolders, load_proc_macro};
@@ -194,8 +194,7 @@
format_to!(message, "{e}");
});
- let proc_macro_clients =
- self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None));
+ let proc_macro_clients = self.proc_macro_clients.iter().chain(iter::repeat(&None));
for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
if let ProjectWorkspaceKind::Cargo { error: Some(error), .. }
@@ -252,7 +251,8 @@
message.push_str("\n\n");
}
}
- _ => (),
+ // sysroot was explicitly not set so we didn't discover a server
+ None => {}
}
}
}
@@ -419,14 +419,11 @@
};
let mut builder = ProcMacrosBuilder::default();
- let proc_macro_clients = proc_macro_clients
- .iter()
- .map(|res| res.as_ref().map_err(|e| e.to_string()))
- .chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into())));
+ let proc_macro_clients = proc_macro_clients.iter().chain(iter::repeat(&None));
for (client, paths) in proc_macro_clients.zip(paths) {
for (crate_id, res) in paths.iter() {
let expansion_res = match client {
- Ok(client) => match res {
+ Some(Ok(client)) => match res {
Ok((crate_name, path)) => {
progress(format!("loading proc-macros: {path}"));
let ignored_proc_macros = ignored_proc_macros
@@ -438,9 +435,14 @@
load_proc_macro(client, path, ignored_proc_macros)
}
- Err(e) => Err((e.clone(), true)),
+ Err(e) => Err(e.clone()),
},
- Err(ref e) => Err((e.clone(), true)),
+ Some(Err(e)) => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ e.to_string().into_boxed_str(),
+ )),
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running".into(),
+ )),
};
builder.insert(*crate_id, expansion_res)
}
@@ -655,7 +657,10 @@
self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
let path = match self.config.proc_macro_srv() {
Some(path) => path,
- None => ws.find_sysroot_proc_macro_srv()?,
+ None => match ws.find_sysroot_proc_macro_srv()? {
+ Ok(path) => path,
+ Err(e) => return Some(Err(e)),
+ },
};
let env: FxHashMap<_, _> = match &ws.kind {
@@ -682,14 +687,14 @@
};
info!("Using proc-macro server at {path}");
- ProcMacroClient::spawn(&path, &env).map_err(|err| {
+ Some(ProcMacroClient::spawn(&path, &env).map_err(|err| {
tracing::error!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
);
anyhow::format_err!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
)
- })
+ }))
}))
}
@@ -753,14 +758,14 @@
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true))))
+ .map(|id| (id, Err(ProcMacroLoadingError::NotYetBuilt)))
.collect(),
);
} else {
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), false))))
+ .map(|id| (id, Err(ProcMacroLoadingError::Disabled)))
.collect(),
);
}
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 59073af..1b940c7 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -880,7 +880,8 @@
#[test]
fn diagnostics_dont_block_typing() {
- if skip_slow_tests() {
+ if skip_slow_tests() || std::env::var("CI").is_ok() {
+ // FIXME: This test is failing too frequently (therefore we disable it on CI).
return;
}
diff --git a/crates/span/src/ast_id.rs b/crates/span/src/ast_id.rs
index 8e95971..a9288ec 100644
--- a/crates/span/src/ast_id.rs
+++ b/crates/span/src/ast_id.rs
@@ -92,6 +92,7 @@
Use,
Impl,
BlockExpr,
+ AsmExpr,
Fixup,
);
if f.alternate() {
@@ -107,9 +108,10 @@
}
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
+#[repr(u8)]
enum ErasedFileAstIdKind {
/// This needs to not change because it's depended upon by the proc macro server.
- Fixup,
+ Fixup = 0,
// The following are associated with `ErasedHasNameFileAstId`.
Enum,
Struct,
@@ -143,6 +145,10 @@
Impl,
/// Associated with [`BlockExprFileAstId`].
BlockExpr,
+ // `global_asm!()` is an item, so we need to give it an `AstId`. So we give to all inline asm
+ // because incrementality is not a problem, they will always be the only item in the macro file,
+ // and memory usage also not because they're rare.
+ AsmExpr,
/// Keep this last.
Root,
}
@@ -203,14 +209,17 @@
.or_else(|| extern_block_ast_id(node, index_map))
.or_else(|| use_ast_id(node, index_map))
.or_else(|| impl_ast_id(node, index_map))
+ .or_else(|| asm_expr_ast_id(node, index_map))
}
fn should_alloc(node: &SyntaxNode) -> bool {
- should_alloc_has_name(node)
- || should_alloc_assoc_item(node)
- || ast::ExternBlock::can_cast(node.kind())
- || ast::Use::can_cast(node.kind())
- || ast::Impl::can_cast(node.kind())
+ let kind = node.kind();
+ should_alloc_has_name(kind)
+ || should_alloc_assoc_item(kind)
+ || ast::ExternBlock::can_cast(kind)
+ || ast::Use::can_cast(kind)
+ || ast::Impl::can_cast(kind)
+ || ast::AsmExpr::can_cast(kind)
}
#[inline]
@@ -277,7 +286,6 @@
#[derive(Hash)]
struct ErasedHasNameFileAstId<'a> {
- kind: SyntaxKind,
name: &'a str,
}
@@ -331,6 +339,19 @@
}
}
+impl AstIdNode for ast::AsmExpr {}
+
+fn asm_expr_ast_id(
+ node: &SyntaxNode,
+ index_map: &mut ErasedAstIdNextIndexMap,
+) -> Option<ErasedFileAstId> {
+ if ast::AsmExpr::can_cast(node.kind()) {
+ Some(index_map.new_id(ErasedFileAstIdKind::AsmExpr, ()))
+ } else {
+ None
+ }
+}
+
impl AstIdNode for ast::Impl {}
fn impl_ast_id(
@@ -413,9 +434,9 @@
}
macro_rules! register_enum_ast_id {
- (impl AstIdNode for $($ident:ident),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
};
}
@@ -426,13 +447,12 @@
}
macro_rules! register_has_name_ast_id {
- (impl AstIdNode for $($ident:ident = $name_method:ident),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident = $name_method:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
fn has_name_ast_id(node: &SyntaxNode, index_map: &mut ErasedAstIdNextIndexMap) -> Option<ErasedFileAstId> {
- let kind = node.kind();
match_ast! {
match node {
$(
@@ -440,7 +460,6 @@
let name = node.$name_method();
let name = name.as_ref().map_or("", |it| it.text_non_mutable());
let result = ErasedHasNameFileAstId {
- kind,
name,
};
Some(index_map.new_id(ErasedFileAstIdKind::$ident, result))
@@ -451,8 +470,7 @@
}
}
- fn should_alloc_has_name(node: &SyntaxNode) -> bool {
- let kind = node.kind();
+ fn should_alloc_has_name(kind: SyntaxKind) -> bool {
false $( || ast::$ident::can_cast(kind) )*
}
};
@@ -472,9 +490,9 @@
}
macro_rules! register_assoc_item_ast_id {
- (impl AstIdNode for $($ident:ident = $name_callback:expr),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident = $name_callback:expr),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
fn assoc_item_ast_id(
@@ -482,7 +500,6 @@
index_map: &mut ErasedAstIdNextIndexMap,
parent: Option<&ErasedFileAstId>,
) -> Option<ErasedFileAstId> {
- let kind = node.kind();
match_ast! {
match node {
$(
@@ -490,7 +507,6 @@
let name = $name_callback(node);
let name = name.as_ref().map_or("", |it| it.text_non_mutable());
let properties = ErasedHasNameFileAstId {
- kind,
name,
};
let result = ErasedAssocItemFileAstId {
@@ -505,8 +521,7 @@
}
}
- fn should_alloc_assoc_item(node: &SyntaxNode) -> bool {
- let kind = node.kind();
+ fn should_alloc_assoc_item(kind: SyntaxKind) -> bool {
false $( || ast::$ident::can_cast(kind) )*
}
};
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 7bb88ac..aef3fbf 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -97,6 +97,7 @@
const LOCATION: salsa::plumbing::Location =
salsa::plumbing::Location { file: file!(), line: line!() };
const DEBUG_NAME: &'static str = "SyntaxContextData";
+ const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
type Fields<'a> = SyntaxContextData;
type Struct<'a> = SyntaxContext;
}
@@ -108,7 +109,9 @@
static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<SyntaxContext>> =
zalsa_::IngredientCache::new();
CACHE.get_or_create(db.zalsa(), || {
- db.zalsa().add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ db.zalsa()
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
})
}
}
@@ -130,9 +133,12 @@
type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
fn lookup_or_create_ingredient_index(
- aux: &salsa::plumbing::Zalsa,
+ zalsa: &salsa::plumbing::Zalsa,
) -> salsa::plumbing::IngredientIndices {
- aux.add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>().into()
+ zalsa
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
+ .into()
}
#[inline]
@@ -326,14 +332,14 @@
None
} else {
// SAFETY: By our invariant, this is either a root (which we verified it's not) or a valid `salsa::Id`.
- unsafe { Some(salsa::Id::from_u32(self.0)) }
+ unsafe { Some(salsa::Id::from_index(self.0)) }
}
}
#[inline]
fn from_salsa_id(id: salsa::Id) -> Self {
// SAFETY: This comes from a Salsa ID.
- unsafe { Self::from_u32(id.as_u32()) }
+ unsafe { Self::from_u32(id.index()) }
}
#[inline]
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index 3f43947..4cbc88c 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -158,6 +158,7 @@
| TypeAlias
| Union
| Use
+| AsmExpr
MacroRules =
Attr* Visibility?
@@ -409,7 +410,8 @@
// global_asm := "global_asm!(" format_string *("," format_string) *("," operand) [","] ")"
// format_string := STRING_LITERAL / RAW_STRING_LITERAL
AsmExpr =
- Attr* 'builtin' '#' 'asm' '(' template:(Expr (',' Expr)*) (AsmPiece (',' AsmPiece)*)? ','? ')'
+ Attr* 'builtin' '#' ( 'asm' | 'global_asm' | 'naked_asm' )
+ '(' template:(Expr (',' Expr)*) (AsmPiece (',' AsmPiece)*)? ','? ')'
// operand_expr := expr / "_" / expr "=>" expr / expr "=>" "_"
AsmOperandExpr = in_expr:Expr ('=>' out_expr:Expr)?
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index e60243f..e902516 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -406,42 +406,6 @@
}
}
-impl ast::TypeParam {
- pub fn remove_default(&self) {
- if let Some((eq, last)) = self
- .syntax()
- .children_with_tokens()
- .find(|it| it.kind() == T![=])
- .zip(self.syntax().last_child_or_token())
- {
- ted::remove_all(eq..=last);
-
- // remove any trailing ws
- if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) {
- last.detach();
- }
- }
- }
-}
-
-impl ast::ConstParam {
- pub fn remove_default(&self) {
- if let Some((eq, last)) = self
- .syntax()
- .children_with_tokens()
- .find(|it| it.kind() == T![=])
- .zip(self.syntax().last_child_or_token())
- {
- ted::remove_all(eq..=last);
-
- // remove any trailing ws
- if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) {
- last.detach();
- }
- }
- }
-}
-
pub trait Removable: AstNode {
fn remove(&self);
}
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 79a9f4d..2b86246 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -118,6 +118,14 @@
pub fn asm_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![asm]) }
#[inline]
pub fn builtin_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![builtin]) }
+ #[inline]
+ pub fn global_asm_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![global_asm])
+ }
+ #[inline]
+ pub fn naked_asm_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![naked_asm])
+ }
}
pub struct AsmLabel {
pub(crate) syntax: SyntaxNode,
@@ -2087,6 +2095,7 @@
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Item {
+ AsmExpr(AsmExpr),
Const(Const),
Enum(Enum),
ExternBlock(ExternBlock),
@@ -2106,7 +2115,6 @@
Use(Use),
}
impl ast::HasAttrs for Item {}
-impl ast::HasDocComments for Item {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Pat {
@@ -8409,6 +8417,10 @@
}
}
}
+impl From<AsmExpr> for Item {
+ #[inline]
+ fn from(node: AsmExpr) -> Item { Item::AsmExpr(node) }
+}
impl From<Const> for Item {
#[inline]
fn from(node: Const) -> Item { Item::Const(node) }
@@ -8482,7 +8494,8 @@
fn can_cast(kind: SyntaxKind) -> bool {
matches!(
kind,
- CONST
+ ASM_EXPR
+ | CONST
| ENUM
| EXTERN_BLOCK
| EXTERN_CRATE
@@ -8504,6 +8517,7 @@
#[inline]
fn cast(syntax: SyntaxNode) -> Option<Self> {
let res = match syntax.kind() {
+ ASM_EXPR => Item::AsmExpr(AsmExpr { syntax }),
CONST => Item::Const(Const { syntax }),
ENUM => Item::Enum(Enum { syntax }),
EXTERN_BLOCK => Item::ExternBlock(ExternBlock { syntax }),
@@ -8528,6 +8542,7 @@
#[inline]
fn syntax(&self) -> &SyntaxNode {
match self {
+ Item::AsmExpr(it) => &it.syntax,
Item::Const(it) => &it.syntax,
Item::Enum(it) => &it.syntax,
Item::ExternBlock(it) => &it.syntax,
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 955aada..d67f24f 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -680,7 +680,7 @@
let expr = elements.into_iter().format(", ");
expr_from_text(&format!("({expr})"))
}
-pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
+pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::BinExpr {
expr_from_text(&format!("{lhs} = {rhs}"))
}
fn expr_from_text<E: Into<ast::Expr> + AstNode>(text: &str) -> E {
@@ -842,9 +842,10 @@
}
pub fn match_arm(pat: ast::Pat, guard: Option<ast::MatchGuard>, expr: ast::Expr) -> ast::MatchArm {
+ let comma_str = if expr.is_block_like() { "" } else { "," };
return match guard {
- Some(guard) => from_text(&format!("{pat} {guard} => {expr}")),
- None => from_text(&format!("{pat} => {expr}")),
+ Some(guard) => from_text(&format!("{pat} {guard} => {expr}{comma_str}")),
+ None => from_text(&format!("{pat} => {expr}{comma_str}")),
};
fn from_text(text: &str) -> ast::MatchArm {
@@ -877,7 +878,7 @@
let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
let needs_comma =
arm.comma_token().is_none() && arm.expr().is_none_or(|it| !it.is_block_like());
- let comma = if needs_comma { "," } else { "" };
+ let comma = if needs_comma && arm.comma_token().is_none() { "," } else { "" };
let arm = arm.syntax();
format_to_acc!(acc, " {arm}{comma}\n")
});
diff --git a/crates/syntax/src/ast/syntax_factory/constructors.rs b/crates/syntax/src/ast/syntax_factory/constructors.rs
index 429e51b..1ba6107 100644
--- a/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -440,6 +440,19 @@
ast
}
+ pub fn expr_assignment(&self, lhs: ast::Expr, rhs: ast::Expr) -> ast::BinExpr {
+ let ast = make::expr_assignment(lhs.clone(), rhs.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(lhs.syntax().clone(), ast.lhs().unwrap().syntax().clone());
+ builder.map_node(rhs.syntax().clone(), ast.rhs().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn expr_bin(&self, lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::BinExpr {
let ast::Expr::BinExpr(ast) =
make::expr_bin_op(lhs.clone(), op, rhs.clone()).clone_for_update()
@@ -1212,6 +1225,43 @@
ast
}
+ pub fn attr_outer(&self, meta: ast::Meta) -> ast::Attr {
+ let ast = make::attr_outer(meta.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(meta.syntax().clone(), ast.meta().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn attr_inner(&self, meta: ast::Meta) -> ast::Attr {
+ let ast = make::attr_inner(meta.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(meta.syntax().clone(), ast.meta().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
+ pub fn meta_token_tree(&self, path: ast::Path, tt: ast::TokenTree) -> ast::Meta {
+ let ast = make::meta_token_tree(path.clone(), tt.clone()).clone_for_update();
+
+ if let Some(mut mapping) = self.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+ builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone());
+ builder.map_node(tt.syntax().clone(), ast.token_tree().unwrap().syntax().clone());
+ builder.finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn token_tree(
&self,
delimiter: SyntaxKind,
@@ -1242,6 +1292,10 @@
pub fn whitespace(&self, text: &str) -> SyntaxToken {
make::tokens::whitespace(text)
}
+
+ pub fn ident(&self, text: &str) -> SyntaxToken {
+ make::tokens::ident(text)
+ }
}
// `ext` constructors
diff --git a/crates/syntax/src/syntax_editor.rs b/crates/syntax/src/syntax_editor.rs
index 31caf61..3fa5848 100644
--- a/crates/syntax/src/syntax_editor.rs
+++ b/crates/syntax/src/syntax_editor.rs
@@ -435,7 +435,7 @@
_ => {
let var_name = 2 + 2;
(var_name, true)
- }"#]];
+ },"#]];
expect.assert_eq(&edit.new_root.to_string());
assert_eq!(edit.find_annotation(placeholder_snippet).len(), 2);
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 1d821e9..e830c6a 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -435,14 +435,16 @@
continue;
}
- let mut active_line_region = false;
- let mut inactive_line_region = false;
+ let mut active_line_region = 0;
+ let mut inactive_line_region = 0;
if let Some(idx) = trimmed.find("// :!") {
- inactive_line_region = true;
- inactive_regions.push(&trimmed[idx + "// :!".len()..]);
+ let regions = trimmed[idx + "// :!".len()..].split(", ");
+ inactive_line_region += regions.clone().count();
+ inactive_regions.extend(regions);
} else if let Some(idx) = trimmed.find("// :") {
- active_line_region = true;
- active_regions.push(&trimmed[idx + "// :".len()..]);
+ let regions = trimmed[idx + "// :".len()..].split(", ");
+ active_line_region += regions.clone().count();
+ active_regions.extend(regions);
}
let mut keep = true;
@@ -462,11 +464,11 @@
if keep {
buf.push_str(line);
}
- if active_line_region {
- active_regions.pop().unwrap();
+ if active_line_region > 0 {
+ active_regions.drain(active_regions.len() - active_line_region..);
}
- if inactive_line_region {
- inactive_regions.pop().unwrap();
+ if inactive_line_region > 0 {
+ inactive_regions.drain(inactive_regions.len() - inactive_line_region..);
}
}
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index d48063f..7b719b5 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -11,10 +11,13 @@
//! add:
//! asm:
//! assert:
+//! as_mut: sized
//! as_ref: sized
//! async_fn: fn, tuple, future, copy
//! bool_impl: option, fn
//! builtin_impls:
+//! borrow: sized
+//! borrow_mut: borrow
//! cell: copy, drop
//! clone: sized
//! coerce_pointee: derive, sized, unsize, coerce_unsized, dispatch_from_dyn
@@ -228,8 +231,11 @@
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Hash($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Hash($item:item) {}
+ }
+ pub use derive::Hash;
// endregion:derive
}
// endregion:hash
@@ -377,11 +383,30 @@
fn as_ref(&self) -> &T;
}
// endregion:as_ref
+ // region:as_mut
+ pub trait AsMut<T: crate::marker::PointeeSized>: crate::marker::PointeeSized {
+ fn as_mut(&mut self) -> &mut T;
+ }
+ // endregion:as_mut
// region:infallible
pub enum Infallible {}
// endregion:infallible
}
+pub mod borrow {
+ // region:borrow
+ pub trait Borrow<Borrowed: ?Sized> {
+ fn borrow(&self) -> &Borrowed;
+ }
+ // endregion:borrow
+
+ // region:borrow_mut
+ pub trait BorrowMut<Borrowed: ?Sized>: Borrow<Borrowed> {
+ fn borrow_mut(&mut self) -> &mut Borrowed;
+ }
+ // endregion:borrow_mut
+}
+
pub mod mem {
// region:manually_drop
use crate::marker::PointeeSized;
@@ -986,8 +1011,7 @@
}
#[lang = "add_assign"]
- #[const_trait]
- pub trait AddAssign<Rhs = Self> {
+ pub const trait AddAssign<Rhs = Self> {
fn add_assign(&mut self, rhs: Rhs);
}
@@ -1264,8 +1288,11 @@
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Debug($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Debug($item:item) {}
+ }
+ pub use derive::Debug;
// endregion:derive
// region:builtin_impls
@@ -1913,6 +1940,7 @@
clone::Clone, // :clone
cmp::{Eq, PartialEq}, // :eq
cmp::{Ord, PartialOrd}, // :ord
+ convert::AsMut, // :as_mut
convert::AsRef, // :as_ref
convert::{From, Into, TryFrom, TryInto}, // :from
default::Default, // :default
@@ -1931,6 +1959,8 @@
panic, // :panic
result::Result::{self, Err, Ok}, // :result
str::FromStr, // :str
+ fmt::derive::Debug, // :fmt, derive
+ hash::derive::Hash, // :hash, derive
};
}
diff --git a/crates/tt/src/iter.rs b/crates/tt/src/iter.rs
index 0418c00..2e89d76 100644
--- a/crates/tt/src/iter.rs
+++ b/crates/tt/src/iter.rs
@@ -211,11 +211,23 @@
}
}
+#[derive(Clone)]
pub enum TtElement<'a, S> {
Leaf(&'a Leaf<S>),
Subtree(&'a Subtree<S>, TtIter<'a, S>),
}
+impl<S: Copy + fmt::Debug> fmt::Debug for TtElement<'_, S> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::Leaf(leaf) => f.debug_tuple("Leaf").field(leaf).finish(),
+ Self::Subtree(subtree, inner) => {
+ f.debug_tuple("Subtree").field(subtree).field(inner).finish()
+ }
+ }
+ }
+}
+
impl<S: Copy> TtElement<'_, S> {
#[inline]
pub fn first_span(&self) -> S {
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index 14574a6..4412338 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -579,7 +579,7 @@
{
use rustc_lexer::LiteralKind;
- let token = rustc_lexer::tokenize(text).next_tuple();
+ let token = rustc_lexer::tokenize(text, rustc_lexer::FrontmatterAllowed::No).next_tuple();
let Some((rustc_lexer::Token {
kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
..
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs
index 1228e2e..0c41ede 100644
--- a/crates/vfs/src/file_set.rs
+++ b/crates/vfs/src/file_set.rs
@@ -5,8 +5,8 @@
use std::fmt;
use fst::{IntoStreamer, Streamer};
-use nohash_hasher::IntMap;
-use rustc_hash::FxHashMap;
+use indexmap::IndexMap;
+use rustc_hash::{FxBuildHasher, FxHashMap};
use crate::{AnchoredPath, FileId, Vfs, VfsPath};
@@ -14,7 +14,7 @@
#[derive(Default, Clone, Eq, PartialEq)]
pub struct FileSet {
files: FxHashMap<VfsPath, FileId>,
- paths: IntMap<FileId, VfsPath>,
+ paths: IndexMap<FileId, VfsPath, FxBuildHasher>,
}
impl FileSet {
diff --git a/docs/book/src/contributing/lsp-extensions.md b/docs/book/src/contributing/lsp-extensions.md
index 1ada1cb..8c06f33 100644
--- a/docs/book/src/contributing/lsp-extensions.md
+++ b/docs/book/src/contributing/lsp-extensions.md
@@ -694,24 +694,6 @@
Cancels all running flycheck processes.
-## Syntax Tree
-
-**Method:** `rust-analyzer/syntaxTree`
-
-**Request:**
-
-```typescript
-interface SyntaxTreeParams {
- textDocument: TextDocumentIdentifier,
- range?: Range,
-}
-```
-
-**Response:** `string`
-
-Returns textual representation of a parse tree for the file/selected region.
-Primarily for debugging, but very useful for all people working on rust-analyzer itself.
-
## View Syntax Tree
**Method:** `rust-analyzer/viewSyntaxTree`
diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml
index 35a5a4d..1fc1da5 100644
--- a/lib/lsp-server/Cargo.toml
+++ b/lib/lsp-server/Cargo.toml
@@ -16,6 +16,9 @@
[dev-dependencies]
lsp-types = "=0.95"
ctrlc = "3.4.7"
+anyhow.workspace = true
+rustc-hash.workspace = true
+toolchain.workspace = true
[lints]
workspace = true
diff --git a/lib/lsp-server/examples/goto_def.rs b/lib/lsp-server/examples/goto_def.rs
deleted file mode 100644
index 6b3acda..0000000
--- a/lib/lsp-server/examples/goto_def.rs
+++ /dev/null
@@ -1,132 +0,0 @@
-//! A minimal example LSP server that can only respond to the `gotoDefinition` request. To use
-//! this example, execute it and then send an `initialize` request.
-//!
-//! ```no_run
-//! Content-Length: 85
-//!
-//! {"jsonrpc": "2.0", "method": "initialize", "id": 1, "params": {"capabilities": {}}}
-//! ```
-//!
-//! This will respond with a server response. Then send it a `initialized` notification which will
-//! have no response.
-//!
-//! ```no_run
-//! Content-Length: 59
-//!
-//! {"jsonrpc": "2.0", "method": "initialized", "params": {}}
-//! ```
-//!
-//! Once these two are sent, then we enter the main loop of the server. The only request this
-//! example can handle is `gotoDefinition`:
-//!
-//! ```no_run
-//! Content-Length: 159
-//!
-//! {"jsonrpc": "2.0", "method": "textDocument/definition", "id": 2, "params": {"textDocument": {"uri": "file://temp"}, "position": {"line": 1, "character": 1}}}
-//! ```
-//!
-//! To finish up without errors, send a shutdown request:
-//!
-//! ```no_run
-//! Content-Length: 67
-//!
-//! {"jsonrpc": "2.0", "method": "shutdown", "id": 3, "params": null}
-//! ```
-//!
-//! The server will exit the main loop and finally we send a `shutdown` notification to stop
-//! the server.
-//!
-//! ```
-//! Content-Length: 54
-//!
-//! {"jsonrpc": "2.0", "method": "exit", "params": null}
-//! ```
-
-#![allow(clippy::print_stderr)]
-
-use std::error::Error;
-
-use lsp_types::OneOf;
-use lsp_types::{
- GotoDefinitionResponse, InitializeParams, ServerCapabilities, request::GotoDefinition,
-};
-
-use lsp_server::{Connection, ExtractError, Message, Request, RequestId, Response};
-
-fn main() -> Result<(), Box<dyn Error + Sync + Send>> {
- // Note that we must have our logging only write out to stderr.
- eprintln!("starting generic LSP server");
-
- // Create the transport. Includes the stdio (stdin and stdout) versions but this could
- // also be implemented to use sockets or HTTP.
- let (connection, io_threads) = Connection::stdio();
-
- // Run the server and wait for the two threads to end (typically by trigger LSP Exit event).
- let server_capabilities = serde_json::to_value(&ServerCapabilities {
- definition_provider: Some(OneOf::Left(true)),
- ..Default::default()
- })
- .unwrap();
- let initialization_params = match connection.initialize(server_capabilities) {
- Ok(it) => it,
- Err(e) => {
- if e.channel_is_disconnected() {
- io_threads.join()?;
- }
- return Err(e.into());
- }
- };
- main_loop(connection, initialization_params)?;
- io_threads.join()?;
-
- // Shut down gracefully.
- eprintln!("shutting down server");
- Ok(())
-}
-
-fn main_loop(
- connection: Connection,
- params: serde_json::Value,
-) -> Result<(), Box<dyn Error + Sync + Send>> {
- let _params: InitializeParams = serde_json::from_value(params).unwrap();
- eprintln!("starting example main loop");
- for msg in &connection.receiver {
- eprintln!("got msg: {msg:?}");
- match msg {
- Message::Request(req) => {
- if connection.handle_shutdown(&req)? {
- return Ok(());
- }
- eprintln!("got request: {req:?}");
- match cast::<GotoDefinition>(req) {
- Ok((id, params)) => {
- eprintln!("got gotoDefinition request #{id}: {params:?}");
- let result = Some(GotoDefinitionResponse::Array(Vec::new()));
- let result = serde_json::to_value(&result).unwrap();
- let resp = Response { id, result: Some(result), error: None };
- connection.sender.send(Message::Response(resp))?;
- continue;
- }
- Err(err @ ExtractError::JsonError { .. }) => panic!("{err:?}"),
- Err(ExtractError::MethodMismatch(req)) => req,
- };
- // ...
- }
- Message::Response(resp) => {
- eprintln!("got response: {resp:?}");
- }
- Message::Notification(not) => {
- eprintln!("got notification: {not:?}");
- }
- }
- }
- Ok(())
-}
-
-fn cast<R>(req: Request) -> Result<(RequestId, R::Params), ExtractError<Request>>
-where
- R: lsp_types::request::Request,
- R::Params: serde::de::DeserializeOwned,
-{
- req.extract(R::METHOD)
-}
diff --git a/lib/lsp-server/examples/manual_test.sh b/lib/lsp-server/examples/manual_test.sh
new file mode 100755
index 0000000..d028ac4
--- /dev/null
+++ b/lib/lsp-server/examples/manual_test.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# Simple nine-packet LSP test for examples/minimal_lsp.rs
+# Usage (two tabs):
+#
+# mkfifo /tmp/lsp_pipe # one-time setup
+# # tab 1 – run the server
+# cat /tmp/lsp_pipe | cargo run --example minimal_lsp
+#
+# # tab 2 – fire the packets (this script)
+# bash examples/manual_test.sh # blocks until server exits
+#
+# If you don’t use a second tab, run the script in the background:
+#
+# bash examples/manual_test.sh & # writer in background
+# cat /tmp/lsp_pipe | cargo run --example minimal_lsp
+#
+# The script opens /tmp/lsp_pipe for writing (exec 3>) and sends each JSON
+# packet with a correct Content-Length header.
+#
+# One-liner alternative (single terminal, no FIFO):
+#
+# cargo run --example minimal_lsp <<'EOF'
+# … nine packets …
+# EOF
+#
+# Both approaches feed identical bytes to minimal_lsp via stdin.
+
+set -eu
+PIPE=${1:-/tmp/lsp_pipe}
+
+mkfifo -m 600 "$PIPE" 2>/dev/null || true # create once, ignore if exists
+
+# open write end so the fifo stays open
+exec 3> "$PIPE"
+
+send() {
+ local body=$1
+ local len=$(printf '%s' "$body" | wc -c)
+ printf 'Content-Length: %d\r\n\r\n%s' "$len" "$body" >&3
+}
+
+send '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"capabilities":{}}}'
+send '{"jsonrpc":"2.0","method":"initialized","params":{}}'
+send '{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///tmp/foo.rs","languageId":"rust","version":1,"text":"fn main( ){println!(\"hi\") }"}}}'
+send '{"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}'
+send '{"jsonrpc":"2.0","id":3,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}'
+send '{"jsonrpc":"2.0","id":4,"method":"textDocument/definition","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}'
+send '{"jsonrpc":"2.0","id":5,"method":"textDocument/formatting","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"options":{"tabSize":4,"insertSpaces":true}}}'
+send '{"jsonrpc":"2.0","id":6,"method":"shutdown","params":null}'
+send '{"jsonrpc":"2.0","method":"exit","params":null}'
+
+exec 3>&-
+echo "Packets sent – watch the other terminal for responses."
diff --git a/lib/lsp-server/examples/minimal_lsp.rs b/lib/lsp-server/examples/minimal_lsp.rs
new file mode 100644
index 0000000..5eef999
--- /dev/null
+++ b/lib/lsp-server/examples/minimal_lsp.rs
@@ -0,0 +1,335 @@
+//! Minimal Language‑Server‑Protocol example: **`minimal_lsp.rs`**
+//! =============================================================
+//!
+//! | ↔ / ← | LSP method | What the implementation does |
+//! |-------|------------|------------------------------|
+//! | ↔ | `initialize` / `initialized` | capability handshake |
+//! | ← | `textDocument/publishDiagnostics` | pushes a dummy info diagnostic whenever the buffer changes |
+//! | ← | `textDocument/definition` | echoes an empty location array so the jump works |
+//! | ← | `textDocument/completion` | offers one hard‑coded item `HelloFromLSP` |
+//! | ← | `textDocument/hover` | shows *Hello from minimal_lsp* markdown |
+//! | ← | `textDocument/formatting` | pipes the doc through **rustfmt** and returns a full‑file edit |
+//!
+//! ### Quick start
+//! ```bash
+//! cd rust-analyzer/lib/lsp-server
+//! cargo run --example minimal_lsp
+//! ```
+//!
+//! ### Minimal manual session (all nine packets)
+//! ```no_run
+//! # 1. initialize - server replies with capabilities
+//! Content-Length: 85
+
+//! {"jsonrpc":"2.0","id":1,"method":"initialize","params":{"capabilities":{}}}
+//!
+//! # 2. initialized - no response expected
+//! Content-Length: 59
+
+//! {"jsonrpc":"2.0","method":"initialized","params":{}}
+//!
+//! # 3. didOpen - provide initial buffer text
+//! Content-Length: 173
+
+//! {"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///tmp/foo.rs","languageId":"rust","version":1,"text":"fn main( ){println!(\"hi\") }"}}}
+//!
+//! # 4. completion - expect HelloFromLSP
+//! Content-Length: 139
+
+//! {"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}
+//!
+//! # 5. hover - expect markdown greeting
+//! Content-Length: 135
+
+//! {"jsonrpc":"2.0","id":3,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}
+//!
+//! # 6. goto-definition - dummy empty array
+//! Content-Length: 139
+
+//! {"jsonrpc":"2.0","id":4,"method":"textDocument/definition","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}
+//!
+//! # 7. formatting - rustfmt full document
+//! Content-Length: 157
+
+//! {"jsonrpc":"2.0","id":5,"method":"textDocument/formatting","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"options":{"tabSize":4,"insertSpaces":true}}}
+//!
+//! # 8. shutdown request - server acks and prepares to exit
+//! Content-Length: 67
+
+//! {"jsonrpc":"2.0","id":6,"method":"shutdown","params":null}
+//!
+//! # 9. exit notification - terminates the server
+//! Content-Length: 54
+
+//! {"jsonrpc":"2.0","method":"exit","params":null}
+//! ```
+//!
+
+use std::{error::Error, io::Write};
+
+use rustc_hash::FxHashMap; // fast hash map
+use std::process::Stdio;
+use toolchain::command; // clippy-approved wrapper
+
+#[allow(clippy::print_stderr, clippy::disallowed_types, clippy::disallowed_methods)]
+use anyhow::{Context, Result, anyhow, bail};
+use lsp_server::{Connection, Message, Request as ServerRequest, RequestId, Response};
+use lsp_types::notification::Notification as _; // for METHOD consts
+use lsp_types::request::Request as _;
+use lsp_types::{
+ CompletionItem,
+ CompletionItemKind,
+ // capability helpers
+ CompletionOptions,
+ CompletionResponse,
+ Diagnostic,
+ DiagnosticSeverity,
+ DidChangeTextDocumentParams,
+ DidOpenTextDocumentParams,
+ DocumentFormattingParams,
+ Hover,
+ HoverContents,
+ HoverProviderCapability,
+ // core
+ InitializeParams,
+ MarkedString,
+ OneOf,
+ Position,
+ PublishDiagnosticsParams,
+ Range,
+ ServerCapabilities,
+ TextDocumentSyncCapability,
+ TextDocumentSyncKind,
+ TextEdit,
+ Url,
+ // notifications
+ notification::{DidChangeTextDocument, DidOpenTextDocument, PublishDiagnostics},
+ // requests
+ request::{Completion, Formatting, GotoDefinition, HoverRequest},
+}; // for METHOD consts
+
+// =====================================================================
+// main
+// =====================================================================
+
+#[allow(clippy::print_stderr)]
+fn main() -> std::result::Result<(), Box<dyn Error + Sync + Send>> {
+ log::error!("starting minimal_lsp");
+
+ // transport
+ let (connection, io_thread) = Connection::stdio();
+
+ // advertised capabilities
+ let caps = ServerCapabilities {
+ text_document_sync: Some(TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL)),
+ completion_provider: Some(CompletionOptions::default()),
+ definition_provider: Some(OneOf::Left(true)),
+ hover_provider: Some(HoverProviderCapability::Simple(true)),
+ document_formatting_provider: Some(OneOf::Left(true)),
+ ..Default::default()
+ };
+ let init_value = serde_json::json!({
+ "capabilities": caps,
+ "offsetEncoding": ["utf-8"],
+ });
+
+ let init_params = connection.initialize(init_value)?;
+ main_loop(connection, init_params)?;
+ io_thread.join()?;
+ log::error!("shutting down server");
+ Ok(())
+}
+
+// =====================================================================
+// event loop
+// =====================================================================
+
+fn main_loop(
+ connection: Connection,
+ params: serde_json::Value,
+) -> std::result::Result<(), Box<dyn Error + Sync + Send>> {
+ let _init: InitializeParams = serde_json::from_value(params)?;
+ let mut docs: FxHashMap<Url, String> = FxHashMap::default();
+
+ for msg in &connection.receiver {
+ match msg {
+ Message::Request(req) => {
+ if connection.handle_shutdown(&req)? {
+ break;
+ }
+ if let Err(err) = handle_request(&connection, &req, &mut docs) {
+ log::error!("[lsp] request {} failed: {err}", &req.method);
+ }
+ }
+ Message::Notification(note) => {
+ if let Err(err) = handle_notification(&connection, ¬e, &mut docs) {
+ log::error!("[lsp] notification {} failed: {err}", note.method);
+ }
+ }
+ Message::Response(resp) => log::error!("[lsp] response: {resp:?}"),
+ }
+ }
+ Ok(())
+}
+
+// =====================================================================
+// notifications
+// =====================================================================
+
+fn handle_notification(
+ conn: &Connection,
+ note: &lsp_server::Notification,
+ docs: &mut FxHashMap<Url, String>,
+) -> Result<()> {
+ match note.method.as_str() {
+ DidOpenTextDocument::METHOD => {
+ let p: DidOpenTextDocumentParams = serde_json::from_value(note.params.clone())?;
+ let uri = p.text_document.uri;
+ docs.insert(uri.clone(), p.text_document.text);
+ publish_dummy_diag(conn, &uri)?;
+ }
+ DidChangeTextDocument::METHOD => {
+ let p: DidChangeTextDocumentParams = serde_json::from_value(note.params.clone())?;
+ if let Some(change) = p.content_changes.into_iter().next() {
+ let uri = p.text_document.uri;
+ docs.insert(uri.clone(), change.text);
+ publish_dummy_diag(conn, &uri)?;
+ }
+ }
+ _ => {}
+ }
+ Ok(())
+}
+
+// =====================================================================
+// requests
+// =====================================================================
+
+fn handle_request(
+ conn: &Connection,
+ req: &ServerRequest,
+ docs: &mut FxHashMap<Url, String>,
+) -> Result<()> {
+ match req.method.as_str() {
+ GotoDefinition::METHOD => {
+ send_ok(conn, req.id.clone(), &lsp_types::GotoDefinitionResponse::Array(Vec::new()))?;
+ }
+ Completion::METHOD => {
+ let item = CompletionItem {
+ label: "HelloFromLSP".into(),
+ kind: Some(CompletionItemKind::FUNCTION),
+ detail: Some("dummy completion".into()),
+ ..Default::default()
+ };
+ send_ok(conn, req.id.clone(), &CompletionResponse::Array(vec![item]))?;
+ }
+ HoverRequest::METHOD => {
+ let hover = Hover {
+ contents: HoverContents::Scalar(MarkedString::String(
+ "Hello from *minimal_lsp*".into(),
+ )),
+ range: None,
+ };
+ send_ok(conn, req.id.clone(), &hover)?;
+ }
+ Formatting::METHOD => {
+ let p: DocumentFormattingParams = serde_json::from_value(req.params.clone())?;
+ let uri = p.text_document.uri;
+ let text = docs
+ .get(&uri)
+ .ok_or_else(|| anyhow!("document not in cache – did you send DidOpen?"))?;
+ let formatted = run_rustfmt(text)?;
+ let edit = TextEdit { range: full_range(text), new_text: formatted };
+ send_ok(conn, req.id.clone(), &vec![edit])?;
+ }
+ _ => send_err(
+ conn,
+ req.id.clone(),
+ lsp_server::ErrorCode::MethodNotFound,
+ "unhandled method",
+ )?,
+ }
+ Ok(())
+}
+
+// =====================================================================
+// diagnostics
+// =====================================================================
+fn publish_dummy_diag(conn: &Connection, uri: &Url) -> Result<()> {
+ let diag = Diagnostic {
+ range: Range::new(Position::new(0, 0), Position::new(0, 1)),
+ severity: Some(DiagnosticSeverity::INFORMATION),
+ code: None,
+ code_description: None,
+ source: Some("minimal_lsp".into()),
+ message: "dummy diagnostic".into(),
+ related_information: None,
+ tags: None,
+ data: None,
+ };
+ let params =
+ PublishDiagnosticsParams { uri: uri.clone(), diagnostics: vec![diag], version: None };
+ conn.sender.send(Message::Notification(lsp_server::Notification::new(
+ PublishDiagnostics::METHOD.to_owned(),
+ params,
+ )))?;
+ Ok(())
+}
+
+// =====================================================================
+// helpers
+// =====================================================================
+
+fn run_rustfmt(input: &str) -> Result<String> {
+ let cwd = std::env::current_dir().expect("can't determine CWD");
+ let mut child = command("rustfmt", &cwd, &FxHashMap::default())
+ .arg("--emit")
+ .arg("stdout")
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .context("failed to spawn rustfmt – is it installed?")?;
+
+ let Some(stdin) = child.stdin.as_mut() else {
+ bail!("stdin unavailable");
+ };
+ stdin.write_all(input.as_bytes())?;
+ let output = child.wait_with_output()?;
+ if !output.status.success() {
+ let stderr = String::from_utf8_lossy(&output.stderr);
+ bail!("rustfmt failed: {stderr}");
+ }
+ Ok(String::from_utf8(output.stdout)?)
+}
+
+fn full_range(text: &str) -> Range {
+ let last_line_idx = text.lines().count().saturating_sub(1) as u32;
+ let last_col = text.lines().last().map_or(0, |l| l.chars().count()) as u32;
+ Range::new(Position::new(0, 0), Position::new(last_line_idx, last_col))
+}
+
+fn send_ok<T: serde::Serialize>(conn: &Connection, id: RequestId, result: &T) -> Result<()> {
+ let resp = Response { id, result: Some(serde_json::to_value(result)?), error: None };
+ conn.sender.send(Message::Response(resp))?;
+ Ok(())
+}
+
+fn send_err(
+ conn: &Connection,
+ id: RequestId,
+ code: lsp_server::ErrorCode,
+ msg: &str,
+) -> Result<()> {
+ let resp = Response {
+ id,
+ result: None,
+ error: Some(lsp_server::ResponseError {
+ code: code as i32,
+ message: msg.into(),
+ data: None,
+ }),
+ };
+ conn.sender.send(Message::Response(resp))?;
+ Ok(())
+}
diff --git a/rust-version b/rust-version
index 9027932..c2b1c15 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-ad3b7257615c28aaf8212a189ec032b8af75de51
+e05ab47e6c418fb2b9faa2eae9a7e70c65c98eaa
diff --git a/xtask/src/codegen/grammar/ast_src.rs b/xtask/src/codegen/grammar/ast_src.rs
index d8cbf89..b9f570f 100644
--- a/xtask/src/codegen/grammar/ast_src.rs
+++ b/xtask/src/codegen/grammar/ast_src.rs
@@ -116,6 +116,8 @@
// keywords we use for special macro expansions
const CONTEXTUAL_BUILTIN_KEYWORDS: &[&str] = &[
"asm",
+ "naked_asm",
+ "global_asm",
"att_syntax",
"builtin",
"clobber_abi",