Merge pull request #20148 from ShoyuVanilla/sysroot-no-deps
fix: Honor `rust-analyzer.cargo.noDeps` option when fetching sysroot metadata
diff --git a/Cargo.lock b/Cargo.lock
index caa8f28..7432a82 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -570,12 +570,6 @@
]
[[package]]
-name = "heck"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
-
-[[package]]
name = "hermit-abi"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1021,6 +1015,15 @@
]
[[package]]
+name = "intrusive-collections"
+version = "0.9.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86"
+dependencies = [
+ "memoffset",
+]
+
+[[package]]
name = "itertools"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1428,6 +1431,16 @@
]
[[package]]
+name = "papaya"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
+dependencies = [
+ "equivalent",
+ "seize",
+]
+
+[[package]]
name = "parking_lot"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1955,16 +1968,18 @@
[[package]]
name = "salsa"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c8fff508e3d6ef42a32607f7538e17171a877a12015e32036f46e99d00c95781"
+checksum = "2e235afdb8e510f38a07138fbe5a0b64691894358a9c0cbd813b1aade110efc9"
dependencies = [
"boxcar",
"crossbeam-queue",
- "dashmap",
+ "crossbeam-utils",
"hashbrown 0.15.4",
"hashlink",
"indexmap",
+ "intrusive-collections",
+ "papaya",
"parking_lot",
"portable-atomic",
"rayon",
@@ -1978,17 +1993,16 @@
[[package]]
name = "salsa-macro-rules"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea72b3c06f2ce6350fe3a0eeb7aaaf842d1d8352b706973c19c4f02e298a87c"
+checksum = "2edb86a7e9c91f6d30c9ce054312721dbe773a162db27bbfae834d16177b30ce"
[[package]]
name = "salsa-macros"
-version = "0.22.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ce92025bc160b27814a207cb78d680973af17f863c7f4fc56cf3a535e22f378"
+checksum = "d0778d6e209051bc4e75acfe83bcd7848601ec3dbe9c3dbb982829020e9128af"
dependencies = [
- "heck",
"proc-macro2",
"quote",
"syn",
@@ -2026,6 +2040,16 @@
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
+name = "seize"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
name = "semver"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index 0a8e6fe..d268ce5 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -49,6 +49,8 @@
# ungrammar = { path = "../ungrammar" }
# salsa = { path = "../salsa" }
+# salsa-macros = { path = "../salsa/components/salsa-macros" }
+# salsa-macro-rules = { path = "../salsa/components/salsa-macro-rules" }
[workspace.dependencies]
# local crates
@@ -136,8 +138,8 @@
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
-salsa = { version = "0.22.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] }
-salsa-macros = "0.22.0"
+salsa = { version = "0.23.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] }
+salsa-macros = "0.23.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 2a87b15..8c9393b 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -6,6 +6,7 @@
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input.
+use std::error::Error;
use std::hash::BuildHasherDefault;
use std::{fmt, mem, ops};
@@ -22,7 +23,49 @@
use crate::{CrateWorkspaceData, EditionedFileId, FxIndexSet, RootQueryDb};
-pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
+pub type ProcMacroPaths =
+ FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), ProcMacroLoadingError>>;
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ProcMacroLoadingError {
+ Disabled,
+ FailedToBuild,
+ MissingDylibPath,
+ NotYetBuilt,
+ NoProcMacros,
+ ProcMacroSrvError(Box<str>),
+}
+impl ProcMacroLoadingError {
+ pub fn is_hard_error(&self) -> bool {
+ match self {
+ ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
+ ProcMacroLoadingError::FailedToBuild
+ | ProcMacroLoadingError::MissingDylibPath
+ | ProcMacroLoadingError::NoProcMacros
+ | ProcMacroLoadingError::ProcMacroSrvError(_) => true,
+ }
+ }
+}
+
+impl Error for ProcMacroLoadingError {}
+impl fmt::Display for ProcMacroLoadingError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
+ ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
+ ProcMacroLoadingError::MissingDylibPath => {
+ write!(f, "proc-macro crate build data is missing a dylib path")
+ }
+ ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
+ ProcMacroLoadingError::NoProcMacros => {
+ write!(f, "proc macro library has no proc macros")
+ }
+ ProcMacroLoadingError::ProcMacroSrvError(msg) => {
+ write!(f, "proc macro server error: {msg}")
+ }
+ }
+ }
+}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
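
For context on the hunk above: a minimal, self-contained sketch (not the actual `base-db` crate; the names only mirror the diff) of how the structured `ProcMacroLoadingError` replaces the old `(String, bool)` pair, with the "hard error" flag now derived from the variant instead of being threaded alongside the message.

```rust
// Standalone sketch of a structured proc-macro loading error in the spirit of the diff.
use std::fmt;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[allow(dead_code)]
enum ProcMacroLoadingError {
    Disabled,
    FailedToBuild,
    MissingDylibPath,
    NotYetBuilt,
    NoProcMacros,
    ProcMacroSrvError(Box<str>),
}

impl ProcMacroLoadingError {
    // Mirrors the new method: "disabled" and "not yet built" are expected states,
    // everything else is a real failure.
    fn is_hard_error(&self) -> bool {
        !matches!(self, Self::Disabled | Self::NotYetBuilt)
    }
}

impl fmt::Display for ProcMacroLoadingError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Disabled => write!(f, "proc-macro expansion is disabled"),
            Self::NotYetBuilt => write!(f, "proc-macro not yet built"),
            Self::FailedToBuild => write!(f, "proc-macro failed to build"),
            Self::MissingDylibPath => {
                write!(f, "proc-macro crate build data is missing a dylib path")
            }
            Self::NoProcMacros => write!(f, "proc macro library has no proc macros"),
            Self::ProcMacroSrvError(msg) => write!(f, "proc macro server error: {msg}"),
        }
    }
}

fn main() {
    let err = ProcMacroLoadingError::NotYetBuilt;
    // A diagnostic renderer can pick severity from the variant instead of a separate bool.
    let severity = if err.is_hard_error() { "error" } else { "warning" };
    println!("[{severity}] {err}");
}
```
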
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 478fae6..ad17f17 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -14,8 +14,9 @@
input::{
BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
- DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroPaths, ReleaseChannel,
- SourceRoot, SourceRootId, TargetLayoutLoadResult, UniqueCrateData,
+ DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+ UniqueCrateData,
},
};
use dashmap::{DashMap, mapref::entry::Entry};
@@ -33,7 +34,7 @@
#[macro_export]
macro_rules! impl_intern_key {
($id:ident, $loc:ident) => {
- #[salsa_macros::interned(no_lifetime)]
+ #[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct $id {
pub loc: $loc,
@@ -43,7 +44,7 @@
impl ::std::fmt::Debug for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
f.debug_tuple(stringify!($id))
- .field(&format_args!("{:04x}", self.0.as_u32()))
+ .field(&format_args!("{:04x}", self.0.index()))
.finish()
}
}
@@ -167,7 +168,7 @@
}
}
-#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
+#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
diff --git a/crates/hir-def/src/expr_store/path.rs b/crates/hir-def/src/expr_store/path.rs
index db83e73..19c7ce0 100644
--- a/crates/hir-def/src/expr_store/path.rs
+++ b/crates/hir-def/src/expr_store/path.rs
@@ -29,8 +29,8 @@
// This type is being used a lot, make sure it doesn't grow unintentionally.
#[cfg(target_arch = "x86_64")]
const _: () = {
- assert!(size_of::<Path>() == 16);
- assert!(size_of::<Option<Path>>() == 16);
+ assert!(size_of::<Path>() == 24);
+ assert!(size_of::<Option<Path>>() == 24);
};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index eb3b92d..eacc3f3 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -149,7 +149,7 @@
}
#[cfg(target_arch = "x86_64")]
-const _: () = assert!(size_of::<TypeRef>() == 16);
+const _: () = assert!(size_of::<TypeRef>() == 24);
pub type TypeRefId = Idx<TypeRef>;
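
The `const _: () = assert!(...)` lines being bumped from 16 to 24 bytes above are compile-time size guards: if a refactor accidentally grows the type, the build fails instead of silently regressing memory use. A minimal sketch of the pattern with a stand-in type (the field layout below is illustrative, not the real `Path`/`TypeRef`):

```rust
// Compile-time size guard. If the struct grows past the asserted size, the constant
// evaluation fails and so does the build.
use std::mem::size_of;

#[allow(dead_code)]
struct Path {
    ptr: *const u8, // 8 bytes (illustrative fields only)
    len: u32,       // 4 bytes
    kind: u32,      // 4 bytes
    extra: u64,     // 8 bytes -> 24 bytes total on x86_64
}

#[cfg(target_arch = "x86_64")]
const _: () = assert!(size_of::<Path>() == 24);

fn main() {
    println!("Path is {} bytes", size_of::<Path>());
}
```
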
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index 777953d..0013c2a 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -746,3 +746,83 @@
623..690: `derive(CoercePointee)` requires `T` to be marked `?Sized`"#]],
);
}
+
+#[test]
+fn union_derive() {
+ check_errors(
+ r#"
+//- minicore: clone, copy, default, fmt, hash, ord, eq, derive
+
+#[derive(Copy)]
+union Foo1 { _v: () }
+#[derive(Clone)]
+union Foo2 { _v: () }
+#[derive(Default)]
+union Foo3 { _v: () }
+#[derive(Debug)]
+union Foo4 { _v: () }
+#[derive(Hash)]
+union Foo5 { _v: () }
+#[derive(Ord)]
+union Foo6 { _v: () }
+#[derive(PartialOrd)]
+union Foo7 { _v: () }
+#[derive(Eq)]
+union Foo8 { _v: () }
+#[derive(PartialEq)]
+union Foo9 { _v: () }
+ "#,
+ expect![[r#"
+ 78..118: this trait cannot be derived for unions
+ 119..157: this trait cannot be derived for unions
+ 158..195: this trait cannot be derived for unions
+ 196..232: this trait cannot be derived for unions
+ 233..276: this trait cannot be derived for unions
+ 313..355: this trait cannot be derived for unions"#]],
+ );
+}
+
+#[test]
+fn default_enum_without_default_attr() {
+ check_errors(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo {
+ Bar,
+}
+ "#,
+ expect!["1..41: `#[derive(Default)]` on enum with no `#[default]`"],
+ );
+}
+
+#[test]
+fn generic_enum_default() {
+ check(
+ r#"
+//- minicore: default, derive
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+"#,
+ expect![[r#"
+
+#[derive(Default)]
+enum Foo<T> {
+ Bar(T),
+ #[default]
+ Baz,
+}
+
+impl <T, > $crate::default::Default for Foo<T, > where {
+ fn default() -> Self {
+ Foo::Baz
+ }
+}"#]],
+ );
+}
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index ba75dca..338851b 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -172,7 +172,7 @@
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "of_",
+ "EnumVariants::of_",
]
"#]],
expect![[r#"
@@ -181,7 +181,7 @@
"ast_id_map_shim",
"file_item_tree_query",
"real_span_map_shim",
- "of_",
+ "EnumVariants::of_",
]
"#]],
);
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index d135584..15e68ff 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -458,6 +458,7 @@
invoc_span: Span,
tt: &tt::TopSubtree,
trait_path: tt::TopSubtree,
+ allow_unions: bool,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let info = match parse_adt(db, tt, invoc_span) {
@@ -469,6 +470,12 @@
);
}
};
+ if !allow_unions && matches!(info.shape, AdtShape::Union) {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(invoc_span)),
+ ExpandError::other(invoc_span, "this trait cannot be derived for unions"),
+ );
+ }
ExpandResult::ok(expand_simple_derive_with_parsed(
invoc_span,
info,
@@ -535,7 +542,14 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::marker::Copy },
+ true,
+ |_| quote! {span =>},
+ )
}
fn clone_expand(
@@ -544,7 +558,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, true, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -599,41 +613,63 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::default::Default }, |adt| {
- let body = match &adt.shape {
- AdtShape::Struct(fields) => {
- let name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#name),
+ let adt = match parse_adt(db, tt, span) {
+ Ok(info) => info,
+ Err(e) => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan { open: span, close: span }),
+ e,
+ );
+ }
+ };
+ let (body, constrain_to_trait) = match &adt.shape {
+ AdtShape::Struct(fields) => {
+ let name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
+ );
+ (body, true)
+ }
+ AdtShape::Enum { default_variant, variants } => {
+ if let Some(d) = default_variant {
+ let (name, fields) = &variants[*d];
+ let adt_name = &adt.name;
+ let body = fields.as_pattern_map(
+ quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
- )
- }
- AdtShape::Enum { default_variant, variants } => {
- if let Some(d) = default_variant {
- let (name, fields) = &variants[*d];
- let adt_name = &adt.name;
- fields.as_pattern_map(
- quote!(span =>#adt_name :: #name),
- span,
- |_| quote!(span =>#krate::default::Default::default()),
- )
- } else {
- // FIXME: Return expand error here
- quote!(span =>)
- }
- }
- AdtShape::Union => {
- // FIXME: Return expand error here
- quote!(span =>)
- }
- };
- quote! {span =>
- fn default() -> Self {
- #body
+ );
+ (body, false)
+ } else {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "`#[derive(Default)]` on enum with no `#[default]`"),
+ );
}
}
- })
+ AdtShape::Union => {
+ return ExpandResult::new(
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ExpandError::other(span, "this trait cannot be derived for unions"),
+ );
+ }
+ };
+ ExpandResult::ok(expand_simple_derive_with_parsed(
+ span,
+ adt,
+ quote! {span => #krate::default::Default },
+ |_adt| {
+ quote! {span =>
+ fn default() -> Self {
+ #body
+ }
+ }
+ },
+ constrain_to_trait,
+ tt::TopSubtree::empty(tt::DelimSpan::from_single(span)),
+ ))
}
fn debug_expand(
@@ -642,7 +678,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, false, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
@@ -697,10 +733,7 @@
}
})
.collect(),
- AdtShape::Union => {
- // FIXME: Return expand error here
- vec![]
- }
+ AdtShape::Union => unreachable!(),
};
quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
@@ -718,11 +751,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, false, |adt| {
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@@ -769,7 +798,14 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
+ expand_simple_derive(
+ db,
+ span,
+ tt,
+ quote! {span => #krate::cmp::Eq },
+ true,
+ |_| quote! {span =>},
+ )
}
fn partial_eq_expand(
@@ -778,11 +814,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote! {span =>};
- }
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, false, |adt| {
let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
@@ -854,7 +886,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -873,10 +905,6 @@
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
@@ -916,7 +944,7 @@
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
- expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
+ expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, false, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@@ -935,10 +963,6 @@
}
}
}
- if matches!(adt.shape, AdtShape::Union) {
- // FIXME: Return expand error here
- return quote!(span =>);
- }
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
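
Stepping back from the hunk above: the reworked `default_expand` now decides up front whether a body can be produced at all. A self-contained sketch of that decision (the names only mirror the diff; this is not the `hir-expand` API):

```rust
// Structs always get a `Default` body, enums need a `#[default]` variant, and unions
// are rejected with the same messages the new tests expect.
#[derive(Debug, PartialEq)]
enum AdtShape {
    Struct,
    Enum { default_variant: Option<usize> },
    Union,
}

fn default_body(shape: &AdtShape) -> Result<String, &'static str> {
    match shape {
        AdtShape::Struct => Ok("Self { /* each field via Default::default() */ }".to_owned()),
        AdtShape::Enum { default_variant: Some(idx) } => Ok(format!("default variant #{idx}")),
        AdtShape::Enum { default_variant: None } => {
            Err("`#[derive(Default)]` on enum with no `#[default]`")
        }
        AdtShape::Union => Err("this trait cannot be derived for unions"),
    }
}

fn main() {
    assert!(default_body(&AdtShape::Struct).is_ok());
    assert!(default_body(&AdtShape::Enum { default_variant: Some(1) }).is_ok());
    assert_eq!(
        default_body(&AdtShape::Enum { default_variant: None }),
        Err("`#[derive(Default)]` on enum with no `#[default]`")
    );
    assert_eq!(default_body(&AdtShape::Union), Err("this trait cannot be derived for unions"));
}
```

The new `allow_unions` flag on `expand_simple_derive` covers the remaining derives: per the hunks above, only `Copy`, `Clone`, and `Eq` keep accepting unions, which matches the six errors expected by the `union_derive` test.
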
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index f9abe4f..800b40a 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -7,6 +7,7 @@
Symbol,
sym::{self},
};
+use itertools::Itertools;
use mbe::{DelimiterKind, expect_fragment};
use span::{Edition, FileId, Span};
use stdx::format_to;
@@ -681,11 +682,19 @@
}
fn parse_string(tt: &tt::TopSubtree) -> Result<(Symbol, Span), ExpandError> {
- let delimiter = tt.top_subtree().delimiter;
- tt.iter()
- .next()
- .ok_or(delimiter.open.cover(delimiter.close))
- .and_then(|tt| match tt {
+ let mut tt = TtElement::Subtree(tt.top_subtree(), tt.iter());
+ (|| {
+ // FIXME: We wrap expression fragments in parentheses which can break this expectation
+ // here
+ // Remove this once we handle none delims correctly
+ while let TtElement::Subtree(sub, tt_iter) = &mut tt
+ && let DelimiterKind::Parenthesis | DelimiterKind::Invisible = sub.delimiter.kind
+ {
+ tt =
+ tt_iter.exactly_one().map_err(|_| sub.delimiter.open.cover(sub.delimiter.close))?;
+ }
+
+ match tt {
TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
@@ -698,35 +707,11 @@
kind: tt::LitKind::StrRaw(_),
suffix: _,
})) => Ok((text.clone(), *span)),
- // FIXME: We wrap expression fragments in parentheses which can break this expectation
- // here
- // Remove this once we handle none delims correctly
- TtElement::Subtree(tt, mut tt_iter)
- if tt.delimiter.kind == DelimiterKind::Parenthesis =>
- {
- tt_iter
- .next()
- .and_then(|tt| match tt {
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::Str,
- suffix: _,
- })) => Some((unescape_symbol(text), *span)),
- TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
- symbol: text,
- span,
- kind: tt::LitKind::StrRaw(_),
- suffix: _,
- })) => Some((text.clone(), *span)),
- _ => None,
- })
- .ok_or(delimiter.open.cover(delimiter.close))
- }
TtElement::Leaf(l) => Err(*l.span()),
TtElement::Subtree(tt, _) => Err(tt.delimiter.open.cover(tt.delimiter.close)),
- })
- .map_err(|span| ExpandError::other(span, "expected string literal"))
+ }
+ })()
+ .map_err(|span| ExpandError::other(span, "expected string literal"))
}
fn include_expand(
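
To make the new `parse_string` shape easier to follow: it now descends through any number of single-element parenthesis/invisible wrappers before expecting a string literal, using `Itertools::exactly_one`. A simplified, runnable sketch under an assumed toy token model (not the real `tt` crate), depending only on `itertools`:

```rust
use itertools::Itertools;

#[derive(Debug)]
enum Token {
    Str(String),
    Group(Vec<Token>), // stands in for parenthesis/invisible-delimited subtrees
    Other,
}

fn parse_string(mut tt: Token) -> Result<String, &'static str> {
    // Peel wrapper groups that contain exactly one element, mirroring the loop above.
    while let Token::Group(children) = tt {
        tt = children.into_iter().exactly_one().map_err(|_| "expected string literal")?;
    }
    match tt {
        Token::Str(s) => Ok(s),
        _ => Err("expected string literal"),
    }
}

fn main() {
    let wrapped = Token::Group(vec![Token::Group(vec![Token::Str("env_var".into())])]);
    assert_eq!(parse_string(wrapped).unwrap(), "env_var");
    assert!(parse_string(Token::Other).is_err());
    // A group with more than one child is rejected instead of silently taking the first.
    let two = Token::Group(vec![Token::Str("a".into()), Token::Str("b".into())]);
    assert!(parse_string(two).is_err());
}
```
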
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 7e9928c..888c140 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -145,7 +145,7 @@
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
}
-#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext)]
+#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext, revisions = usize::MAX)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContext,
}
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index a73a223..6730b33 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -315,11 +315,11 @@
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_with_macro_call_body(
+ pub fn original_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
- self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
+ self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_input(db)
}
pub fn original_syntax_node_rooted(
@@ -465,7 +465,7 @@
}
}
- pub fn original_node_file_range_with_macro_call_body(
+ pub fn original_node_file_range_with_macro_call_input(
self,
db: &dyn db::ExpandDatabase,
) -> FileRange {
@@ -476,7 +476,7 @@
Some(it) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file);
- loc.kind.original_call_range_with_body(db)
+ loc.kind.original_call_range_with_input(db)
}
}
}
@@ -497,6 +497,18 @@
}
}
}
+
+ pub fn original_node_file_range_rooted_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<FileRange> {
+ match self.file_id {
+ HirFileId::FileId(file_id) => Some(FileRange { file_id, range: self.value }),
+ HirFileId::MacroFile(mac_file) => {
+ map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value)
+ }
+ }
+ }
}
impl<N: AstNode> InFile<N> {
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 6ecac14..ac61b22 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -199,9 +199,9 @@
},
&ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
match db.proc_macros_for_crate(def_crate).as_ref().and_then(|it| it.get_error()) {
- Some((e, hard_err)) => RenderedExpandError {
- message: e.to_owned(),
- error: hard_err,
+ Some(e) => RenderedExpandError {
+ message: e.to_string(),
+ error: e.is_hard_error(),
kind: RenderedExpandError::GENERAL_KIND,
},
None => RenderedExpandError {
@@ -688,8 +688,11 @@
/// Returns the original file range that best describes the location of this macro call.
///
- /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
- pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange {
+    /// This spans the entire macro call, including its input. That is, for
+ /// - fn_like! {}, it spans the path and token tree
+ /// - #\[derive], it spans the `#[derive(...)]` attribute and the annotated item
+ /// - #\[attr], it spans the `#[attr(...)]` attribute and the annotated item
+ pub fn original_call_range_with_input(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id() {
@@ -712,8 +715,8 @@
/// Returns the original file range that best describes the location of this macro call.
///
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
- /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
- /// get only the specific derive that is being referred to.
+ /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
+ /// attribute's range, and derives get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
@@ -726,7 +729,14 @@
};
let range = match kind {
- MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db);
+ node.path()
+ .unwrap()
+ .syntax()
+ .text_range()
+ .cover(node.excl_token().unwrap().text_range())
+ }
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
@@ -1056,7 +1066,7 @@
intern::impl_internable!(ModPath, attrs::AttrInput);
-#[salsa_macros::interned(no_lifetime, debug)]
+#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[doc(alias = "MacroFileId")]
pub struct MacroCallId {
pub loc: MacroCallLoc,
diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs
index 217d991..679f611 100644
--- a/crates/hir-expand/src/name.rs
+++ b/crates/hir-expand/src/name.rs
@@ -179,9 +179,10 @@
self.symbol.as_str()
}
+ #[inline]
pub fn display<'a>(
&'a self,
- db: &dyn crate::db::ExpandDatabase,
+ db: &dyn salsa::Database,
edition: Edition,
) -> impl fmt::Display + 'a {
_ = db;
diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs
index 6134c3a..6431d46 100644
--- a/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -46,7 +46,7 @@
        } else if let Some(crate_name) = &macro_def_crate.extra_data(db).display_name {
make::tokens::ident(crate_name.crate_name().as_str())
} else {
- return dollar_crate.clone();
+ dollar_crate.clone()
}
});
if replacement.text() == "$crate" {
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 1c8ebb6..f97d721 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -4,7 +4,7 @@
use std::any::Any;
use std::{panic::RefUnwindSafe, sync};
-use base_db::{Crate, CrateBuilderId, CratesIdMap, Env};
+use base_db::{Crate, CrateBuilderId, CratesIdMap, Env, ProcMacroLoadingError};
use intern::Symbol;
use rustc_hash::FxHashMap;
use span::Span;
@@ -53,8 +53,8 @@
System(String),
}
-pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, (String, bool)>;
-type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, (Box<str>, bool)>;
+pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, ProcMacroLoadingError>;
+type StoredProcMacroLoadResult = Result<Box<[ProcMacro]>, ProcMacroLoadingError>;
#[derive(Default, Debug)]
pub struct ProcMacrosBuilder(FxHashMap<CrateBuilderId, Arc<CrateProcMacros>>);
@@ -77,9 +77,7 @@
proc_macros_crate,
match proc_macro {
Ok(it) => Arc::new(CrateProcMacros(Ok(it.into_boxed_slice()))),
- Err((e, hard_err)) => {
- Arc::new(CrateProcMacros(Err((e.into_boxed_str(), hard_err))))
- }
+ Err(e) => Arc::new(CrateProcMacros(Err(e))),
},
);
}
@@ -139,8 +137,8 @@
)
}
- pub fn get_error(&self) -> Option<(&str, bool)> {
- self.0.as_ref().err().map(|(e, hard_err)| (&**e, *hard_err))
+ pub fn get_error(&self) -> Option<&ProcMacroLoadingError> {
+ self.0.as_ref().err()
}
/// Fetch the [`CustomProcMacroExpander`]s and their corresponding names for the given crate.
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 1029969..5d3be07 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -237,15 +237,6 @@
// Interned IDs for Chalk integration
#[salsa::interned]
- fn intern_type_or_const_param_id(
- &self,
- param_id: TypeOrConstParamId,
- ) -> InternedTypeOrConstParamId;
-
- #[salsa::interned]
- fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
-
- #[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
@@ -282,9 +273,8 @@
#[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(
- // cycle_fn = crate::variance::variances_of_cycle_fn,
- // cycle_initial = crate::variance::variances_of_cycle_initial,
- cycle_result = crate::variance::variances_of_cycle_initial,
+ cycle_fn = crate::variance::variances_of_cycle_fn,
+ cycle_initial = crate::variance::variances_of_cycle_initial,
)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
@@ -329,9 +319,31 @@
fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
}
-impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedTypeOrConstParamId {
+ pub loc: TypeOrConstParamId,
+}
+impl ::std::fmt::Debug for InternedTypeOrConstParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedTypeOrConstParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
-impl_intern_key!(InternedLifetimeParamId, LifetimeParamId);
+#[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct InternedLifetimeParamId {
+ pub loc: LifetimeParamId,
+}
+impl ::std::fmt::Debug for InternedLifetimeParamId {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_tuple(stringify!(InternedLifetimeParamId))
+ .field(&format_args!("{:04x}", self.0.index()))
+ .finish()
+ }
+}
impl_intern_key!(InternedConstParamId, ConstParamId);
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 507bab2..810fe76 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -1432,10 +1432,10 @@
match f.closure_style {
ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
ClosureStyle::ClosureWithId => {
- return write!(f, "{{closure#{:?}}}", id.0.as_u32());
+ return write!(f, "{{closure#{:?}}}", id.0.index());
}
ClosureStyle::ClosureWithSubst => {
- write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
+ write!(f, "{{closure#{:?}}}", id.0.index())?;
return hir_fmt_generics(f, substs.as_slice(Interner), None, None);
}
_ => (),
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 65a273c..c3029bf 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -1229,10 +1229,11 @@
self.select_from_expr(*expr);
}
}
- Expr::Let { pat: _, expr } => {
+ Expr::Let { pat, expr } => {
self.walk_expr(*expr);
- let place = self.place_of_expr(*expr);
- self.ref_expr(*expr, place);
+ if let Some(place) = self.place_of_expr(*expr) {
+ self.consume_with_pat(place, *pat);
+ }
}
Expr::UnaryOp { expr, op: _ }
| Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
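
The capture-analysis fix above is easiest to see in plain Rust, where rustc already implements the rule the analyzer now matches: an `if let` pattern that binds by value moves the scrutinee into the closure, while a `ref` binding only borrows it. A small illustration using only std types (no rust-analyzer involved):

```rust
#[allow(irrefutable_let_patterns)]
fn main() {
    // By-value binding in the pattern => the closure captures `b` by value.
    let b: Option<String> = Some(String::from("payload"));
    let consume = || {
        if let Some(inner) = b {
            drop(inner); // `inner` owns the String
        }
    };
    consume();
    // println!("{b:?}"); // would not compile: `b` was moved into `consume`

    // `ref` binding => the closure only takes a shared borrow, as in the updated test.
    let s = String::from("borrowed");
    let s_ref = &s;
    let peek = || {
        if let ref cb = s_ref {
            let _: &&String = cb;
        }
    };
    peek();
    println!("{s_ref}"); // still usable afterwards
}
```
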
diff --git a/crates/hir-ty/src/mapping.rs b/crates/hir-ty/src/mapping.rs
index 6936d81..9d3d204 100644
--- a/crates/hir-ty/src/mapping.rs
+++ b/crates/hir-ty/src/mapping.rs
@@ -13,7 +13,8 @@
use crate::{
AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, Interner, OpaqueTyId,
- PlaceholderIndex, chalk_db, db::HirDatabase,
+ PlaceholderIndex, chalk_db,
+ db::{HirDatabase, InternedLifetimeParamId, InternedTypeOrConstParamId},
};
pub trait ToChalk {
@@ -125,30 +126,32 @@
pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_type_or_const_param_id(interned_id)
+ let interned_id =
+ InternedTypeOrConstParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex {
- let interned_id = db.intern_type_or_const_param_id(id);
+ let interned_id = InternedTypeOrConstParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
// SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound.
- let interned_id = FromId::from_id(unsafe { Id::from_u32(idx.idx.try_into().unwrap()) });
- db.lookup_intern_lifetime_param_id(interned_id)
+ let interned_id =
+ InternedLifetimeParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) });
+ interned_id.loc(db)
}
pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> PlaceholderIndex {
- let interned_id = db.intern_lifetime_param_id(id);
+ let interned_id = InternedLifetimeParamId::new(db, id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
- idx: interned_id.as_id().as_u32() as usize,
+ idx: interned_id.as_id().index() as usize,
}
}
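
For orientation on the mapping change above: a placeholder index is just the interned ID's index, so `to_placeholder_idx` stores `id.as_id().index()` and `from_placeholder_idx` rebuilds the ID from that index (previously via `as_u32`/`from_u32`). A toy interner, not salsa, showing the round-trip this relies on:

```rust
use std::collections::HashMap;

// Stand-in for `TypeOrConstParamId`: some small, hashable key.
type ParamId = (u32, u32);

#[derive(Default)]
struct Interner {
    values: Vec<ParamId>,
    map: HashMap<ParamId, usize>,
}

impl Interner {
    // `to_placeholder_idx` direction: interning yields a stable index.
    fn intern(&mut self, value: ParamId) -> usize {
        if let Some(&idx) = self.map.get(&value) {
            return idx;
        }
        let idx = self.values.len();
        self.values.push(value);
        self.map.insert(value, idx);
        idx
    }

    // `from_placeholder_idx` direction: the index recovers the original value.
    fn lookup(&self, idx: usize) -> ParamId {
        self.values[idx]
    }
}

fn main() {
    let mut interner = Interner::default();
    let id: ParamId = (7, 3);
    let idx = interner.intern(id);
    assert_eq!(interner.lookup(idx), id); // round-trip holds
    assert_eq!(interner.intern(id), idx); // interning is idempotent
}
```

The real code additionally trusts that the index survives the trip through salsa's `AsId`/`FromId`, hence the `SAFETY` comments in the hunk.
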
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index d049c67..b5de0e5 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -166,10 +166,10 @@
self.events.lock().unwrap().take().unwrap()
}
- pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ pub(crate) fn log_executed(&self, f: impl FnOnce()) -> (Vec<String>, Vec<salsa::Event>) {
let events = self.log(f);
- events
- .into_iter()
+ let executed = events
+ .iter()
.filter_map(|e| match e.kind {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
@@ -181,6 +181,7 @@
}
_ => None,
})
- .collect()
+ .collect();
+ (executed, events)
}
}
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 7fb9817..dbc68ee 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -446,7 +446,7 @@
}
#[test]
-fn let_binding_is_a_ref_capture() {
+fn let_binding_is_a_ref_capture_in_ref_binding() {
check_closure_captures(
r#"
//- minicore:copy
@@ -454,12 +454,36 @@
fn main() {
let mut s = S;
let s_ref = &mut s;
+ let mut s2 = S;
+ let s_ref2 = &mut s2;
let closure = || {
if let ref cb = s_ref {
+ } else if let ref mut cb = s_ref2 {
}
};
}
"#,
- expect!["83..135;49..54;112..117 ByRef(Shared) s_ref &'? &'? mut S"],
+ expect![[r#"
+ 129..225;49..54;149..155 ByRef(Shared) s_ref &'? &'? mut S
+ 129..225;93..99;188..198 ByRef(Mut { kind: Default }) s_ref2 &'? mut &'? mut S"#]],
+ );
+}
+
+#[test]
+fn let_binding_is_a_value_capture_in_binding() {
+ check_closure_captures(
+ r#"
+//- minicore:copy, option
+struct Box(i32);
+fn main() {
+ let b = Some(Box(0));
+ let closure = || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ expect!["73..149;37..38;103..104 ByValue b Option<Box>"],
);
}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 0377ce9..3159499 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -1,6 +1,7 @@
use base_db::SourceDatabase;
use expect_test::Expect;
use hir_def::{DefWithBodyId, ModuleDefId};
+use salsa::EventKind;
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB};
@@ -567,11 +568,11 @@
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "query_with_diagnostics_",
+ "TraitItems::query_with_diagnostics_",
"body_shim",
"body_with_source_map_shim",
"attrs_shim",
- "of_",
+ "ImplItems::of_",
"infer_shim",
"trait_signature_shim",
"trait_signature_with_source_map_shim",
@@ -596,8 +597,8 @@
"struct_signature_with_source_map_shim",
"generic_predicates_shim",
"value_ty_shim",
- "firewall_",
- "query_",
+ "VariantFields::firewall_",
+ "VariantFields::query_",
"lang_item",
"inherent_impls_in_crate_shim",
"impl_signature_shim",
@@ -674,11 +675,11 @@
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
- "query_with_diagnostics_",
+ "TraitItems::query_with_diagnostics_",
"body_with_source_map_shim",
"attrs_shim",
"body_shim",
- "of_",
+ "ImplItems::of_",
"infer_shim",
"attrs_shim",
"trait_signature_with_source_map_shim",
@@ -697,7 +698,7 @@
"function_signature_with_source_map_shim",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
- "query_",
+ "VariantFields::query_",
"inherent_impls_in_crate_shim",
"impl_signature_with_source_map_shim",
"impl_signature_shim",
@@ -718,10 +719,23 @@
required: &[(&str, usize)],
expect: Expect,
) {
- let events = db.log_executed(f);
- for (event, count) in required {
- let n = events.iter().filter(|it| it.contains(event)).count();
- assert_eq!(n, *count, "Expected {event} to be executed {count} times, but only got {n}");
- }
- expect.assert_debug_eq(&events);
+ let (executed, events) = db.log_executed(f);
+ salsa::attach(db, || {
+ for (event, count) in required {
+ let n = executed.iter().filter(|it| it.contains(event)).count();
+ assert_eq!(
+ n,
+ *count,
+ "Expected {event} to be executed {count} times, but only got {n}:\n \
+ Executed: {executed:#?}\n \
+ Event log: {events:#?}",
+ events = events
+ .iter()
+ .filter(|event| !matches!(event.kind, EventKind::WillCheckCancellation))
+ .map(|event| { format!("{:?}", event.kind) })
+ .collect::<Vec<_>>(),
+ );
+ }
+ expect.assert_debug_eq(&executed);
+ });
}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index 43e8f37..b154e59 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -3751,7 +3751,7 @@
}
let v: bool = true;
m!();
- // ^^^^ i32
+ // ^^ i32
}
"#,
);
@@ -3765,39 +3765,39 @@
let v: bool;
macro_rules! m { () => { v } }
m!();
- // ^^^^ bool
+ // ^^ bool
let v: char;
macro_rules! m { () => { v } }
m!();
- // ^^^^ char
+ // ^^ char
{
let v: u8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u8
+ // ^^ u8
let v: i8;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i8
+ // ^^ i8
let v: i16;
macro_rules! m { () => { v } }
m!();
- // ^^^^ i16
+ // ^^ i16
{
let v: u32;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u32
+ // ^^ u32
let v: u64;
macro_rules! m { () => { v } }
m!();
- // ^^^^ u64
+ // ^^ u64
}
}
}
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index 08a215f..87d9df6 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -54,14 +54,14 @@
variances.is_empty().not().then(|| Arc::from_iter(variances))
}
-// pub(crate) fn variances_of_cycle_fn(
-// _db: &dyn HirDatabase,
-// _result: &Option<Arc<[Variance]>>,
-// _count: u32,
-// _def: GenericDefId,
-// ) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
-// salsa::CycleRecoveryAction::Iterate
-// }
+pub(crate) fn variances_of_cycle_fn(
+ _db: &dyn HirDatabase,
+ _result: &Option<Arc<[Variance]>>,
+ _count: u32,
+ _def: GenericDefId,
+) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
+ salsa::CycleRecoveryAction::Iterate
+}
pub(crate) fn variances_of_cycle_initial(
db: &dyn HirDatabase,
@@ -965,7 +965,7 @@
struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
"#,
expect![[r#"
- FixedPoint[T: bivariant, U: bivariant, V: bivariant]
+ FixedPoint[T: covariant, U: covariant, V: covariant]
"#]],
);
}
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index aba2e03..c1e814e 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -36,16 +36,16 @@
};
macro_rules! diagnostics {
- ($($diag:ident $(<$lt:lifetime>)?,)*) => {
+ ($AnyDiagnostic:ident <$db:lifetime> -> $($diag:ident $(<$lt:lifetime>)?,)*) => {
#[derive(Debug)]
- pub enum AnyDiagnostic<'db> {$(
+ pub enum $AnyDiagnostic<$db> {$(
$diag(Box<$diag $(<$lt>)?>),
)*}
$(
- impl<'db> From<$diag $(<$lt>)?> for AnyDiagnostic<'db> {
- fn from(d: $diag $(<$lt>)?) -> AnyDiagnostic<'db> {
- AnyDiagnostic::$diag(Box::new(d))
+ impl<$db> From<$diag $(<$lt>)?> for $AnyDiagnostic<$db> {
+ fn from(d: $diag $(<$lt>)?) -> $AnyDiagnostic<$db> {
+ $AnyDiagnostic::$diag(Box::new(d))
}
}
)*
@@ -66,7 +66,7 @@
// }, ...
// ]
-diagnostics![
+diagnostics![AnyDiagnostic<'db> ->
AwaitOutsideOfAsync,
BreakOutsideOfLoop,
CastToUnsized<'db>,
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index f18ca7c..cbd472f 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -765,7 +765,8 @@
},
};
- let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, Some(store))?;
+ let body_owner = self.resolver.body_owner();
+ let res = resolve_hir_value_path(db, &self.resolver, body_owner, path, HygieneId::ROOT)?;
match res {
PathResolution::Def(def) => Some(def),
_ => None,
diff --git a/crates/ide-assists/src/handlers/expand_rest_pattern.rs b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
index b71de5e..c80b78f 100644
--- a/crates/ide-assists/src/handlers/expand_rest_pattern.rs
+++ b/crates/ide-assists/src/handlers/expand_rest_pattern.rs
@@ -175,7 +175,7 @@
// ast::TuplePat(it) => (),
// FIXME
// ast::SlicePat(it) => (),
- _ => return None,
+ _ => None,
}
}
}
diff --git a/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
index 5aedff5..609ef34 100644
--- a/crates/ide-assists/src/handlers/unmerge_match_arm.rs
+++ b/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -1,8 +1,8 @@
use syntax::{
Direction, SyntaxKind, T,
algo::neighbor,
- ast::{self, AstNode, edit::IndentLevel, make},
- ted::{self, Position},
+ ast::{self, AstNode, edit::IndentLevel, syntax_factory::SyntaxFactory},
+ syntax_editor::{Element, Position},
};
use crate::{AssistContext, AssistId, Assists};
@@ -33,7 +33,7 @@
// ```
pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let pipe_token = ctx.find_token_syntax_at_offset(T![|])?;
- let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update();
+ let or_pat = ast::OrPat::cast(pipe_token.parent()?)?;
if or_pat.leading_pipe().is_some_and(|it| it == pipe_token) {
return None;
}
@@ -44,13 +44,14 @@
// without `OrPat`.
let new_parent = match_arm.syntax().parent()?;
- let old_parent_range = new_parent.text_range();
acc.add(
AssistId::refactor_rewrite("unmerge_match_arm"),
"Unmerge match arm",
pipe_token.text_range(),
|edit| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = edit.make_editor(&new_parent);
let pats_after = pipe_token
.siblings_with_tokens(Direction::Next)
.filter_map(|it| ast::Pat::cast(it.into_node()?))
@@ -59,11 +60,9 @@
let new_pat = if pats_after.len() == 1 {
pats_after[0].clone()
} else {
- make::or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
+ make.or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
};
- let new_match_arm =
- make::match_arm(new_pat, match_arm.guard(), match_arm_body).clone_for_update();
-
+ let new_match_arm = make.match_arm(new_pat, match_arm.guard(), match_arm_body);
let mut pipe_index = pipe_token.index();
if pipe_token
.prev_sibling_or_token()
@@ -71,10 +70,13 @@
{
pipe_index -= 1;
}
- or_pat.syntax().splice_children(
- pipe_index..or_pat.syntax().children_with_tokens().count(),
- Vec::new(),
- );
+ for child in or_pat
+ .syntax()
+ .children_with_tokens()
+ .skip_while(|child| child.index() < pipe_index)
+ {
+ editor.delete(child.syntax_element());
+ }
let mut insert_after_old_arm = Vec::new();
@@ -95,24 +97,20 @@
== Some(T![,]);
let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
if !has_comma_after && !has_arms_after {
- insert_after_old_arm.push(make::token(T![,]).into());
+ insert_after_old_arm.push(make.token(T![,]).into());
}
let indent = IndentLevel::from_node(match_arm.syntax());
- insert_after_old_arm.push(make::tokens::whitespace(&format!("\n{indent}")).into());
+ insert_after_old_arm.push(make.whitespace(&format!("\n{indent}")).into());
insert_after_old_arm.push(new_match_arm.syntax().clone().into());
- ted::insert_all_raw(Position::after(match_arm.syntax()), insert_after_old_arm);
-
if has_comma_after {
- ted::insert_raw(
- Position::last_child_of(new_match_arm.syntax()),
- make::token(T![,]),
- );
+ insert_after_old_arm.push(make.token(T![,]).into());
}
-
- edit.replace(old_parent_range, new_parent.to_string());
+ editor.insert_all(Position::after(match_arm.syntax()), insert_after_old_arm);
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index 1a91053..87a4c2e 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -1,5 +1,7 @@
//! Assorted functions shared by several assists.
+use std::slice;
+
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
use hir::{
DisplayTarget, HasAttrs as HirHasAttrs, HirDisplay, InFile, ModuleDef, PathResolution,
@@ -912,7 +914,7 @@
) -> Option<(ReferenceConversionType, bool)> {
let str_type = hir::BuiltinType::str().ty(db);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type.clone()])
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&str_type))
.then_some((ReferenceConversionType::AsRefStr, could_deref_to_target(ty, &str_type, db)))
}
@@ -924,7 +926,7 @@
let type_argument = ty.type_arguments().next()?;
let slice_type = hir::Type::new_slice(type_argument);
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type.clone()]).then_some((
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&slice_type)).then_some((
ReferenceConversionType::AsRefSlice,
could_deref_to_target(ty, &slice_type, db),
))
@@ -937,10 +939,11 @@
) -> Option<(ReferenceConversionType, bool)> {
let type_argument = ty.type_arguments().next()?;
- ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument.clone()]).then_some((
- ReferenceConversionType::Dereferenced,
- could_deref_to_target(ty, &type_argument, db),
- ))
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, slice::from_ref(&type_argument))
+ .then_some((
+ ReferenceConversionType::Dereferenced,
+ could_deref_to_target(ty, &type_argument, db),
+ ))
}
fn handle_option_as_ref(
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs
index 411902f..46a3630 100644
--- a/crates/ide-completion/src/tests/attribute.rs
+++ b/crates/ide-completion/src/tests/attribute.rs
@@ -878,6 +878,7 @@
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialEq macro PartialEq
de PartialEq, Eq
@@ -900,6 +901,7 @@
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -921,6 +923,7 @@
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de Eq
de Eq, PartialOrd, Ord
@@ -942,6 +945,7 @@
expect![[r#"
de Clone macro Clone
de Clone, Copy
+ de Debug macro Debug
de Default macro Default
de PartialOrd
de PartialOrd, Ord
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 5356614..e661857 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -272,5 +272,5 @@
.display_name
.as_deref()
.cloned()
- .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize))
+ .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).index() as usize))
}
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 7d460f7..4efb83b 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -317,7 +317,7 @@
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -332,7 +332,7 @@
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -341,7 +341,7 @@
if let Definition::SelfType(impl_) = self {
return match impl_.source(db).map(|src| src.syntax().cloned()) {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
@@ -360,7 +360,7 @@
};
return match def {
Some(def) => SearchScope::file_range(
- def.as_ref().original_file_range_with_macro_call_body(db),
+ def.as_ref().original_file_range_with_macro_call_input(db),
),
None => SearchScope::single_file(file_id),
};
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index 546512a..c39e00e 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -242,8 +242,8 @@
fn f() {
outer!();
-} //^^^^^^^^ error: leftover tokens
- //^^^^^^^^ error: Syntax Error in Expansion: expected expression
+} //^^^^^^ error: leftover tokens
+ //^^^^^^ error: Syntax Error in Expansion: expected expression
"#,
)
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 8a5d82b..7da799e 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -66,7 +66,7 @@
let current_module =
ctx.sema.scope(d.field_list_parent.to_node(&root).syntax()).map(|it| it.module());
let range = InFile::new(d.file, d.field_list_parent.text_range())
- .original_node_file_range_rooted(ctx.sema.db);
+ .original_node_file_range_rooted_opt(ctx.sema.db)?;
let build_text_edit = |new_syntax: &SyntaxNode, old_syntax| {
let edit = {
diff --git a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 0928262..1e80d02 100644
--- a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -239,4 +239,22 @@
"#,
)
}
+
+ #[test]
+ fn regression_20155() {
+ check_diagnostics(
+ r#"
+//- minicore: copy, option
+struct Box(i32);
+fn test() {
+ let b = Some(Box(0));
+ || {
+ if let Some(b) = b {
+ let _move = b;
+ }
+ };
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 4327b12..fc2648e 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -77,6 +77,7 @@
redundant_item_def: String,
range: TextRange,
) -> Option<Vec<Assist>> {
+ let file_id = d.file_id.file_id()?;
let add_assoc_item_def = |builder: &mut SourceChangeBuilder| -> Option<()> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.file_id);
@@ -90,12 +91,14 @@
let trait_def = d.trait_.source(db)?.value;
let l_curly = trait_def.assoc_item_list()?.l_curly_token()?.text_range();
let where_to_insert =
- hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted(db).range;
+ hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted_opt(db)?;
+ if where_to_insert.file_id != file_id {
+ return None;
+ }
- builder.insert(where_to_insert.end(), redundant_item_def);
+ builder.insert(where_to_insert.range.end(), redundant_item_def);
Some(())
};
- let file_id = d.file_id.file_id()?;
let mut source_change_builder = SourceChangeBuilder::new(file_id.file_id(ctx.sema.db));
add_assoc_item_def(&mut source_change_builder)?;
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 1f2d671..dcca85d 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -120,8 +120,7 @@
let call = ast::MethodCallExpr::cast(expr.syntax().clone())?;
let range = InFile::new(expr_ptr.file_id, call.syntax().text_range())
- .original_node_file_range_rooted(db)
- .range;
+ .original_node_file_range_rooted_opt(db)?;
let receiver = call.receiver()?;
let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original;
@@ -174,18 +173,16 @@
let assoc_func_call_expr_string = make::expr_call(assoc_func_path, args).to_string();
- let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;
-
Some(Assist {
id: AssistId::quick_fix("method_call_to_assoc_func_call_fix"),
label: Label::new(format!(
"Use associated func call instead: `{assoc_func_call_expr_string}`"
)),
group: None,
- target: range,
+ target: range.range,
source_change: Some(SourceChange::from_text_edit(
- file_id.file_id(ctx.sema.db),
- TextEdit::replace(range, assoc_func_call_expr_string),
+ range.file_id.file_id(ctx.sema.db),
+ TextEdit::replace(range.range, assoc_func_call_expr_string),
)),
command: None,
})
@@ -300,7 +297,7 @@
}
fn main() {
m!(());
- // ^^^^^^ error: no method `foo` on type `()`
+ // ^^ error: no method `foo` on type `()`
}
"#,
);
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index 4b8d07a..7a04059 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -592,7 +592,7 @@
"#,
expect!["callee Function FileId(0) 22..37 30..36"],
expect![[r#"
- caller Function FileId(0) 38..52 : FileId(0):44..50
+ caller Function FileId(0) 38..43 : FileId(0):44..50
caller Function FileId(1) 130..136 130..136 : FileId(0):44..50
callee Function FileId(0) 38..52 44..50 : FileId(0):44..50"#]],
expect![[]],
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 2c98328..f58202a 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -60,7 +60,7 @@
let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
.into_offset_iter();
- let doc = map_links(doc, |target, title, range| {
+ let doc = map_links(doc, |target, title, range, link_type| {
// This check is imperfect, there's some overlap between valid intra-doc links
// and valid URLs so we choose to be too eager to try to resolve what might be
// a URL.
@@ -78,7 +78,7 @@
.map(|(_, attr_id)| attr_id.is_inner_attr())
.unwrap_or(false);
if let Some((target, title)) =
- rewrite_intra_doc_link(db, definition, target, title, is_inner_doc)
+ rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type)
{
(None, target, title)
} else if let Some(target) = rewrite_url_link(db, definition, target) {
@@ -417,6 +417,7 @@
target: &str,
title: &str,
is_inner_doc: bool,
+ link_type: LinkType,
) -> Option<(String, String)> {
let (link, ns) = parse_intra_doc_link(target);
@@ -438,7 +439,21 @@
url = url.join(&file).ok()?;
url.set_fragment(frag);
- Some((url.into(), strip_prefixes_suffixes(title).to_owned()))
+ // We want to strip the keyword prefix from the title, but only if the target is implicitly the same
+ // as the title.
+ let title = match link_type {
+ LinkType::Email
+ | LinkType::Autolink
+ | LinkType::Shortcut
+ | LinkType::Collapsed
+ | LinkType::Reference
+ | LinkType::Inline => title.to_owned(),
+ LinkType::ShortcutUnknown | LinkType::CollapsedUnknown | LinkType::ReferenceUnknown => {
+ strip_prefixes_suffixes(title).to_owned()
+ }
+ };
+
+ Some((url.into(), title))
}
/// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`).
@@ -470,7 +485,7 @@
/// Rewrites a markdown document, applying 'callback' to each link.
fn map_links<'e>(
events: impl Iterator<Item = (Event<'e>, Range<usize>)>,
- callback: impl Fn(&str, &str, Range<usize>) -> (Option<LinkType>, String, String),
+ callback: impl Fn(&str, &str, Range<usize>, LinkType) -> (Option<LinkType>, String, String),
) -> impl Iterator<Item = Event<'e>> {
let mut in_link = false;
// holds the origin link target on start event and the rewritten one on end event
@@ -497,7 +512,7 @@
}
Event::Text(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
end_link_type = link_type;
@@ -506,7 +521,7 @@
}
Event::Code(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s, range);
+ callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
end_link_type = link_type;
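
A condensed view of the new rule in `rewrite_intra_doc_link`: keyword prefixes such as `macro@` are stripped from the link *title* only when the title is implicitly the same text as the target (unknown shortcut/collapsed/reference links); explicit titles stay verbatim. A sketch assuming the `pulldown-cmark` crate, with `strip_prefixes_suffixes` standing in for rust-analyzer's helper:

```rust
use pulldown_cmark::LinkType;

// Illustrative stand-in; the real helper handles the full set of intra-doc disambiguators.
fn strip_prefixes_suffixes(s: &str) -> &str {
    s.trim_start_matches("macro@").trim_start_matches("struct@").trim_start_matches("enum@")
}

fn rewritten_title(title: &str, link_type: LinkType) -> String {
    match link_type {
        // `[macro@foo]`-style links reuse the target text as the title, so strip the prefix.
        LinkType::ShortcutUnknown | LinkType::CollapsedUnknown | LinkType::ReferenceUnknown => {
            strip_prefixes_suffixes(title).to_owned()
        }
        // Explicit titles such as `[macro expansion](Foo::MacroExpansion)` stay verbatim.
        _ => title.to_owned(),
    }
}

fn main() {
    assert_eq!(rewritten_title("macro expansion", LinkType::Inline), "macro expansion");
    assert_eq!(rewritten_title("macro@foo", LinkType::ShortcutUnknown), "foo");
}
```

This is what the new `keyword_inside_link` hover test further down exercises.
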
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index fd465f3..29fc68b 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -1082,7 +1082,7 @@
}
define_fn!();
-//^^^^^^^^^^^^^
+//^^^^^^^^^^
fn bar() {
$0foo();
}
@@ -3228,7 +3228,7 @@
use crate::m;
m!();
- // ^^^^^
+ // ^^
fn qux() {
Foo$0;
@@ -3851,4 +3851,76 @@
"#,
);
}
+
+ #[test]
+ fn goto_const_from_match_pat_with_tuple_struct() {
+ check(
+ r#"
+struct Tag(u8);
+struct Path {}
+
+const Path: u8 = 0;
+ // ^^^^
+fn main() {
+ match Tag(Path) {
+ Tag(Path$0) => {}
+ _ => {}
+ }
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_const_from_match_pat() {
+ check(
+ r#"
+type T1 = u8;
+const T1: u8 = 0;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_struct_from_match_pat() {
+ check(
+ r#"
+struct T1;
+ // ^^
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ _ => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_goto_trait_from_match_pat() {
+ check(
+ r#"
+trait T1 {}
+fn main() {
+ let x = 0;
+ match x {
+ T1$0 => {}
+ // ^^
+ _ => {}
+ }
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index a281a49..f63499a 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -10927,3 +10927,34 @@
"#]],
);
}
+
+#[test]
+fn keyword_inside_link() {
+ check(
+ r#"
+enum Foo {
+ MacroExpansion,
+}
+
+/// I return a [macro expansion](Foo::MacroExpansion).
+fn bar$0() -> Foo {
+ Foo::MacroExpansion
+}
+ "#,
+        expect![[r#"
+ "#]],
+ );
+}
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 4c7c597..7dc1814 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -844,7 +844,7 @@
// *should* contain the name
_ => {
let kind = call_kind();
- let range = kind.clone().original_call_range_with_body(db);
+ let range = kind.clone().original_call_range_with_input(db);
//If the focus range is in the attribute/derive body, we
// need to point the call site to the entire body, if not, fall back
// to the name range of the attribute/derive call
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index f48150b..9d1a5ba 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -351,7 +351,7 @@
)
.call_site();
- let file_range = fn_source.syntax().original_file_range_with_macro_call_body(sema.db);
+ let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &fn_source.file_syntax(sema.db), file_range);
@@ -425,7 +425,7 @@
let impl_source = sema.source(*def)?;
let impl_syntax = impl_source.syntax();
- let file_range = impl_syntax.original_file_range_with_macro_call_body(sema.db);
+ let file_range = impl_syntax.original_file_range_with_macro_call_input(sema.db);
let update_test =
UpdateTest::find_snapshot_macro(sema, &impl_syntax.file_syntax(sema.db), file_range);
@@ -1241,10 +1241,10 @@
[
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..345, name: \"\", kind: Module })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 282..312, focus_range: 286..291, name: \"tests\", kind: Module, description: \"mod tests\" })",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..310, name: \"foo_test\", kind: Function })",
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..326, name: \"foo_test2\", kind: Function }, true)",
- "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..344, name: \"main\", kind: Function })",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 298..307, name: \"foo_test\", kind: Function })",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 313..323, name: \"foo_test2\", kind: Function }, true)",
+ "(Bin, NavigationTarget { file_id: FileId(0), full_range: 327..341, name: \"main\", kind: Function })",
]
"#]],
);
@@ -1272,10 +1272,10 @@
"#,
expect![[r#"
[
- "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)",
- "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)",
+ "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo0\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo1\", kind: Function }, true)",
+ "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..214, name: \"foo2\", kind: Function }, true)",
]
"#]],
);
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index 7985279..25deffe 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -79,7 +79,7 @@
}
fn node_id(&'a self, n: &Crate) -> Id<'a> {
- let id = n.as_id().as_u32();
+ let id = n.as_id().index();
Id::new(format!("_{id:?}")).unwrap()
}
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 52f5967..26ee698 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -11,7 +11,7 @@
};
use ide_db::{
ChangeWithProcMacros, FxHashMap, RootDatabase,
- base_db::{CrateGraphBuilder, Env, SourceRoot, SourceRootId},
+ base_db::{CrateGraphBuilder, Env, ProcMacroLoadingError, SourceRoot, SourceRootId},
prime_caches,
};
use itertools::Itertools;
@@ -69,6 +69,23 @@
extra_env: &FxHashMap<String, Option<String>>,
load_config: &LoadCargoConfig,
) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroClient>)> {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
+ let mut db = RootDatabase::new(lru_cap);
+
+ let (vfs, proc_macro_server) = load_workspace_into_db(ws, extra_env, load_config, &mut db)?;
+
+ Ok((db, vfs, proc_macro_server))
+}
+
+// This variant of `load_workspace` loads the workspace into a database provided
+// by the caller, which is useful in certain third-party scenarios, now that
+// `salsa` supports extending foreign databases (e.g. `RootDatabase`).
+pub fn load_workspace_into_db(
+ ws: ProjectWorkspace,
+ extra_env: &FxHashMap<String, Option<String>>,
+ load_config: &LoadCargoConfig,
+ db: &mut RootDatabase,
+) -> anyhow::Result<(vfs::Vfs, Option<ProcMacroClient>)> {
let (sender, receiver) = unbounded();
let mut vfs = vfs::Vfs::default();
let mut loader = {
@@ -78,23 +95,27 @@
tracing::debug!(?load_config, "LoadCargoConfig");
let proc_macro_server = match &load_config.with_proc_macro_server {
- ProcMacroServerChoice::Sysroot => ws
- .find_sysroot_proc_macro_srv()
- .and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into))
- .map_err(|e| (e, true)),
+ ProcMacroServerChoice::Sysroot => ws.find_sysroot_proc_macro_srv().map(|it| {
+ it.and_then(|it| ProcMacroClient::spawn(&it, extra_env).map_err(Into::into)).map_err(
+ |e| ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()),
+ )
+ }),
ProcMacroServerChoice::Explicit(path) => {
- ProcMacroClient::spawn(path, extra_env).map_err(Into::into).map_err(|e| (e, true))
+ Some(ProcMacroClient::spawn(path, extra_env).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str())
+ }))
}
- ProcMacroServerChoice::None => {
- Err((anyhow::format_err!("proc macro server disabled"), false))
- }
+ ProcMacroServerChoice::None => Some(Err(ProcMacroLoadingError::Disabled)),
};
match &proc_macro_server {
- Ok(server) => {
- tracing::info!(path=%server.server_path(), "Proc-macro server started")
+ Some(Ok(server)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), path=%server.server_path(), "Proc-macro server started")
}
- Err((e, _)) => {
- tracing::info!(%e, "Failed to start proc-macro server")
+ Some(Err(e)) => {
+ tracing::info!(manifest=%ws.manifest_or_root(), %e, "Failed to start proc-macro server")
+ }
+ None => {
+ tracing::info!(manifest=%ws.manifest_or_root(), "No proc-macro server started")
}
}
@@ -111,22 +132,24 @@
);
let proc_macros = {
let proc_macro_server = match &proc_macro_server {
- Ok(it) => Ok(it),
- Err((e, hard_err)) => Err((e.to_string(), *hard_err)),
+ Some(Ok(it)) => Ok(it),
+ Some(Err(e)) => {
+ Err(ProcMacroLoadingError::ProcMacroSrvError(e.to_string().into_boxed_str()))
+ }
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running, workspace is missing a sysroot".into(),
+ )),
};
proc_macros
.into_iter()
.map(|(crate_id, path)| {
(
crate_id,
- path.map_or_else(
- |e| Err((e, true)),
- |(_, path)| {
- proc_macro_server.as_ref().map_err(Clone::clone).and_then(
- |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
- )
- },
- ),
+ path.map_or_else(Err, |(_, path)| {
+ proc_macro_server.as_ref().map_err(Clone::clone).and_then(
+ |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
+ )
+ }),
)
})
.collect()
@@ -139,18 +162,20 @@
version: 0,
});
- let db = load_crate_graph(
+ load_crate_graph_into_db(
crate_graph,
proc_macros,
project_folders.source_root_config,
&mut vfs,
&receiver,
+ db,
);
if load_config.prefill_caches {
- prime_caches::parallel_prime_caches(&db, 1, &|_| ());
+ prime_caches::parallel_prime_caches(db, 1, &|_| ());
}
- Ok((db, vfs, proc_macro_server.ok()))
+
+ Ok((vfs, proc_macro_server.and_then(Result::ok)))
}
#[derive(Default)]
@@ -391,11 +416,13 @@
path: &AbsPath,
ignored_macros: &[Box<str>],
) -> ProcMacroLoadResult {
- let res: Result<Vec<_>, String> = (|| {
+ let res: Result<Vec<_>, _> = (|| {
let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ let vec = server.load_dylib(dylib).map_err(|e| {
+ ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
+ })?;
if vec.is_empty() {
- return Err("proc macro library returned no proc macros".to_owned());
+ return Err(ProcMacroLoadingError::NoProcMacros);
}
Ok(vec
.into_iter()
@@ -412,20 +439,19 @@
}
Err(e) => {
tracing::warn!("proc-macro loading for {path} failed: {e}");
- Err((e, true))
+ Err(e)
}
}
}
-fn load_crate_graph(
+fn load_crate_graph_into_db(
crate_graph: CrateGraphBuilder,
proc_macros: ProcMacrosBuilder,
source_root_config: SourceRootConfig,
vfs: &mut vfs::Vfs,
receiver: &Receiver<vfs::loader::Message>,
-) -> RootDatabase {
- let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
- let mut db = RootDatabase::new(lru_cap);
+ db: &mut RootDatabase,
+) {
let mut analysis_change = ChangeWithProcMacros::default();
db.enable_proc_attr_macros();
@@ -462,7 +488,6 @@
analysis_change.set_proc_macros(proc_macros);
db.apply_change(analysis_change);
- db
}
fn expander_to_proc_macro(
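
For third-party embedders, the split above means the database is created (or reused) by the caller and `load_workspace_into_db` only fills it. The following is a hedged usage sketch against the new signature; how `ws` is obtained and the exact `LoadCargoConfig` values are assumptions, not part of this patch.

use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace_into_db};

fn load_into_existing_db(ws: project_model::ProjectWorkspace) -> anyhow::Result<()> {
    // Caller-owned database; `None` keeps the default LRU capacity.
    let mut db = ide_db::RootDatabase::new(None);
    let load_config = LoadCargoConfig {
        load_out_dirs_from_check: true,
        with_proc_macro_server: ProcMacroServerChoice::Sysroot,
        prefill_caches: false,
    };
    let (vfs, _proc_macro_client) =
        load_workspace_into_db(ws, &Default::default(), &load_config, &mut db)?;
    // `db` now contains the crate graph; `vfs` maps FileIds back to paths.
    drop(vfs);
    Ok(())
}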
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index bbaa8f4..adbaa8e 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -409,13 +409,6 @@
cmd.arg("--target-dir").arg(target_dir);
}
- // --all-targets includes tests, benches and examples in addition to the
- // default lib and bins. This is an independent concept from the --target
- // flag below.
- if config.all_targets {
- cmd.arg("--all-targets");
- }
-
if let Some(target) = &config.target {
cmd.args(["--target", target]);
}
@@ -463,14 +456,26 @@
cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
cmd.arg("-Zunstable-options");
cmd.arg("--compile-time-deps");
- } else if config.wrap_rustc_in_build_scripts {
- // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
- // that to compile only proc macros and build scripts during the initial
- // `cargo check`.
- // We don't need this if we are using `--compile-time-deps` flag.
- let myself = std::env::current_exe()?;
- cmd.env("RUSTC_WRAPPER", myself);
- cmd.env("RA_RUSTC_WRAPPER", "1");
+ // we can pass this unconditionally, because we won't actually build the
+ // binaries, and as such, this will succeed even on targets without libtest
+ cmd.arg("--all-targets");
+ } else {
+ // --all-targets includes tests, benches and examples in addition to the
+ // default lib and bins. This is an independent concept from the --target
+ // flag below.
+ if config.all_targets {
+ cmd.arg("--all-targets");
+ }
+
+ if config.wrap_rustc_in_build_scripts {
+ // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
+ // that to compile only proc macros and build scripts during the initial
+ // `cargo check`.
+ // We don't need this if we are using `--compile-time-deps` flag.
+ let myself = std::env::current_exe()?;
+ cmd.env("RUSTC_WRAPPER", myself);
+ cmd.env("RA_RUSTC_WRAPPER", "1");
+ }
}
Ok(cmd)
}
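
The reshuffled branch gives `--compile-time-deps` priority: on that path `--all-targets` is always safe because nothing is actually built, while the `all_targets` config flag and the RUSTC_WRAPPER trick only apply on the fallback path. A rough standalone sketch of just that precedence (flag names only; the real command also carries `--target-dir`, `--target`, environment variables, and more):

fn extra_check_args(compile_time_deps: bool, all_targets: bool) -> Vec<&'static str> {
    let mut args = Vec::new();
    if compile_time_deps {
        // Nightly-only fast path: nothing is built, so --all-targets is passed
        // unconditionally even on targets without libtest.
        args.extend(["-Zunstable-options", "--compile-time-deps", "--all-targets"]);
    } else if all_targets {
        // Fallback path: --all-targets stays opt-in; the real code also sets
        // RUSTC_WRAPPER / RA_RUSTC_WRAPPER here when configured.
        args.push("--all-targets");
    }
    args
}

fn main() {
    assert_eq!(
        extra_check_args(true, false),
        ["-Zunstable-options", "--compile-time-deps", "--all-targets"]
    );
    assert_eq!(extra_check_args(false, true), ["--all-targets"]);
}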
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 5a6c5b4..9f19260 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -163,18 +163,18 @@
}
}
- pub fn discover_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
- let Some(root) = self.root() else {
- return Err(anyhow::format_err!("no sysroot",));
- };
- ["libexec", "lib"]
- .into_iter()
- .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
- .find_map(|server_path| probe_for_binary(server_path.into()))
- .map(AbsPathBuf::assert)
- .ok_or_else(|| {
- anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
- })
+ pub fn discover_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
+ let root = self.root()?;
+ Some(
+ ["libexec", "lib"]
+ .into_iter()
+ .map(|segment| root.join(segment).join("rust-analyzer-proc-macro-srv"))
+ .find_map(|server_path| probe_for_binary(server_path.into()))
+ .map(AbsPathBuf::assert)
+ .ok_or_else(|| {
+ anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", root)
+ }),
+ )
}
fn assemble(
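
The new `Option<anyhow::Result<AbsPathBuf>>` return value separates "no sysroot configured at all" from "sysroot present, but no proc-macro server inside it". A small sketch of how a caller can tell the cases apart, with `PathBuf` standing in for `AbsPathBuf` so it compiles standalone:

use std::path::PathBuf;

fn describe(found: Option<anyhow::Result<PathBuf>>) -> String {
    match found {
        // No sysroot was set, so discovery never ran; not an error per se.
        None => "no sysroot configured; proc-macro server not started".to_owned(),
        // Sysroot exists but neither libexec/ nor lib/ contained the binary.
        Some(Err(e)) => format!("sysroot found, but no proc-macro server: {e}"),
        Some(Ok(path)) => format!("using proc-macro server at {}", path.display()),
    }
}

fn main() {
    println!("{}", describe(None));
    println!("{}", describe(Some(Err(anyhow::anyhow!("not in libexec/ or lib/")))));
}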
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index c5b36f1..43db84b 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -7,8 +7,8 @@
use anyhow::Context;
use base_db::{
CrateBuilderId, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin,
- CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroPaths,
- TargetLayoutLoadResult,
+ CrateWorkspaceData, DependencyBuilder, Env, LangCrateOrigin, ProcMacroLoadingError,
+ ProcMacroPaths, TargetLayoutLoadResult,
};
use cfg::{CfgAtom, CfgDiff, CfgOptions};
use intern::{Symbol, sym};
@@ -748,7 +748,7 @@
}
}
- pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
+ pub fn find_sysroot_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
self.sysroot.discover_proc_macro_srv()
}
@@ -1645,11 +1645,11 @@
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
Some(path) => Ok((cargo_name.to_owned(), path.clone())),
- None if has_errors => Err("failed to build proc-macro".to_owned()),
- None => Err("proc-macro crate build data is missing dylib path".to_owned()),
+ None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
+ None => Err(ProcMacroLoadingError::MissingDylibPath),
}
}
- None => Err("build scripts have not been built".to_owned()),
+ None => Err(ProcMacroLoadingError::NotYetBuilt),
};
proc_macros.insert(crate_id, proc_macro);
}
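
The stringly-typed errors above become `ProcMacroLoadingError` variants used throughout this patch (`FailedToBuild`, `MissingDylibPath`, `NotYetBuilt` here; `Disabled`, `NoProcMacros`, and `ProcMacroSrvError` elsewhere). The sketch below is a hedged local mirror of the shape these call sites imply, not the authoritative definition in `base_db`, plus the classification step from the hunk above rewritten over plain strings:

#[allow(dead_code)]
#[derive(Clone, Debug)]
enum ProcMacroLoadingError {
    Disabled,
    FailedToBuild,
    MissingDylibPath,
    NotYetBuilt,
    NoProcMacros,
    // Free-form message coming from the proc-macro server itself.
    ProcMacroSrvError(Box<str>),
}

// Mirrors the match on `proc_macro_dylib_path` above.
fn classify(dylib_path: Option<&str>, has_errors: bool) -> Result<&str, ProcMacroLoadingError> {
    match dylib_path {
        Some(path) => Ok(path),
        None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
        None => Err(ProcMacroLoadingError::MissingDylibPath),
    }
}

fn main() {
    assert!(matches!(classify(None, true), Err(ProcMacroLoadingError::FailedToBuild)));
    assert!(matches!(classify(None, false), Err(ProcMacroLoadingError::MissingDylibPath)));
}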
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index e716d14..51d4c29 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1526,7 +1526,7 @@
CompletionConfig {
enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
- && self.caps.completion_item_edit_resolve(),
+ && self.caps.has_completion_item_resolve_additionalTextEdits(),
enable_self_on_the_fly: self.completion_autoself_enable(source_root).to_owned(),
enable_auto_iter: *self.completion_autoIter_enable(source_root),
enable_auto_await: *self.completion_autoAwait_enable(source_root),
@@ -2355,10 +2355,6 @@
.and_then(|it| it.version.as_ref())
}
- pub fn client_is_helix(&self) -> bool {
- self.client_info.as_ref().map(|it| it.name == "helix").unwrap_or_default()
- }
-
pub fn client_is_neovim(&self) -> bool {
self.client_info.as_ref().map(|it| it.name == "Neovim").unwrap_or_default()
}
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index a870232..62a28a1 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -101,7 +101,7 @@
pub(crate) last_reported_status: lsp_ext::ServerStatusParams,
// proc macros
- pub(crate) proc_macro_clients: Arc<[anyhow::Result<ProcMacroClient>]>,
+ pub(crate) proc_macro_clients: Arc<[Option<anyhow::Result<ProcMacroClient>>]>,
pub(crate) build_deps_changed: bool,
// Flycheck
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index 40d0556..aea116e 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -6,7 +6,7 @@
use ide_db::base_db::{
DbPanicContext,
- salsa::{self, Cancelled, UnexpectedCycle},
+ salsa::{self, Cancelled},
};
use lsp_server::{ExtractError, Response, ResponseError};
use serde::{Serialize, de::DeserializeOwned};
@@ -350,9 +350,6 @@
if let Some(panic_message) = panic_message {
message.push_str(": ");
message.push_str(panic_message);
- } else if let Some(cycle) = panic.downcast_ref::<UnexpectedCycle>() {
- tracing::error!("{cycle}");
- message.push_str(": unexpected cycle");
} else if let Ok(cancelled) = panic.downcast::<Cancelled>() {
tracing::error!("Cancellation propagated out of salsa! This is a bug");
return Err(HandlerCancelledError::Inner(*cancelled));
diff --git a/crates/rust-analyzer/src/lsp/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs
index 04e31f3..f94e748 100644
--- a/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -42,7 +42,7 @@
hover_provider: Some(HoverProviderCapability::Simple(true)),
completion_provider: Some(CompletionOptions {
resolve_provider: if config.client_is_neovim() {
- config.completion_item_edit_resolve().then_some(true)
+ config.has_completion_item_resolve_additionalTextEdits().then_some(true)
} else {
Some(config.caps().completions_resolve_provider())
},
@@ -207,8 +207,8 @@
serde_json::from_value(self.0.experimental.as_ref()?.get(index)?.clone()).ok()
}
- /// Parses client capabilities and returns all completion resolve capabilities rust-analyzer supports.
- pub fn completion_item_edit_resolve(&self) -> bool {
+ #[allow(non_snake_case)]
+ pub fn has_completion_item_resolve_additionalTextEdits(&self) -> bool {
(|| {
Some(
self.0
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 133d5a6..e798aa6 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -18,7 +18,7 @@
use hir::{ChangeWithProcMacros, ProcMacrosBuilder, db::DefDatabase};
use ide_db::{
FxHashMap,
- base_db::{CrateGraphBuilder, ProcMacroPaths, salsa::Durability},
+ base_db::{CrateGraphBuilder, ProcMacroLoadingError, ProcMacroPaths, salsa::Durability},
};
use itertools::Itertools;
use load_cargo::{ProjectFolders, load_proc_macro};
@@ -194,8 +194,7 @@
format_to!(message, "{e}");
});
- let proc_macro_clients =
- self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None));
+ let proc_macro_clients = self.proc_macro_clients.iter().chain(iter::repeat(&None));
for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
if let ProjectWorkspaceKind::Cargo { error: Some(error), .. }
@@ -252,7 +251,8 @@
message.push_str("\n\n");
}
}
- _ => (),
+ // sysroot was explicitly not set so we didn't discover a server
+ None => {}
}
}
}
@@ -419,14 +419,11 @@
};
let mut builder = ProcMacrosBuilder::default();
- let proc_macro_clients = proc_macro_clients
- .iter()
- .map(|res| res.as_ref().map_err(|e| e.to_string()))
- .chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into())));
+ let proc_macro_clients = proc_macro_clients.iter().chain(iter::repeat(&None));
for (client, paths) in proc_macro_clients.zip(paths) {
for (crate_id, res) in paths.iter() {
let expansion_res = match client {
- Ok(client) => match res {
+ Some(Ok(client)) => match res {
Ok((crate_name, path)) => {
progress(format!("loading proc-macros: {path}"));
let ignored_proc_macros = ignored_proc_macros
@@ -438,9 +435,14 @@
load_proc_macro(client, path, ignored_proc_macros)
}
- Err(e) => Err((e.clone(), true)),
+ Err(e) => Err(e.clone()),
},
- Err(ref e) => Err((e.clone(), true)),
+ Some(Err(e)) => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ e.to_string().into_boxed_str(),
+ )),
+ None => Err(ProcMacroLoadingError::ProcMacroSrvError(
+ "proc-macro-srv is not running".into(),
+ )),
};
builder.insert(*crate_id, expansion_res)
}
@@ -655,7 +657,10 @@
self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
let path = match self.config.proc_macro_srv() {
Some(path) => path,
- None => ws.find_sysroot_proc_macro_srv()?,
+ None => match ws.find_sysroot_proc_macro_srv()? {
+ Ok(path) => path,
+ Err(e) => return Some(Err(e)),
+ },
};
let env: FxHashMap<_, _> = match &ws.kind {
@@ -682,14 +687,14 @@
};
info!("Using proc-macro server at {path}");
- ProcMacroClient::spawn(&path, &env).map_err(|err| {
+ Some(ProcMacroClient::spawn(&path, &env).map_err(|err| {
tracing::error!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
);
anyhow::format_err!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
)
- })
+ }))
}))
}
@@ -753,14 +758,14 @@
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro has not been built yet".to_owned(), true))))
+ .map(|id| (id, Err(ProcMacroLoadingError::NotYetBuilt)))
.collect(),
);
} else {
change.set_proc_macros(
crate_graph
.iter()
- .map(|id| (id, Err(("proc-macro expansion is disabled".to_owned(), false))))
+ .map(|id| (id, Err(ProcMacroLoadingError::Disabled)))
.collect(),
);
}
diff --git a/crates/span/src/ast_id.rs b/crates/span/src/ast_id.rs
index 8e95971..121d2e3 100644
--- a/crates/span/src/ast_id.rs
+++ b/crates/span/src/ast_id.rs
@@ -107,9 +107,10 @@
}
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
+#[repr(u8)]
enum ErasedFileAstIdKind {
/// This needs to not change because it's depended upon by the proc macro server.
- Fixup,
+ Fixup = 0,
// The following are associated with `ErasedHasNameFileAstId`.
Enum,
Struct,
@@ -413,9 +414,9 @@
}
macro_rules! register_enum_ast_id {
- (impl AstIdNode for $($ident:ident),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
};
}
@@ -426,9 +427,9 @@
}
macro_rules! register_has_name_ast_id {
- (impl AstIdNode for $($ident:ident = $name_method:ident),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident = $name_method:ident),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
fn has_name_ast_id(node: &SyntaxNode, index_map: &mut ErasedAstIdNextIndexMap) -> Option<ErasedFileAstId> {
@@ -472,9 +473,9 @@
}
macro_rules! register_assoc_item_ast_id {
- (impl AstIdNode for $($ident:ident = $name_callback:expr),+ ) => {
+ (impl $AstIdNode:ident for $($ident:ident = $name_callback:expr),+ ) => {
$(
- impl AstIdNode for ast::$ident {}
+ impl $AstIdNode for ast::$ident {}
)+
fn assoc_item_ast_id(
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 7bb88ac..aef3fbf 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -97,6 +97,7 @@
const LOCATION: salsa::plumbing::Location =
salsa::plumbing::Location { file: file!(), line: line!() };
const DEBUG_NAME: &'static str = "SyntaxContextData";
+ const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
type Fields<'a> = SyntaxContextData;
type Struct<'a> = SyntaxContext;
}
@@ -108,7 +109,9 @@
static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<SyntaxContext>> =
zalsa_::IngredientCache::new();
CACHE.get_or_create(db.zalsa(), || {
- db.zalsa().add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ db.zalsa()
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
})
}
}
@@ -130,9 +133,12 @@
type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
fn lookup_or_create_ingredient_index(
- aux: &salsa::plumbing::Zalsa,
+ zalsa: &salsa::plumbing::Zalsa,
) -> salsa::plumbing::IngredientIndices {
- aux.add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>().into()
+ zalsa
+ .lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+ .get_or_create()
+ .into()
}
#[inline]
@@ -326,14 +332,14 @@
None
} else {
// SAFETY: By our invariant, this is either a root (which we verified it's not) or a valid `salsa::Id`.
- unsafe { Some(salsa::Id::from_u32(self.0)) }
+ unsafe { Some(salsa::Id::from_index(self.0)) }
}
}
#[inline]
fn from_salsa_id(id: salsa::Id) -> Self {
// SAFETY: This comes from a Salsa ID.
- unsafe { Self::from_u32(id.as_u32()) }
+ unsafe { Self::from_u32(id.index()) }
}
#[inline]
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 1d821e9..e830c6a 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -435,14 +435,16 @@
continue;
}
- let mut active_line_region = false;
- let mut inactive_line_region = false;
+ let mut active_line_region = 0;
+ let mut inactive_line_region = 0;
if let Some(idx) = trimmed.find("// :!") {
- inactive_line_region = true;
- inactive_regions.push(&trimmed[idx + "// :!".len()..]);
+ let regions = trimmed[idx + "// :!".len()..].split(", ");
+ inactive_line_region += regions.clone().count();
+ inactive_regions.extend(regions);
} else if let Some(idx) = trimmed.find("// :") {
- active_line_region = true;
- active_regions.push(&trimmed[idx + "// :".len()..]);
+ let regions = trimmed[idx + "// :".len()..].split(", ");
+ active_line_region += regions.clone().count();
+ active_regions.extend(regions);
}
let mut keep = true;
@@ -462,11 +464,11 @@
if keep {
buf.push_str(line);
}
- if active_line_region {
- active_regions.pop().unwrap();
+ if active_line_region > 0 {
+ active_regions.drain(active_regions.len() - active_line_region..);
}
- if inactive_line_region {
- inactive_regions.pop().unwrap();
+ if inactive_line_region > 0 {
+            inactive_regions.drain(inactive_regions.len() - inactive_line_region..);
}
}
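
The fixture change above lets a single `// :` or `// :!` marker open (and later close) several regions at once, comma-separated. Below is a standalone sketch of the active-region half of that parsing; the `// :!` inactive case works the same way and is omitted here.

fn parse_active_regions(line: &str) -> Vec<&str> {
    let trimmed = line.trim();
    match trimmed.find("// :") {
        // Everything after the marker is a comma-separated list of region names.
        Some(idx) => trimmed[idx + "// :".len()..].split(", ").collect(),
        None => Vec::new(),
    }
}

fn main() {
    assert_eq!(parse_active_regions("    panic, // :panic"), vec!["panic"]);
    assert_eq!(
        parse_active_regions("    fmt::derive::Debug, // :fmt, derive"),
        vec!["fmt", "derive"]
    );
}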
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index d48063f..c79aaba 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -228,8 +228,11 @@
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Hash($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Hash($item:item) {}
+ }
+ pub use derive::Hash;
// endregion:derive
}
// endregion:hash
@@ -1264,8 +1267,11 @@
}
// region:derive
- #[rustc_builtin_macro]
- pub macro Debug($item:item) {}
+ pub(crate) mod derive {
+ #[rustc_builtin_macro]
+ pub macro Debug($item:item) {}
+ }
+ pub use derive::Debug;
// endregion:derive
// region:builtin_impls
@@ -1931,6 +1937,8 @@
panic, // :panic
result::Result::{self, Err, Ok}, // :result
str::FromStr, // :str
+ fmt::derive::Debug, // :fmt, derive
+ hash::derive::Hash, // :hash, derive
};
}
diff --git a/crates/tt/src/iter.rs b/crates/tt/src/iter.rs
index 0418c00..3246156 100644
--- a/crates/tt/src/iter.rs
+++ b/crates/tt/src/iter.rs
@@ -211,6 +211,7 @@
}
}
+#[derive(Clone)]
pub enum TtElement<'a, S> {
Leaf(&'a Leaf<S>),
Subtree(&'a Subtree<S>, TtIter<'a, S>),
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs
index 1228e2e..0c41ede 100644
--- a/crates/vfs/src/file_set.rs
+++ b/crates/vfs/src/file_set.rs
@@ -5,8 +5,8 @@
use std::fmt;
use fst::{IntoStreamer, Streamer};
-use nohash_hasher::IntMap;
-use rustc_hash::FxHashMap;
+use indexmap::IndexMap;
+use rustc_hash::{FxBuildHasher, FxHashMap};
use crate::{AnchoredPath, FileId, Vfs, VfsPath};
@@ -14,7 +14,7 @@
#[derive(Default, Clone, Eq, PartialEq)]
pub struct FileSet {
files: FxHashMap<VfsPath, FileId>,
- paths: IntMap<FileId, VfsPath>,
+ paths: IndexMap<FileId, VfsPath, FxBuildHasher>,
}
impl FileSet {
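
Swapping `paths` from a `nohash` `IntMap` to an `IndexMap` keyed with `FxBuildHasher` makes iteration follow insertion order rather than hash order; that motivation is an assumption here, since the patch itself only shows the type change. A minimal sketch of the new map type:

use indexmap::IndexMap;
use rustc_hash::FxBuildHasher;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

fn main() {
    let mut paths: IndexMap<FileId, String, FxBuildHasher> = IndexMap::default();
    paths.insert(FileId(2), "/lib.rs".to_owned());
    paths.insert(FileId(1), "/main.rs".to_owned());
    // Iteration follows insertion order, not key or hash order.
    let order: Vec<_> = paths.keys().copied().collect();
    assert_eq!(order, vec![FileId(2), FileId(1)]);
}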