Merge ref '32e7a4b92b10' from rust-lang/rust
Pull recent changes from https://github.com/rust-lang/rust via Josh.
Upstream ref: 32e7a4b92b109c24e9822c862a7c74436b50e564
Filtered ref: d39f3479bfafb04026ed3afec68aa671d13e9c3c
This merge was created using https://github.com/rust-lang/josh-sync.
diff --git a/.github/workflows/rustc-pull.yml b/.github/workflows/rustc-pull.yml
new file mode 100644
index 0000000..2a842f3
--- /dev/null
+++ b/.github/workflows/rustc-pull.yml
@@ -0,0 +1,20 @@
+name: rustc-pull
+
+on:
+ workflow_dispatch:
+ schedule:
+ # Run at 04:00 UTC every Monday and Thursday
+ - cron: '0 4 * * 1,4'
+
+jobs:
+ pull:
+ if: github.repository == 'rust-lang/rust-analyzer'
+ uses: rust-lang/josh-sync/.github/workflows/rustc-pull.yml@main
+ with:
+ zulip-stream-id: 185405
+ zulip-bot-email: "rust-analyzer-ci-bot@rust-lang.zulipchat.com"
+ pr-base-branch: master
+ branch-name: rustc-pull
+ secrets:
+ zulip-api-token: ${{ secrets.ZULIP_API_TOKEN }}
+ token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/Cargo.lock b/Cargo.lock
index c471234..7d03300 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -396,15 +396,6 @@
]
[[package]]
-name = "directories"
-version = "6.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d"
-dependencies = [
- "dirs-sys",
-]
-
-[[package]]
name = "dirs"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1268,7 +1259,7 @@
"expect-test",
"intern",
"parser",
- "ra-ap-rustc_lexer 0.122.0",
+ "ra-ap-rustc_lexer 0.123.0",
"rustc-hash 2.1.1",
"smallvec",
"span",
@@ -1504,7 +1495,7 @@
"drop_bomb",
"edition",
"expect-test",
- "ra-ap-rustc_lexer 0.122.0",
+ "ra-ap-rustc_lexer 0.123.0",
"rustc-literal-escaper",
"stdx",
"tracing",
@@ -1614,7 +1605,7 @@
"object",
"paths",
"proc-macro-test",
- "ra-ap-rustc_lexer 0.122.0",
+ "ra-ap-rustc_lexer 0.123.0",
"span",
"syntax-bridge",
"tt",
@@ -1688,6 +1679,7 @@
"serde_json",
"span",
"stdx",
+ "temp-dir",
"toolchain",
"tracing",
"triomphe",
@@ -1756,9 +1748,9 @@
[[package]]
name = "ra-ap-rustc_abi"
-version = "0.122.0"
+version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb01e1fec578003c85481c1cad4ff8cd8195b07c2dc85ae3f716108507ae15d5"
+checksum = "f18c877575c259d127072e9bfc41d985202262fb4d6bfdae3d1252147c2562c2"
dependencies = [
"bitflags 2.9.1",
"ra-ap-rustc_hashes",
@@ -1768,18 +1760,18 @@
[[package]]
name = "ra-ap-rustc_hashes"
-version = "0.122.0"
+version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0ec056e72a472ffef8761ce96ece6c626eb07368c09d0105b6df30d27d07673"
+checksum = "2439ed1df3472443133b66949f81080dff88089b42f825761455463709ee1cad"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
-version = "0.122.0"
+version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fcdd1001db0295e59052e9f53aeda588bbe81e362534f4687d41bd44777b5a7"
+checksum = "57a24fe0be21be1f8ebc21dcb40129214fb4cefb0f2753f3d46b6dbe656a1a45"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@@ -1787,9 +1779,9 @@
[[package]]
name = "ra-ap-rustc_index_macros"
-version = "0.122.0"
+version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728d64dd98e25530b32e3f7c7c1e844e52722b269360daa1cdeba9dff9727a26"
+checksum = "844a27ddcad0116facae2df8e741fd788662cf93dc13029cd864f2b8013b81f9"
dependencies = [
"proc-macro2",
"quote",
@@ -1809,9 +1801,9 @@
[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.122.0"
+version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "415f0821f512608d825b3215489a6a6a2c18ed9f0045953d514e7ec23d4b90ab"
+checksum = "2b734cfcb577d09877799a22742f1bd398be6c00bc428d9de56d48d11ece5771"
dependencies = [
"memchr",
"unicode-properties",
@@ -1830,9 +1822,9 @@
[[package]]
name = "ra-ap-rustc_pattern_analysis"
-version = "0.122.0"
+version = "0.123.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4657fcfdfe06e2a02ec8180d4e7c95aecf4811ba50367e363d1a2300b7623284"
+checksum = "75b0ee1f059b9dea0818c6c7267478926eee95ba4c7dcf89c8db32fa165d3904"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@@ -2294,6 +2286,12 @@
]
[[package]]
+name = "temp-dir"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83176759e9416cf81ee66cb6508dbfe9c96f20b8b56265a39917551c23c70964"
+
+[[package]]
name = "tenthash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2592,7 +2590,7 @@
dependencies = [
"arrayvec",
"intern",
- "ra-ap-rustc_lexer 0.122.0",
+ "ra-ap-rustc_lexer 0.123.0",
"stdx",
"text-size",
]
@@ -3105,7 +3103,6 @@
version = "0.1.0"
dependencies = [
"anyhow",
- "directories",
"edition",
"either",
"flate2",
diff --git a/Cargo.toml b/Cargo.toml
index 700c116..e7cf021 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -89,11 +89,11 @@
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_lexer = { version = "0.122", default-features = false }
+ra-ap-rustc_lexer = { version = "0.123", default-features = false }
ra-ap-rustc_parse_format = { version = "0.121", default-features = false }
-ra-ap-rustc_index = { version = "0.122", default-features = false }
-ra-ap-rustc_abi = { version = "0.122", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.122", default-features = false }
+ra-ap-rustc_index = { version = "0.123", default-features = false }
+ra-ap-rustc_abi = { version = "0.123", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.123", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@@ -156,6 +156,7 @@
"const_generics",
] }
smol_str = "0.3.2"
+temp-dir = "0.1.16"
text-size = "1.1.1"
tracing = "0.1.41"
tracing-tree = "0.4.0"
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 8c9393b..0bf4fbd 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -30,6 +30,7 @@
pub enum ProcMacroLoadingError {
Disabled,
FailedToBuild,
+ ExpectedProcMacroArtifact,
MissingDylibPath,
NotYetBuilt,
NoProcMacros,
@@ -39,7 +40,8 @@
pub fn is_hard_error(&self) -> bool {
match self {
ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false,
- ProcMacroLoadingError::FailedToBuild
+ ProcMacroLoadingError::ExpectedProcMacroArtifact
+ | ProcMacroLoadingError::FailedToBuild
| ProcMacroLoadingError::MissingDylibPath
| ProcMacroLoadingError::NoProcMacros
| ProcMacroLoadingError::ProcMacroSrvError(_) => true,
@@ -51,10 +53,16 @@
impl fmt::Display for ProcMacroLoadingError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
+ ProcMacroLoadingError::ExpectedProcMacroArtifact => {
+ write!(f, "proc-macro crate did not build proc-macro artifact")
+ }
ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"),
ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"),
ProcMacroLoadingError::MissingDylibPath => {
- write!(f, "proc-macro crate build data is missing a dylib path")
+ write!(
+ f,
+ "proc-macro crate built but the dylib path is missing, this indicates a problem with your build system."
+ )
}
ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"),
ProcMacroLoadingError::NoProcMacros => {
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index d3dfc05..5695ab7 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -16,7 +16,7 @@
use cfg::{CfgExpr, CfgOptions};
use either::Either;
-use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name};
+use hir_expand::{InFile, MacroCallId, mod_path::ModPath, name::Name};
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
@@ -281,7 +281,6 @@
#[derive(Debug, Eq, PartialEq)]
pub enum ExpressionStoreDiagnostics {
InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions },
- MacroError { node: InFile<MacroCallPtr>, err: ExpandError },
UnresolvedMacroCall { node: InFile<MacroCallPtr>, path: ModPath },
UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name },
AwaitOutsideOfAsync { node: InFile<AstPtr<ast::AwaitExpr>>, location: String },
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 4e87774..abd1382 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -960,37 +960,28 @@
impl_trait_lower_fn: ImplTraitLowerFn<'_>,
) -> TypeBound {
match node.kind() {
- ast::TypeBoundKind::PathType(path_type) => {
- let m = match node.question_mark_token() {
- Some(_) => TraitBoundModifier::Maybe,
- None => TraitBoundModifier::None,
- };
- self.lower_path_type(&path_type, impl_trait_lower_fn)
- .map(|p| {
- TypeBound::Path(self.alloc_path(p, AstPtr::new(&path_type).upcast()), m)
- })
- .unwrap_or(TypeBound::Error)
- }
- ast::TypeBoundKind::ForType(for_type) => {
- let lt_refs = match for_type.generic_param_list() {
+ ast::TypeBoundKind::PathType(binder, path_type) => {
+ let binder = match binder.and_then(|it| it.generic_param_list()) {
Some(gpl) => gpl
.lifetime_params()
.flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(<.text())))
.collect(),
None => ThinVec::default(),
};
- let path = for_type.ty().and_then(|ty| match &ty {
- ast::Type::PathType(path_type) => {
- self.lower_path_type(path_type, impl_trait_lower_fn).map(|p| (p, ty))
- }
- _ => None,
- });
- match path {
- Some((p, ty)) => {
- TypeBound::ForLifetime(lt_refs, self.alloc_path(p, AstPtr::new(&ty)))
- }
- None => TypeBound::Error,
- }
+ let m = match node.question_mark_token() {
+ Some(_) => TraitBoundModifier::Maybe,
+ None => TraitBoundModifier::None,
+ };
+ self.lower_path_type(&path_type, impl_trait_lower_fn)
+ .map(|p| {
+ let path = self.alloc_path(p, AstPtr::new(&path_type).upcast());
+ if binder.is_empty() {
+ TypeBound::Path(path, m)
+ } else {
+ TypeBound::ForLifetime(binder, path)
+ }
+ })
+ .unwrap_or(TypeBound::Error)
}
ast::TypeBoundKind::Use(gal) => TypeBound::Use(
gal.use_bound_generic_args()
@@ -1981,13 +1972,7 @@
return collector(self, None);
}
};
- if record_diagnostics {
- if let Some(err) = res.err {
- self.store
- .diagnostics
- .push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err });
- }
- }
+ // No need to push macro and parsing errors as they'll be recreated from `macro_calls()`.
match res.value {
Some((mark, expansion)) => {
@@ -1997,10 +1982,6 @@
self.store.expansions.insert(macro_call_ptr, macro_file);
}
- if record_diagnostics {
- // FIXME: Report parse errors here
- }
-
let id = collector(self, expansion.map(|it| it.tree()));
self.expander.exit(mark);
id
diff --git a/crates/hir-def/src/expr_store/lower/generics.rs b/crates/hir-def/src/expr_store/lower/generics.rs
index 02a1d27..c570df4 100644
--- a/crates/hir-def/src/expr_store/lower/generics.rs
+++ b/crates/hir-def/src/expr_store/lower/generics.rs
@@ -180,17 +180,18 @@
continue;
};
- let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| {
- // Higher-Ranked Trait Bounds
- param_list
- .lifetime_params()
- .map(|lifetime_param| {
- lifetime_param
- .lifetime()
- .map_or_else(Name::missing, |lt| Name::new_lifetime(<.text()))
- })
- .collect()
- });
+ let lifetimes: Option<Box<_>> =
+ pred.for_binder().and_then(|it| it.generic_param_list()).map(|param_list| {
+ // Higher-Ranked Trait Bounds
+ param_list
+ .lifetime_params()
+ .map(|lifetime_param| {
+ lifetime_param
+ .lifetime()
+ .map_or_else(Name::missing, |lt| Name::new_lifetime(<.text()))
+ })
+ .collect()
+ });
for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
self.lower_type_bound_as_predicate(ec, bound, lifetimes.as_deref(), target);
}
diff --git a/crates/hir-def/src/expr_store/path.rs b/crates/hir-def/src/expr_store/path.rs
index 19c7ce0..55e738b 100644
--- a/crates/hir-def/src/expr_store/path.rs
+++ b/crates/hir-def/src/expr_store/path.rs
@@ -27,7 +27,7 @@
}
// This type is being used a lot, make sure it doesn't grow unintentionally.
-#[cfg(target_arch = "x86_64")]
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
const _: () = {
assert!(size_of::<Path>() == 24);
assert!(size_of::<Option<Path>>() == 24);
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index eacc3f3..da0f058 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -148,7 +148,7 @@
Error,
}
-#[cfg(target_arch = "x86_64")]
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
const _: () = assert!(size_of::<TypeRef>() == 24);
pub type TypeRefId = Idx<TypeRef>;
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index 5ae6bf6..cc531f0 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -175,8 +175,9 @@
});
}
- let receiver_ty = self.infer[*receiver].clone();
- checker.prev_receiver_ty = Some(receiver_ty);
+ if let Some(receiver_ty) = self.infer.type_of_expr_with_adjust(*receiver) {
+ checker.prev_receiver_ty = Some(receiver_ty.clone());
+ }
}
}
@@ -187,7 +188,9 @@
arms: &[MatchArm],
db: &dyn HirDatabase,
) {
- let scrut_ty = &self.infer[scrutinee_expr];
+ let Some(scrut_ty) = self.infer.type_of_expr_with_adjust(scrutinee_expr) else {
+ return;
+ };
if scrut_ty.contains_unknown() {
return;
}
@@ -200,7 +203,7 @@
// Note: Skipping the entire diagnostic rather than just not including a faulty match arm is
// preferred to avoid the chance of false positives.
for arm in arms {
- let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else {
+ let Some(pat_ty) = self.infer.type_of_pat_with_adjust(arm.pat) else {
return;
};
if pat_ty.contains_unknown() {
@@ -328,7 +331,7 @@
continue;
}
let Some(initializer) = initializer else { continue };
- let ty = &self.infer[initializer];
+ let Some(ty) = self.infer.type_of_expr_with_adjust(initializer) else { continue };
if ty.contains_unknown() {
continue;
}
@@ -433,44 +436,44 @@
Statement::Expr { expr, .. } => Some(*expr),
_ => None,
});
- if let Some(last_then_expr) = last_then_expr {
- let last_then_expr_ty = &self.infer[last_then_expr];
- if last_then_expr_ty.is_never() {
- // Only look at sources if the then branch diverges and we have an else branch.
- let source_map = db.body_with_source_map(self.owner).1;
- let Ok(source_ptr) = source_map.expr_syntax(id) else {
- return;
- };
- let root = source_ptr.file_syntax(db);
- let either::Left(ast::Expr::IfExpr(if_expr)) =
- source_ptr.value.to_node(&root)
- else {
- return;
- };
- let mut top_if_expr = if_expr;
- loop {
- let parent = top_if_expr.syntax().parent();
- let has_parent_expr_stmt_or_stmt_list =
- parent.as_ref().is_some_and(|node| {
- ast::ExprStmt::can_cast(node.kind())
- | ast::StmtList::can_cast(node.kind())
- });
- if has_parent_expr_stmt_or_stmt_list {
- // Only emit diagnostic if parent or direct ancestor is either
- // an expr stmt or a stmt list.
- break;
- }
- let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else {
- // Bail if parent is neither an if expr, an expr stmt nor a stmt list.
- return;
- };
- // Check parent if expr.
- top_if_expr = parent_if_expr;
+ if let Some(last_then_expr) = last_then_expr
+ && let Some(last_then_expr_ty) =
+ self.infer.type_of_expr_with_adjust(last_then_expr)
+ && last_then_expr_ty.is_never()
+ {
+ // Only look at sources if the then branch diverges and we have an else branch.
+ let source_map = db.body_with_source_map(self.owner).1;
+ let Ok(source_ptr) = source_map.expr_syntax(id) else {
+ return;
+ };
+ let root = source_ptr.file_syntax(db);
+ let either::Left(ast::Expr::IfExpr(if_expr)) = source_ptr.value.to_node(&root)
+ else {
+ return;
+ };
+ let mut top_if_expr = if_expr;
+ loop {
+ let parent = top_if_expr.syntax().parent();
+ let has_parent_expr_stmt_or_stmt_list =
+ parent.as_ref().is_some_and(|node| {
+ ast::ExprStmt::can_cast(node.kind())
+ | ast::StmtList::can_cast(node.kind())
+ });
+ if has_parent_expr_stmt_or_stmt_list {
+ // Only emit diagnostic if parent or direct ancestor is either
+ // an expr stmt or a stmt list.
+ break;
}
-
- self.diagnostics
- .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id })
+ let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else {
+ // Bail if parent is neither an if expr, an expr stmt nor a stmt list.
+ return;
+ };
+ // Check parent if expr.
+ top_if_expr = parent_if_expr;
}
+
+ self.diagnostics
+ .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id })
}
}
}
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index e880438..7c39afa 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -561,6 +561,32 @@
ExprOrPatId::PatId(id) => self.type_of_pat.get(id),
}
}
+ pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option<&Ty> {
+ match self.expr_adjustments.get(&id).and_then(|adjustments| {
+ adjustments
+ .iter()
+ .filter(|adj| {
+ // https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140
+ !matches!(
+ adj,
+ Adjustment {
+ kind: Adjust::NeverToAny,
+ target,
+ } if target.is_never()
+ )
+ })
+ .next_back()
+ }) {
+ Some(adjustment) => Some(&adjustment.target),
+ None => self.type_of_expr.get(id),
+ }
+ }
+ pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option<&Ty> {
+ match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
+ adjusted @ Some(_) => adjusted,
+ None => self.type_of_pat.get(id),
+ }
+ }
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}
diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs
index 372a9df..3f310c2 100644
--- a/crates/hir-ty/src/layout/adt.rs
+++ b/crates/hir-ty/src/layout/adt.rs
@@ -3,9 +3,9 @@
use std::{cmp, ops::Bound};
use hir_def::{
+ AdtId, VariantId,
layout::{Integer, ReprOptions, TargetDataLayout},
signatures::{StructFlags, VariantFields},
- AdtId, VariantId,
};
use intern::sym;
use rustc_index::IndexVec;
@@ -13,9 +13,9 @@
use triomphe::Arc;
use crate::{
- db::HirDatabase,
- layout::{field_ty, Layout, LayoutError},
Substitution, TraitEnvironment,
+ db::HirDatabase,
+ layout::{Layout, LayoutError, field_ty},
};
use super::LayoutCx;
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index f32b6af..d61e7de 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -590,9 +590,14 @@
.resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
let pointee_sized = LangItem::PointeeSized
.resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
- if meta_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) {
+ let destruct = LangItem::Destruct
+ .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate());
+ let hir_trait_id = trait_ref.hir_trait_id();
+ if meta_sized.is_some_and(|it| it == hir_trait_id)
+ || destruct.is_some_and(|it| it == hir_trait_id)
+ {
// Ignore this bound
- } else if pointee_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) {
+ } else if pointee_sized.is_some_and(|it| it == hir_trait_id) {
// Regard this as `?Sized` bound
ctx.ty_ctx().unsized_types.insert(self_ty);
} else {
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index 238753e..c4c17a9 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -2349,3 +2349,37 @@
"#]],
);
}
+
+#[test]
+fn rust_destruct_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+#[lang = "destruct"]
+pub trait Destruct {}
+"#,
+ );
+}
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 1b2b769..4ddb04b 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -1922,10 +1922,6 @@
Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints);
}
- source_map
- .macro_calls()
- .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
-
expr_store_diagnostics(db, acc, &source_map);
let infer = db.infer(self.into());
@@ -2130,9 +2126,9 @@
}
}
-fn expr_store_diagnostics(
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic<'_>>,
+fn expr_store_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
source_map: &ExpressionStoreSourceMap,
) {
for diag in source_map.diagnostics() {
@@ -2140,30 +2136,6 @@
ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => {
InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
}
- ExpressionStoreDiagnostics::MacroError { node, err } => {
- let RenderedExpandError { message, error, kind } = err.render_to_string(db);
-
- let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
- let precise_location = if editioned_file_id == node.file_id {
- Some(
- err.span().range
- + db.ast_id_map(editioned_file_id.into())
- .get_erased(err.span().anchor.ast_id)
- .text_range()
- .start(),
- )
- } else {
- None
- };
- MacroError {
- node: (node).map(|it| it.into()),
- precise_location,
- message,
- error,
- kind,
- }
- .into()
- }
ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
precise_location: None,
@@ -2182,6 +2154,10 @@
}
});
}
+
+ source_map
+ .macro_calls()
+ .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc));
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Function {
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index ecc6e5f..0b554a9 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -441,7 +441,7 @@
) -> Option<GenericSubstitution<'db>> {
let body = self.store()?;
if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] {
- let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?;
+ let (adt, subst) = infer.type_of_expr_with_adjust(object_expr)?.as_adt()?;
return Some(GenericSubstitution::new(
adt.into(),
subst.clone(),
@@ -1780,10 +1780,3 @@
let ctx = span_map.span_at(name.value.text_range().start()).ctx;
HygieneId::new(ctx.opaque_and_semitransparent(db))
}
-
-fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
- match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) {
- Some(adjustment) => Some(&adjustment.target),
- None => Some(&infer[id]),
- }
-}
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 9f9d219..ab183ac 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -2,6 +2,7 @@
use syntax::{
Edition,
ast::{self, AstNode, make},
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{
@@ -147,45 +148,78 @@
let target = impl_def.syntax().text_range();
acc.add(AssistId::quick_fix(assist_id), label, target, |edit| {
- let new_impl_def = edit.make_mut(impl_def.clone());
- let first_new_item = add_trait_assoc_items_to_impl(
+ let new_item = add_trait_assoc_items_to_impl(
&ctx.sema,
ctx.config,
&missing_items,
trait_,
- &new_impl_def,
+ &impl_def,
&target_scope,
);
+ let Some((first_new_item, other_items)) = new_item.split_first() else {
+ return;
+ };
+
+ let mut first_new_item = if let DefaultMethods::No = mode
+ && let ast::AssocItem::Fn(func) = &first_new_item
+ && let Some(body) = try_gen_trait_body(
+ ctx,
+ func,
+ trait_ref,
+ &impl_def,
+ target_scope.krate().edition(ctx.sema.db),
+ )
+ && let Some(func_body) = func.body()
+ {
+ let mut func_editor = SyntaxEditor::new(first_new_item.syntax().clone_subtree());
+ func_editor.replace(func_body.syntax(), body.syntax());
+ ast::AssocItem::cast(func_editor.finish().new_root().clone())
+ } else {
+ Some(first_new_item.clone())
+ };
+
+ let new_assoc_items = first_new_item
+ .clone()
+ .into_iter()
+ .chain(other_items.iter().cloned())
+ .map(either::Either::Right)
+ .collect::<Vec<_>>();
+
+ let mut editor = edit.make_editor(impl_def.syntax());
+ if let Some(assoc_item_list) = impl_def.assoc_item_list() {
+ let items = new_assoc_items.into_iter().filter_map(either::Either::right).collect();
+ assoc_item_list.add_items(&mut editor, items);
+ } else {
+ let assoc_item_list = make::assoc_item_list(Some(new_assoc_items)).clone_for_update();
+ editor.insert_all(
+ Position::after(impl_def.syntax()),
+ vec![make::tokens::whitespace(" ").into(), assoc_item_list.syntax().clone().into()],
+ );
+ first_new_item = assoc_item_list.assoc_items().next();
+ }
+
if let Some(cap) = ctx.config.snippet_cap {
let mut placeholder = None;
if let DefaultMethods::No = mode {
- if let ast::AssocItem::Fn(func) = &first_new_item {
- if try_gen_trait_body(
- ctx,
- func,
- trait_ref,
- &impl_def,
- target_scope.krate().edition(ctx.sema.db),
- )
- .is_none()
+ if let Some(ast::AssocItem::Fn(func)) = &first_new_item {
+ if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
+ && m.syntax().text() == "todo!()"
{
- if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
- {
- if m.syntax().text() == "todo!()" {
- placeholder = Some(m);
- }
- }
+ placeholder = Some(m);
}
}
}
if let Some(macro_call) = placeholder {
- edit.add_placeholder_snippet(cap, macro_call);
- } else {
- edit.add_tabstop_before(cap, first_new_item);
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(macro_call.syntax(), placeholder);
+ } else if let Some(first_new_item) = first_new_item {
+ let tabstop = edit.make_tabstop_before(cap);
+ editor.add_annotation(first_new_item.syntax(), tabstop);
};
};
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
})
}
@@ -195,7 +229,7 @@
trait_ref: hir::TraitRef<'_>,
impl_def: &ast::Impl,
edition: Edition,
-) -> Option<()> {
+) -> Option<ast::BlockExpr> {
let trait_path = make::ext::ident_path(
&trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string(),
);
@@ -322,7 +356,7 @@
}
#[test]
- fn test_impl_def_without_braces() {
+ fn test_impl_def_without_braces_macro() {
check_assist(
add_missing_impl_members,
r#"
@@ -341,6 +375,33 @@
}
#[test]
+ fn test_impl_def_without_braces_tabstop_first_item() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ type Output;
+ fn foo(&self);
+}
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo {
+ type Output;
+ fn foo(&self);
+}
+struct S;
+impl Foo for S {
+ $0type Output;
+
+ fn foo(&self) {
+ todo!()
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn fill_in_type_params_1() {
check_assist(
add_missing_impl_members,
diff --git a/crates/ide-assists/src/handlers/convert_bool_then.rs b/crates/ide-assists/src/handlers/convert_bool_then.rs
index bcd06c1e..d7b7e8d 100644
--- a/crates/ide-assists/src/handlers/convert_bool_then.rs
+++ b/crates/ide-assists/src/handlers/convert_bool_then.rs
@@ -228,8 +228,7 @@
closure_body,
Some(ast::ElseBranch::Block(make.block_expr(None, Some(none_path)))),
)
- .indent(mcall.indent_level())
- .clone_for_update();
+ .indent(mcall.indent_level());
editor.replace(mcall.syntax().clone(), if_expr.syntax().clone());
editor.add_mappings(make.finish_with_mappings());
diff --git a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
index 71a61f2..2ea032f 100644
--- a/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
+++ b/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -13,7 +13,6 @@
edit::{AstNodeEdit, IndentLevel},
make,
},
- ted,
};
use crate::{
@@ -117,7 +116,7 @@
then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?;
- let then_block_items = then_block.dedent(IndentLevel(1)).clone_for_update();
+ let then_block_items = then_block.dedent(IndentLevel(1));
let end_of_then = then_block_items.syntax().last_child_or_token()?;
let end_of_then = if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) {
@@ -132,7 +131,6 @@
"Convert to guarded return",
target,
|edit| {
- let if_expr = edit.make_mut(if_expr);
let if_indent_level = IndentLevel::from_node(if_expr.syntax());
let replacement = match if_let_pat {
None => {
@@ -143,7 +141,7 @@
let cond = invert_boolean_expression_legacy(cond_expr);
make::expr_if(cond, then_branch, None).indent(if_indent_level)
};
- new_expr.syntax().clone_for_update()
+ new_expr.syntax().clone()
}
Some(pat) => {
// If-let.
@@ -154,7 +152,7 @@
ast::make::tail_only_block_expr(early_expression),
);
let let_else_stmt = let_else_stmt.indent(if_indent_level);
- let_else_stmt.syntax().clone_for_update()
+ let_else_stmt.syntax().clone()
}
};
@@ -168,8 +166,9 @@
.take_while(|i| *i != end_of_then),
)
.collect();
-
- ted::replace_with_many(if_expr.syntax(), then_statements)
+ let mut editor = edit.make_editor(if_expr.syntax());
+ editor.replace_with_many(if_expr.syntax(), then_statements);
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@@ -214,7 +213,6 @@
"Convert to guarded return",
target,
|edit| {
- let let_stmt = edit.make_mut(let_stmt);
let let_indent_level = IndentLevel::from_node(let_stmt.syntax());
let replacement = {
@@ -225,10 +223,11 @@
ast::make::tail_only_block_expr(early_expression),
);
let let_else_stmt = let_else_stmt.indent(let_indent_level);
- let_else_stmt.syntax().clone_for_update()
+ let_else_stmt.syntax().clone()
};
-
- ted::replace(let_stmt.syntax(), replacement)
+ let mut editor = edit.make_editor(let_stmt.syntax());
+ editor.replace(let_stmt.syntax(), replacement);
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
index 54699a9..cdc0e96 100644
--- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
+++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
@@ -8,8 +8,7 @@
AstNode, AstToken, NodeOrToken,
SyntaxKind::WHITESPACE,
T,
- ast::{self, make},
- ted,
+ ast::{self, make, syntax_factory::SyntaxFactory},
};
// Assist: extract_expressions_from_format_string
@@ -58,8 +57,6 @@
"Extract format expressions",
tt.syntax().text_range(),
|edit| {
- let tt = edit.make_mut(tt);
-
// Extract existing arguments in macro
let tokens = tt.token_trees_and_tokens().collect_vec();
@@ -131,8 +128,10 @@
}
// Insert new args
- let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update();
- ted::replace(tt.syntax(), new_tt.syntax());
+ let make = SyntaxFactory::with_mappings();
+ let new_tt = make.token_tree(tt_delimiter, new_tt_bits);
+ let mut editor = edit.make_editor(tt.syntax());
+ editor.replace(tt.syntax(), new_tt.syntax());
if let Some(cap) = ctx.config.snippet_cap {
// Add placeholder snippets over placeholder args
@@ -145,15 +144,19 @@
};
if stdx::always!(placeholder.kind() == T![_]) {
- edit.add_placeholder_snippet_token(cap, placeholder);
+ let annotation = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(placeholder, annotation);
}
}
// Add the final tabstop after the format literal
if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) {
- edit.add_tabstop_after_token(cap, literal);
+ let annotation = edit.make_tabstop_after(cap);
+ editor.add_annotation(literal, annotation);
}
}
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
},
);
diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
index b9c4228..9095b18 100644
--- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -16,8 +16,9 @@
SyntaxKind::*,
SyntaxNode, T,
ast::{
- self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, edit::IndentLevel,
- edit_in_place::Indent, make,
+ self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
},
match_ast, ted,
};
@@ -110,20 +111,30 @@
let generics = generic_params.as_ref().map(|generics| generics.clone_for_update());
// resolve GenericArg in field_list to actual type
- let field_list = field_list.clone_for_update();
- if let Some((target_scope, source_scope)) =
+ let field_list = if let Some((target_scope, source_scope)) =
ctx.sema.scope(enum_ast.syntax()).zip(ctx.sema.scope(field_list.syntax()))
{
- PathTransform::generic_transformation(&target_scope, &source_scope)
- .apply(field_list.syntax());
- }
+ let field_list = field_list.reset_indent();
+ let field_list =
+ PathTransform::generic_transformation(&target_scope, &source_scope)
+ .apply(field_list.syntax());
+ match_ast! {
+ match field_list {
+ ast::RecordFieldList(field_list) => Either::Left(field_list),
+ ast::TupleFieldList(field_list) => Either::Right(field_list),
+ _ => unreachable!(),
+ }
+ }
+ } else {
+ field_list.clone_for_update()
+ };
let def =
create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast);
let enum_ast = variant.parent_enum();
let indent = enum_ast.indent_level();
- def.reindent_to(indent);
+ let def = def.indent(indent);
ted::insert_all(
ted::Position::before(enum_ast.syntax()),
@@ -279,7 +290,7 @@
field_list.clone().into()
}
};
- field_list.reindent_to(IndentLevel::single());
+ let field_list = field_list.indent(IndentLevel::single());
let strukt = make::struct_(enum_vis, name, generics, field_list).clone_for_update();
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs
index 31e84e9..db2d316 100644
--- a/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -7,7 +7,9 @@
NodeOrToken, SyntaxKind, SyntaxNode, T,
algo::ancestors_at_offset,
ast::{
- self, AstNode, edit::IndentLevel, edit_in_place::Indent, make,
+ self, AstNode,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
syntax_factory::SyntaxFactory,
},
syntax_editor::Position,
@@ -253,12 +255,11 @@
// `expr_replace` is a descendant of `to_wrap`, so we just replace it with `name_expr`.
editor.replace(expr_replace, name_expr.syntax());
make.block_expr([new_stmt], Some(to_wrap.clone()))
- };
+ }
+ // fixup indentation of block
+ .indent_with_mapping(indent_to, &make);
editor.replace(to_wrap.syntax(), block.syntax());
-
- // fixup indentation of block
- block.indent(indent_to);
}
}
diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index ca66cb6..6063898 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -114,9 +114,13 @@
let source_scope = ctx.sema.scope(v.syntax());
let target_scope = ctx.sema.scope(strukt.syntax());
if let (Some(s), Some(t)) = (source_scope, target_scope) {
- PathTransform::generic_transformation(&t, &s).apply(v.syntax());
+ ast::Fn::cast(
+ PathTransform::generic_transformation(&t, &s).apply(v.syntax()),
+ )
+ .unwrap_or(v)
+ } else {
+ v
}
- v
}
None => return,
};
diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index 848c638..e96250f 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -255,7 +255,6 @@
delegee: &Delegee,
edition: Edition,
) -> Option<ast::Impl> {
- let delegate: ast::Impl;
let db = ctx.db();
let ast_strukt = &strukt.strukt;
let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string()));
@@ -266,7 +265,7 @@
let bound_def = ctx.sema.source(delegee.to_owned())?.value;
let bound_params = bound_def.generic_param_list();
- delegate = make::impl_trait(
+ let delegate = make::impl_trait(
delegee.is_unsafe(db),
bound_params.clone(),
bound_params.map(|params| params.to_generic_args()),
@@ -304,7 +303,7 @@
let target_scope = ctx.sema.scope(strukt.strukt.syntax())?;
let source_scope = ctx.sema.scope(bound_def.syntax())?;
let transform = PathTransform::generic_transformation(&target_scope, &source_scope);
- transform.apply(delegate.syntax());
+ ast::Impl::cast(transform.apply(delegate.syntax()))
}
Delegee::Impls(trait_, old_impl) => {
let old_impl = ctx.sema.source(old_impl.to_owned())?.value;
@@ -358,20 +357,28 @@
// 2.3) Instantiate generics with `transform_impl`, this step also
// remove unused params.
- let mut trait_gen_args = old_impl.trait_()?.generic_arg_list();
- if let Some(trait_args) = &mut trait_gen_args {
- *trait_args = trait_args.clone_for_update();
- transform_impl(ctx, ast_strukt, &old_impl, &transform_args, trait_args.syntax())?;
- }
+ let trait_gen_args = old_impl.trait_()?.generic_arg_list().and_then(|trait_args| {
+ let trait_args = &mut trait_args.clone_for_update();
+ if let Some(new_args) = transform_impl(
+ ctx,
+ ast_strukt,
+ &old_impl,
+ &transform_args,
+ trait_args.clone_subtree(),
+ ) {
+ *trait_args = new_args.clone_subtree();
+ Some(new_args)
+ } else {
+ None
+ }
+ });
let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args());
-
let path_type =
make::ty(&trait_.name(db).display_no_db(edition).to_smolstr()).clone_for_update();
- transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?;
-
+ let path_type = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type)?;
// 3) Generate delegate trait impl
- delegate = make::impl_trait(
+ let delegate = make::impl_trait(
trait_.is_unsafe(db),
trait_gen_params,
trait_gen_args,
@@ -385,7 +392,6 @@
None,
)
.clone_for_update();
-
// Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
let qualified_path_type =
make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?));
@@ -398,7 +404,7 @@
.filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
{
let item = item.clone_for_update();
- transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item.syntax())?;
+ let item = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item)?;
let assoc = process_assoc_item(item, qualified_path_type.clone(), field_name)?;
delegate_assoc_items.add_item(assoc);
@@ -408,19 +414,18 @@
if let Some(wc) = delegate.where_clause() {
remove_useless_where_clauses(&delegate.trait_()?, &delegate.self_ty()?, wc);
}
+ Some(delegate)
}
}
-
- Some(delegate)
}
-fn transform_impl(
+fn transform_impl<N: ast::AstNode>(
ctx: &AssistContext<'_>,
strukt: &ast::Struct,
old_impl: &ast::Impl,
args: &Option<GenericArgList>,
- syntax: &syntax::SyntaxNode,
-) -> Option<()> {
+ syntax: N,
+) -> Option<N> {
let source_scope = ctx.sema.scope(old_impl.self_ty()?.syntax())?;
let target_scope = ctx.sema.scope(strukt.syntax())?;
let hir_old_impl = ctx.sema.to_impl_def(old_impl)?;
@@ -437,8 +442,7 @@
},
);
- transform.apply(syntax);
- Some(())
+ N::cast(transform.apply(syntax.syntax()))
}
fn remove_instantiated_params(
@@ -570,9 +574,7 @@
let scope = ctx.sema.scope(item.syntax())?;
let transform = PathTransform::adt_transformation(&scope, &scope, hir_adt, args.clone());
- transform.apply(item.syntax());
-
- Some(item)
+ N::cast(transform.apply(item.syntax()))
}
fn has_self_type(trait_: hir::Trait, ctx: &AssistContext<'_>) -> Option<()> {
@@ -767,7 +769,7 @@
)
.clone_for_update();
- Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update()))
+ Some(AssocItem::Fn(func.indent(edit::IndentLevel(1))))
}
fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<AssocItem> {
diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs
index 78ae815..3290a70 100644
--- a/crates/ide-assists/src/handlers/generate_function.rs
+++ b/crates/ide-assists/src/handlers/generate_function.rs
@@ -743,17 +743,30 @@
let where_preds: Vec<ast::WherePred> =
where_preds.into_iter().map(|it| it.node.clone_for_update()).collect();
- // 4. Rewrite paths
- if let Some(param) = generic_params.first() {
- let source_scope = ctx.sema.scope(param.syntax())?;
- let target_scope = ctx.sema.scope(&target.parent())?;
- if source_scope.module() != target_scope.module() {
+ let (generic_params, where_preds): (Vec<ast::GenericParam>, Vec<ast::WherePred>) =
+ if let Some(param) = generic_params.first()
+ && let source_scope = ctx.sema.scope(param.syntax())?
+ && let target_scope = ctx.sema.scope(&target.parent())?
+ && source_scope.module() != target_scope.module()
+ {
+ // 4. Rewrite paths
let transform = PathTransform::generic_transformation(&target_scope, &source_scope);
let generic_params = generic_params.iter().map(|it| it.syntax());
let where_preds = where_preds.iter().map(|it| it.syntax());
- transform.apply_all(generic_params.chain(where_preds));
- }
- }
+ transform
+ .apply_all(generic_params.chain(where_preds))
+ .into_iter()
+ .filter_map(|it| {
+ if let Some(it) = ast::GenericParam::cast(it.clone()) {
+ Some(either::Either::Left(it))
+ } else {
+ ast::WherePred::cast(it).map(either::Either::Right)
+ }
+ })
+ .partition_map(|it| it)
+ } else {
+ (generic_params, where_preds)
+ };
let generic_param_list = make::generic_param_list(generic_params);
let where_clause =
diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs
index 14601ca..31cadcf 100644
--- a/crates/ide-assists/src/handlers/generate_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_impl.rs
@@ -1,12 +1,17 @@
use syntax::{
- ast::{self, AstNode, HasName, edit_in_place::Indent, make},
+ ast::{self, AstNode, HasGenericParams, HasName, edit_in_place::Indent, make},
syntax_editor::{Position, SyntaxEditor},
};
-use crate::{AssistContext, AssistId, Assists, utils};
+use crate::{
+ AssistContext, AssistId, Assists,
+ utils::{self, DefaultMethods, IgnoreAssocItems},
+};
-fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &ast::Adt) {
+fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &impl Indent) {
let indent = nominal.indent_level();
+
+ impl_.indent(indent);
editor.insert_all(
Position::after(nominal.syntax()),
vec![
@@ -120,6 +125,126 @@
)
}
+// Assist: generate_impl_trait
+//
+// Adds this trait impl for a type.
+//
+// ```
+// trait $0Foo {
+// fn foo(&self) -> i32;
+// }
+// ```
+// ->
+// ```
+// trait Foo {
+// fn foo(&self) -> i32;
+// }
+//
+// impl Foo for ${1:_} {
+// fn foo(&self) -> i32 {
+// $0todo!()
+// }
+// }
+// ```
+pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let name = ctx.find_node_at_offset::<ast::Name>()?;
+ let trait_ = ast::Trait::cast(name.syntax().parent()?)?;
+ let target_scope = ctx.sema.scope(trait_.syntax())?;
+ let hir_trait = ctx.sema.to_def(&trait_)?;
+
+ let target = trait_.syntax().text_range();
+ acc.add(
+ AssistId::generate("generate_impl_trait"),
+ format!("Generate `{name}` impl for type"),
+ target,
+ |edit| {
+ let mut editor = edit.make_editor(trait_.syntax());
+
+ let holder_arg = ast::GenericArg::TypeArg(make::type_arg(make::ty_placeholder()));
+ let missing_items = utils::filter_assoc_items(
+ &ctx.sema,
+ &hir_trait.items(ctx.db()),
+ DefaultMethods::No,
+ IgnoreAssocItems::DocHiddenAttrPresent,
+ );
+
+ let trait_gen_args = trait_.generic_param_list().map(|list| {
+ make::generic_arg_list(list.generic_params().map(|_| holder_arg.clone()))
+ });
+
+ let make_impl_ = |body| {
+ make::impl_trait(
+ trait_.unsafe_token().is_some(),
+ None,
+ trait_gen_args.clone(),
+ None,
+ None,
+ false,
+ make::ty(&name.text()),
+ make::ty_placeholder(),
+ None,
+ None,
+ body,
+ )
+ .clone_for_update()
+ };
+
+ let impl_ = if missing_items.is_empty() {
+ make_impl_(None)
+ } else {
+ let impl_ = make_impl_(None);
+ let assoc_items = utils::add_trait_assoc_items_to_impl(
+ &ctx.sema,
+ ctx.config,
+ &missing_items,
+ hir_trait,
+ &impl_,
+ &target_scope,
+ );
+ let assoc_items = assoc_items.into_iter().map(either::Either::Right).collect();
+ let assoc_item_list = make::assoc_item_list(Some(assoc_items));
+ make_impl_(Some(assoc_item_list))
+ };
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ if let Some(generics) = impl_.trait_().and_then(|it| it.generic_arg_list()) {
+ for generic in generics.generic_args() {
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(generic.syntax(), placeholder);
+ }
+ }
+
+ if let Some(ty) = impl_.self_ty() {
+ let placeholder = edit.make_placeholder_snippet(cap);
+ editor.add_annotation(ty.syntax(), placeholder);
+ }
+
+ if let Some(expr) =
+ impl_.assoc_item_list().and_then(|it| it.assoc_items().find_map(extract_expr))
+ {
+ let tabstop = edit.make_tabstop_before(cap);
+ editor.add_annotation(expr.syntax(), tabstop);
+ } else if let Some(l_curly) =
+ impl_.assoc_item_list().and_then(|it| it.l_curly_token())
+ {
+ let tabstop = edit.make_tabstop_after(cap);
+ editor.add_annotation(l_curly, tabstop);
+ }
+ }
+
+ insert_impl(&mut editor, &impl_, &trait_);
+ edit.add_file_edits(ctx.vfs_file_id(), editor);
+ },
+ )
+}
+
+fn extract_expr(item: ast::AssocItem) -> Option<ast::Expr> {
+ let ast::AssocItem::Fn(f) = item else {
+ return None;
+ };
+ f.body()?.tail_expr()
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_target};
@@ -492,4 +617,209 @@
"#,
);
}
+
+ #[test]
+ fn test_add_impl_trait() {
+ check_assist(
+ generate_impl_trait,
+ r#"
+ trait $0Foo {
+ fn foo(&self) -> i32;
+
+ fn bar(&self) -> i32 {
+ self.foo()
+ }
+ }
+ "#,
+ r#"
+ trait Foo {
+ fn foo(&self) -> i32;
+
+ fn bar(&self) -> i32 {
+ self.foo()
+ }
+ }
+
+ impl Foo for ${1:_} {
+ fn foo(&self) -> i32 {
+ $0todo!()
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_impl_trait_use_generic() {
+ check_assist(
+ generate_impl_trait,
+ r#"
+ trait $0Foo<T> {
+ fn foo(&self) -> T;
+
+ fn bar(&self) -> T {
+ self.foo()
+ }
+ }
+ "#,
+ r#"
+ trait Foo<T> {
+ fn foo(&self) -> T;
+
+ fn bar(&self) -> T {
+ self.foo()
+ }
+ }
+
+ impl Foo<${1:_}> for ${2:_} {
+ fn foo(&self) -> _ {
+ $0todo!()
+ }
+ }
+ "#,
+ );
+ check_assist(
+ generate_impl_trait,
+ r#"
+ trait $0Foo<T, U> {
+ fn foo(&self) -> T;
+
+ fn bar(&self) -> T {
+ self.foo()
+ }
+ }
+ "#,
+ r#"
+ trait Foo<T, U> {
+ fn foo(&self) -> T;
+
+ fn bar(&self) -> T {
+ self.foo()
+ }
+ }
+
+ impl Foo<${1:_}, ${2:_}> for ${3:_} {
+ fn foo(&self) -> _ {
+ $0todo!()
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_impl_trait_docs() {
+ check_assist(
+ generate_impl_trait,
+ r#"
+ /// foo
+ trait $0Foo {
+ /// foo method
+ fn foo(&self) -> i32;
+
+ fn bar(&self) -> i32 {
+ self.foo()
+ }
+ }
+ "#,
+ r#"
+ /// foo
+ trait Foo {
+ /// foo method
+ fn foo(&self) -> i32;
+
+ fn bar(&self) -> i32 {
+ self.foo()
+ }
+ }
+
+ impl Foo for ${1:_} {
+ fn foo(&self) -> i32 {
+ $0todo!()
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_impl_trait_assoc_types() {
+ check_assist(
+ generate_impl_trait,
+ r#"
+ trait $0Foo {
+ type Output;
+
+ fn foo(&self) -> Self::Output;
+ }
+ "#,
+ r#"
+ trait Foo {
+ type Output;
+
+ fn foo(&self) -> Self::Output;
+ }
+
+ impl Foo for ${1:_} {
+ type Output;
+
+ fn foo(&self) -> Self::Output {
+ $0todo!()
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_impl_trait_indent() {
+ check_assist(
+ generate_impl_trait,
+ r#"
+ mod foo {
+ mod bar {
+ trait $0Foo {
+ type Output;
+
+ fn foo(&self) -> Self::Output;
+ }
+ }
+ }
+ "#,
+ r#"
+ mod foo {
+ mod bar {
+ trait Foo {
+ type Output;
+
+ fn foo(&self) -> Self::Output;
+ }
+
+ impl Foo for ${1:_} {
+ type Output;
+
+ fn foo(&self) -> Self::Output {
+ $0todo!()
+ }
+ }
+ }
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_add_impl_trait_empty() {
+ check_assist(
+ generate_impl_trait,
+ r#"
+ trait $0Foo {}
+ "#,
+ r#"
+ trait Foo {}
+
+ impl Foo for ${1:_} {$0}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index dc26ec7..9c4bcdd 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -94,7 +94,7 @@
})?;
let _ = process_ref_mut(&fn_);
- let assoc_list = make::assoc_item_list().clone_for_update();
+ let assoc_list = make::assoc_item_list(None).clone_for_update();
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_));
diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs
index 51c2f65..5bda122 100644
--- a/crates/ide-assists/src/handlers/generate_new.rs
+++ b/crates/ide-assists/src/handlers/generate_new.rs
@@ -4,12 +4,12 @@
};
use syntax::{
ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make},
- ted,
+ syntax_editor::Position,
};
use crate::{
AssistContext, AssistId, Assists,
- utils::{find_struct_impl, generate_impl},
+ utils::{find_struct_impl, generate_impl_with_item},
};
// Assist: generate_new
@@ -149,7 +149,53 @@
.clone_for_update();
fn_.indent(1.into());
- if let Some(cap) = ctx.config.snippet_cap {
+ let mut editor = builder.make_editor(strukt.syntax());
+
+ // Get the node for set annotation
+ let contain_fn = if let Some(impl_def) = impl_def {
+ fn_.indent(impl_def.indent_level());
+
+ if let Some(l_curly) = impl_def.assoc_item_list().and_then(|list| list.l_curly_token())
+ {
+ editor.insert_all(
+ Position::after(l_curly),
+ vec![
+ make::tokens::whitespace(&format!("\n{}", impl_def.indent_level() + 1))
+ .into(),
+ fn_.syntax().clone().into(),
+ make::tokens::whitespace("\n").into(),
+ ],
+ );
+ fn_.syntax().clone()
+ } else {
+ let items = vec![either::Either::Right(ast::AssocItem::Fn(fn_))];
+ let list = make::assoc_item_list(Some(items));
+ editor.insert(Position::after(impl_def.syntax()), list.syntax());
+ list.syntax().clone()
+ }
+ } else {
+ // Generate a new impl to add the method to
+ let indent_level = strukt.indent_level();
+ let body = vec![either::Either::Right(ast::AssocItem::Fn(fn_))];
+ let list = make::assoc_item_list(Some(body));
+ let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list));
+
+ impl_def.indent(strukt.indent_level());
+
+ // Insert it after the adt
+ editor.insert_all(
+ Position::after(strukt.syntax()),
+ vec![
+ make::tokens::whitespace(&format!("\n\n{indent_level}")).into(),
+ impl_def.syntax().clone().into(),
+ ],
+ );
+ impl_def.syntax().clone()
+ };
+
+ if let Some(fn_) = contain_fn.descendants().find_map(ast::Fn::cast)
+ && let Some(cap) = ctx.config.snippet_cap
+ {
match strukt.kind() {
StructKind::Tuple(_) => {
let struct_args = fn_
@@ -168,8 +214,8 @@
for (struct_arg, fn_param) in struct_args.zip(fn_params.params()) {
if let Some(fn_pat) = fn_param.pat() {
let fn_pat = fn_pat.syntax().clone();
- builder
- .add_placeholder_snippet_group(cap, vec![struct_arg, fn_pat]);
+ let placeholder = builder.make_placeholder_snippet(cap);
+ editor.add_annotation_all(vec![struct_arg, fn_pat], placeholder)
}
}
}
@@ -179,36 +225,12 @@
// Add a tabstop before the name
if let Some(name) = fn_.name() {
- builder.add_tabstop_before(cap, name);
+ let tabstop_before = builder.make_tabstop_before(cap);
+ editor.add_annotation(name.syntax(), tabstop_before);
}
}
- // Get the mutable version of the impl to modify
- let impl_def = if let Some(impl_def) = impl_def {
- fn_.indent(impl_def.indent_level());
- builder.make_mut(impl_def)
- } else {
- // Generate a new impl to add the method to
- let impl_def = generate_impl(&ast::Adt::Struct(strukt.clone()));
- let indent_level = strukt.indent_level();
- fn_.indent(indent_level);
-
- // Insert it after the adt
- let strukt = builder.make_mut(strukt.clone());
-
- ted::insert_all_raw(
- ted::Position::after(strukt.syntax()),
- vec![
- make::tokens::whitespace(&format!("\n\n{indent_level}")).into(),
- impl_def.syntax().clone().into(),
- ],
- );
-
- impl_def
- };
-
- // Add the `new` method at the start of the impl
- impl_def.get_or_create_assoc_item_list().add_item_at_start(fn_.into());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
index 154b502..92a4bd3 100644
--- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
@@ -3,7 +3,7 @@
use syntax::{
AstNode, SyntaxKind, T,
ast::{
- self, HasGenericParams, HasName,
+ self, HasGenericParams, HasName, HasVisibility,
edit_in_place::{HasVisibilityEdit, Indent},
make,
},
@@ -164,6 +164,12 @@
/// `E0449` Trait items always share the visibility of their trait
fn remove_items_visibility(item: &ast::AssocItem) {
if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) {
+ if let Some(vis) = has_vis.visibility()
+ && let Some(token) = vis.syntax().next_sibling_or_token()
+ && token.kind() == SyntaxKind::WHITESPACE
+ {
+ ted::remove(token);
+ }
has_vis.set_visibility(None);
}
}
@@ -333,11 +339,11 @@
struct Foo;
trait NewTrait {
- fn a_func() -> Option<()>;
+ fn a_func() -> Option<()>;
}
impl NewTrait for Foo {
- fn a_func() -> Option<()> {
+ fn a_func() -> Option<()> {
Some(())
}
}"#,
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index b7b8bc6..1549b41 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -537,8 +537,13 @@
if let Some(generic_arg_list) = generic_arg_list.clone() {
if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax()))
{
- PathTransform::function_call(target, source, function, generic_arg_list)
- .apply(body.syntax());
+ body.reindent_to(IndentLevel(0));
+ if let Some(new_body) = ast::BlockExpr::cast(
+ PathTransform::function_call(target, source, function, generic_arg_list)
+ .apply(body.syntax()),
+ ) {
+ body = new_body;
+ }
}
}
diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 806c8fb..45bb6ce 100644
--- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -5,12 +5,12 @@
SyntaxKind::WHITESPACE,
T,
ast::{self, AstNode, HasName, make},
- ted::{self, Position},
+ syntax_editor::{Position, SyntaxEditor},
};
use crate::{
AssistConfig, AssistId,
- assist_context::{AssistContext, Assists, SourceChangeBuilder},
+ assist_context::{AssistContext, Assists},
utils::{
DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items,
gen_trait_fn_body, generate_trait_impl,
@@ -126,98 +126,56 @@
let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`");
acc.add(AssistId::refactor("replace_derive_with_manual_impl"), label, target, |builder| {
- let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax());
+ let insert_after = Position::after(adt.syntax());
let impl_is_unsafe = trait_.map(|s| s.is_unsafe(ctx.db())).unwrap_or(false);
- let impl_def_with_items = impl_def_from_trait(
+ let impl_def = impl_def_from_trait(
&ctx.sema,
ctx.config,
adt,
&annotated_name,
trait_,
replace_trait_path,
+ impl_is_unsafe,
);
- update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
+
+ let mut editor = builder.make_editor(attr.syntax());
+ update_attribute(&mut editor, old_derives, old_tree, old_trait_path, attr);
let trait_path = make::ty_path(replace_trait_path.clone());
- match (ctx.config.snippet_cap, impl_def_with_items) {
- (None, None) => {
- let impl_def = generate_trait_impl(adt, trait_path);
- if impl_is_unsafe {
- ted::insert(
- Position::first_child_of(impl_def.syntax()),
- make::token(T![unsafe]),
- );
- }
+ let (impl_def, first_assoc_item) = if let Some(impl_def) = impl_def {
+ (
+ impl_def.clone(),
+ impl_def.assoc_item_list().and_then(|list| list.assoc_items().next()),
+ )
+ } else {
+ (generate_trait_impl(impl_is_unsafe, adt, trait_path), None)
+ };
- ted::insert_all(
- insert_after,
- vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
- );
- }
- (None, Some((impl_def, _))) => {
- if impl_is_unsafe {
- ted::insert(
- Position::first_child_of(impl_def.syntax()),
- make::token(T![unsafe]),
- );
- }
- ted::insert_all(
- insert_after,
- vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
- );
- }
- (Some(cap), None) => {
- let impl_def = generate_trait_impl(adt, trait_path);
-
- if impl_is_unsafe {
- ted::insert(
- Position::first_child_of(impl_def.syntax()),
- make::token(T![unsafe]),
- );
- }
-
- if let Some(l_curly) = impl_def.assoc_item_list().and_then(|it| it.l_curly_token())
+ if let Some(cap) = ctx.config.snippet_cap {
+ if let Some(first_assoc_item) = first_assoc_item {
+ if let ast::AssocItem::Fn(ref func) = first_assoc_item
+ && let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
+ && m.syntax().text() == "todo!()"
{
- builder.add_tabstop_after_token(cap, l_curly);
- }
-
- ted::insert_all(
- insert_after,
- vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
- );
- }
- (Some(cap), Some((impl_def, first_assoc_item))) => {
- let mut added_snippet = false;
-
- if impl_is_unsafe {
- ted::insert(
- Position::first_child_of(impl_def.syntax()),
- make::token(T![unsafe]),
- );
- }
-
- if let ast::AssocItem::Fn(ref func) = first_assoc_item {
- if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) {
- if m.syntax().text() == "todo!()" {
- // Make the `todo!()` a placeholder
- builder.add_placeholder_snippet(cap, m);
- added_snippet = true;
- }
- }
- }
-
- if !added_snippet {
+ // Make the `todo!()` a placeholder
+ builder.add_placeholder_snippet(cap, m);
+ } else {
// If we haven't already added a snippet, add a tabstop before the generated function
builder.add_tabstop_before(cap, first_assoc_item);
}
-
- ted::insert_all(
- insert_after,
- vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
- );
+ } else if let Some(l_curly) =
+ impl_def.assoc_item_list().and_then(|it| it.l_curly_token())
+ {
+ builder.add_tabstop_after_token(cap, l_curly);
}
- };
+ }
+
+ editor.insert_all(
+ insert_after,
+ vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+ );
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
@@ -228,7 +186,8 @@
annotated_name: &ast::Name,
trait_: Option<hir::Trait>,
trait_path: &ast::Path,
-) -> Option<(ast::Impl, ast::AssocItem)> {
+ impl_is_unsafe: bool,
+) -> Option<ast::Impl> {
let trait_ = trait_?;
let target_scope = sema.scope(annotated_name.syntax())?;
@@ -245,21 +204,43 @@
if trait_items.is_empty() {
return None;
}
- let impl_def = generate_trait_impl(adt, make::ty_path(trait_path.clone()));
+ let impl_def = generate_trait_impl(impl_is_unsafe, adt, make::ty_path(trait_path.clone()));
- let first_assoc_item =
+ let assoc_items =
add_trait_assoc_items_to_impl(sema, config, &trait_items, trait_, &impl_def, &target_scope);
+ let assoc_item_list = if let Some((first, other)) =
+ assoc_items.split_first().map(|(first, other)| (first.clone_subtree(), other))
+ {
+ let first_item = if let ast::AssocItem::Fn(ref func) = first
+ && let Some(body) = gen_trait_fn_body(func, trait_path, adt, None)
+ && let Some(func_body) = func.body()
+ {
+ let mut editor = SyntaxEditor::new(first.syntax().clone());
+ editor.replace(func_body.syntax(), body.syntax());
+ ast::AssocItem::cast(editor.finish().new_root().clone())
+ } else {
+ Some(first.clone())
+ };
+ let items = first_item
+ .into_iter()
+ .chain(other.iter().cloned())
+ .map(either::Either::Right)
+ .collect();
+ make::assoc_item_list(Some(items))
+ } else {
+ make::assoc_item_list(None)
+ }
+ .clone_for_update();
- // Generate a default `impl` function body for the derived trait.
- if let ast::AssocItem::Fn(ref func) = first_assoc_item {
- let _ = gen_trait_fn_body(func, trait_path, adt, None);
- };
-
- Some((impl_def, first_assoc_item))
+ let impl_def = impl_def.clone_subtree();
+ let mut editor = SyntaxEditor::new(impl_def.syntax().clone());
+ editor.replace(impl_def.assoc_item_list()?.syntax(), assoc_item_list.syntax());
+ let impl_def = ast::Impl::cast(editor.finish().new_root().clone())?;
+ Some(impl_def)
}
fn update_attribute(
- builder: &mut SourceChangeBuilder,
+ editor: &mut SyntaxEditor,
old_derives: &[ast::Path],
old_tree: &ast::TokenTree,
old_trait_path: &ast::Path,
@@ -272,8 +253,6 @@
let has_more_derives = !new_derives.is_empty();
if has_more_derives {
- let old_tree = builder.make_mut(old_tree.clone());
-
// Make the paths into flat lists of tokens in a vec
let tt = new_derives.iter().map(|path| path.syntax().clone()).map(|node| {
node.descendants_with_tokens()
@@ -288,18 +267,17 @@
let tt = tt.collect::<Vec<_>>();
let new_tree = make::token_tree(T!['('], tt).clone_for_update();
- ted::replace(old_tree.syntax(), new_tree.syntax());
+ editor.replace(old_tree.syntax(), new_tree.syntax());
} else {
// Remove the attr and any trailing whitespace
- let attr = builder.make_mut(attr.clone());
if let Some(line_break) =
attr.syntax().next_sibling_or_token().filter(|t| t.kind() == WHITESPACE)
{
- ted::remove(line_break)
+ editor.delete(line_break)
}
- ted::remove(attr.syntax())
+ editor.delete(attr.syntax())
}
}
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index cde0d87..4682c04 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -302,6 +302,7 @@
generate_function::generate_function,
generate_impl::generate_impl,
generate_impl::generate_trait_impl,
+ generate_impl::generate_impl_trait,
generate_is_empty_from_len::generate_is_empty_from_len,
generate_mut_trait_impl::generate_mut_trait_impl,
generate_new::generate_new,
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index fc1c692..91348be 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -1881,6 +1881,29 @@
}
#[test]
+fn doctest_generate_impl_trait() {
+ check_doc_test(
+ "generate_impl_trait",
+ r#####"
+trait $0Foo {
+ fn foo(&self) -> i32;
+}
+"#####,
+ r#####"
+trait Foo {
+ fn foo(&self) -> i32;
+}
+
+impl Foo for ${1:_} {
+ fn foo(&self) -> i32 {
+ $0todo!()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_is_empty_from_len() {
check_doc_test(
"generate_is_empty_from_len",
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index fbce1d3..15c7a6a 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -23,10 +23,11 @@
ast::{
self, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
edit::{AstNodeEdit, IndentLevel},
- edit_in_place::{AttrsOwnerEdit, Indent, Removable},
+ edit_in_place::{AttrsOwnerEdit, Removable},
make,
syntax_factory::SyntaxFactory,
},
+ syntax_editor::SyntaxEditor,
ted,
};
@@ -178,6 +179,7 @@
/// [`filter_assoc_items()`]), clones each item for update and applies path transformation to it,
/// then inserts into `impl_`. Returns the modified `impl_` and the first associated item that got
/// inserted.
+#[must_use]
pub fn add_trait_assoc_items_to_impl(
sema: &Semantics<'_, RootDatabase>,
config: &AssistConfig,
@@ -185,71 +187,66 @@
trait_: hir::Trait,
impl_: &ast::Impl,
target_scope: &hir::SemanticsScope<'_>,
-) -> ast::AssocItem {
+) -> Vec<ast::AssocItem> {
let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1;
- let items = original_items.iter().map(|InFile { file_id, value: original_item }| {
- let cloned_item = {
- if let Some(macro_file) = file_id.macro_file() {
- let span_map = sema.db.expansion_span_map(macro_file);
- let item_prettified = prettify_macro_expansion(
- sema.db,
- original_item.syntax().clone(),
- &span_map,
- target_scope.krate().into(),
- );
- if let Some(formatted) = ast::AssocItem::cast(item_prettified) {
- return formatted;
- } else {
- stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`");
+ original_items
+ .iter()
+ .map(|InFile { file_id, value: original_item }| {
+ let mut cloned_item = {
+ if let Some(macro_file) = file_id.macro_file() {
+ let span_map = sema.db.expansion_span_map(macro_file);
+ let item_prettified = prettify_macro_expansion(
+ sema.db,
+ original_item.syntax().clone(),
+ &span_map,
+ target_scope.krate().into(),
+ );
+ if let Some(formatted) = ast::AssocItem::cast(item_prettified) {
+ return formatted;
+ } else {
+ stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`");
+ }
}
+ original_item.clone_for_update()
}
- original_item.clone_for_update()
- };
+ .reset_indent();
- if let Some(source_scope) = sema.scope(original_item.syntax()) {
- // FIXME: Paths in nested macros are not handled well. See
- // `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test.
- let transform =
- PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone());
- transform.apply(cloned_item.syntax());
- }
- cloned_item.remove_attrs_and_docs();
- cloned_item.reindent_to(new_indent_level);
- cloned_item
- });
-
- let assoc_item_list = impl_.get_or_create_assoc_item_list();
-
- let mut first_item = None;
- for item in items {
- first_item.get_or_insert_with(|| item.clone());
- match &item {
- ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
- let body = AstNodeEdit::indent(
- &make::block_expr(
- None,
- Some(match config.expr_fill_default {
- ExprFillDefaultMode::Todo => make::ext::expr_todo(),
- ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
- ExprFillDefaultMode::Default => make::ext::expr_todo(),
- }),
- ),
- new_indent_level,
- );
- ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax())
+ if let Some(source_scope) = sema.scope(original_item.syntax()) {
+ // FIXME: Paths in nested macros are not handled well. See
+ // `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test.
+ let transform =
+ PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone());
+ cloned_item = ast::AssocItem::cast(transform.apply(cloned_item.syntax())).unwrap();
}
- ast::AssocItem::TypeAlias(type_alias) => {
- if let Some(type_bound_list) = type_alias.type_bound_list() {
- type_bound_list.remove()
+ cloned_item.remove_attrs_and_docs();
+ cloned_item
+ })
+ .map(|item| {
+ match &item {
+ ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
+ let body = AstNodeEdit::indent(
+ &make::block_expr(
+ None,
+ Some(match config.expr_fill_default {
+ ExprFillDefaultMode::Todo => make::ext::expr_todo(),
+ ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
+ ExprFillDefaultMode::Default => make::ext::expr_todo(),
+ }),
+ ),
+ IndentLevel::single(),
+ );
+ ted::replace(fn_.get_or_create_body().syntax(), body.syntax());
}
+ ast::AssocItem::TypeAlias(type_alias) => {
+ if let Some(type_bound_list) = type_alias.type_bound_list() {
+ type_bound_list.remove()
+ }
+ }
+ _ => {}
}
- _ => {}
- }
-
- assoc_item_list.add_item(item)
- }
-
- first_item.unwrap()
+ AstNodeEdit::indent(&item, new_indent_level)
+ })
+ .collect()
}
pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize {
@@ -334,7 +331,7 @@
fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> {
match expr {
ast::Expr::BinExpr(bin) => {
- let bin = bin.clone_for_update();
+ let bin = bin.clone_subtree();
let op_token = bin.op_token()?;
let rev_token = match op_token.kind() {
T![==] => T![!=],
@@ -350,8 +347,9 @@
);
}
};
- ted::replace(op_token, make::token(rev_token));
- Some(bin.into())
+ let mut bin_editor = SyntaxEditor::new(bin.syntax().clone());
+ bin_editor.replace(op_token, make::token(rev_token));
+ ast::Expr::cast(bin_editor.finish().new_root().clone())
}
ast::Expr::MethodCallExpr(mce) => {
let receiver = mce.receiver()?;
@@ -664,16 +662,23 @@
/// Generates the corresponding `impl Type {}` including type and lifetime
/// parameters.
+pub(crate) fn generate_impl_with_item(
+ adt: &ast::Adt,
+ body: Option<ast::AssocItemList>,
+) -> ast::Impl {
+ generate_impl_inner(false, adt, None, true, body)
+}
+
pub(crate) fn generate_impl(adt: &ast::Adt) -> ast::Impl {
- generate_impl_inner(adt, None, true)
+ generate_impl_inner(false, adt, None, true, None)
}
/// Generates the corresponding `impl <trait> for Type {}` including type
/// and lifetime parameters, with `<trait>` appended to `impl`'s generic parameters' bounds.
///
/// This is useful for traits like `PartialEq`, since `impl<T> PartialEq for U<T>` often requires `T: PartialEq`.
-pub(crate) fn generate_trait_impl(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl {
- generate_impl_inner(adt, Some(trait_), true)
+pub(crate) fn generate_trait_impl(is_unsafe: bool, adt: &ast::Adt, trait_: ast::Type) -> ast::Impl {
+ generate_impl_inner(is_unsafe, adt, Some(trait_), true, None)
}
/// Generates the corresponding `impl <trait> for Type {}` including type
@@ -681,13 +686,15 @@
///
/// This is useful for traits like `From<T>`, since `impl<T> From<T> for U<T>` doesn't require `T: From<T>`.
pub(crate) fn generate_trait_impl_intransitive(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl {
- generate_impl_inner(adt, Some(trait_), false)
+ generate_impl_inner(false, adt, Some(trait_), false, None)
}
fn generate_impl_inner(
+ is_unsafe: bool,
adt: &ast::Adt,
trait_: Option<ast::Type>,
trait_is_transitive: bool,
+ body: Option<ast::AssocItemList>,
) -> ast::Impl {
// Ensure lifetime params are before type & const params
let generic_params = adt.generic_param_list().map(|generic_params| {
@@ -727,7 +734,7 @@
let impl_ = match trait_ {
Some(trait_) => make::impl_trait(
- false,
+ is_unsafe,
None,
None,
generic_params,
@@ -737,9 +744,9 @@
ty,
None,
adt.where_clause(),
- None,
+ body,
),
- None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), None),
+ None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), body),
}
.clone_for_update();
diff --git a/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/crates/ide-assists/src/utils/gen_trait_fn_body.rs
index c58bdd9..87e90e8 100644
--- a/crates/ide-assists/src/utils/gen_trait_fn_body.rs
+++ b/crates/ide-assists/src/utils/gen_trait_fn_body.rs
@@ -1,10 +1,7 @@
//! This module contains functions to generate default trait impl function bodies where possible.
use hir::TraitRef;
-use syntax::{
- ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make},
- ted,
-};
+use syntax::ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make};
/// Generate custom trait bodies without default implementation where possible.
///
@@ -18,21 +15,33 @@
trait_path: &ast::Path,
adt: &ast::Adt,
trait_ref: Option<TraitRef<'_>>,
-) -> Option<()> {
+) -> Option<ast::BlockExpr> {
+ let _ = func.body()?;
match trait_path.segment()?.name_ref()?.text().as_str() {
- "Clone" => gen_clone_impl(adt, func),
- "Debug" => gen_debug_impl(adt, func),
- "Default" => gen_default_impl(adt, func),
- "Hash" => gen_hash_impl(adt, func),
- "PartialEq" => gen_partial_eq(adt, func, trait_ref),
- "PartialOrd" => gen_partial_ord(adt, func, trait_ref),
+ "Clone" => {
+ stdx::always!(func.name().is_some_and(|name| name.text() == "clone"));
+ gen_clone_impl(adt)
+ }
+ "Debug" => gen_debug_impl(adt),
+ "Default" => gen_default_impl(adt),
+ "Hash" => {
+ stdx::always!(func.name().is_some_and(|name| name.text() == "hash"));
+ gen_hash_impl(adt)
+ }
+ "PartialEq" => {
+ stdx::always!(func.name().is_some_and(|name| name.text() == "eq"));
+ gen_partial_eq(adt, trait_ref)
+ }
+ "PartialOrd" => {
+ stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp"));
+ gen_partial_ord(adt, trait_ref)
+ }
_ => None,
}
}
/// Generate a `Clone` impl based on the fields and members of the target type.
-fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
- stdx::always!(func.name().is_some_and(|name| name.text() == "clone"));
+fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
fn gen_clone_call(target: ast::Expr) -> ast::Expr {
let method = make::name_ref("clone");
make::expr_method_call(target, method, make::arg_list(None)).into()
@@ -139,12 +148,11 @@
}
};
let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
- ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
- Some(())
+ Some(body)
}
/// Generate a `Debug` impl based on the fields and members of the target type.
-fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+fn gen_debug_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
let annotated_name = adt.name()?;
match adt {
// `Debug` cannot be derived for unions, so no default impl can be provided.
@@ -248,8 +256,7 @@
let body = make::block_expr(None, Some(match_expr.into()));
let body = body.indent(ast::edit::IndentLevel(1));
- ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
- Some(())
+ Some(body)
}
ast::Adt::Struct(strukt) => {
@@ -296,14 +303,13 @@
let method = make::name_ref("finish");
let expr = make::expr_method_call(expr, method, make::arg_list(None)).into();
let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
- ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
- Some(())
+ Some(body)
}
}
}
/// Generate a `Debug` impl based on the fields and members of the target type.
-fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+fn gen_default_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
fn gen_default_call() -> Option<ast::Expr> {
let fn_name = make::ext::path_from_idents(["Default", "default"])?;
Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)).into())
@@ -342,15 +348,13 @@
}
};
let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1));
- ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
- Some(())
+ Some(body)
}
}
}
/// Generate a `Hash` impl based on the fields and members of the target type.
-fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
- stdx::always!(func.name().is_some_and(|name| name.text() == "hash"));
+fn gen_hash_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> {
fn gen_hash_call(target: ast::Expr) -> ast::Stmt {
let method = make::name_ref("hash");
let arg = make::expr_path(make::ext::ident_path("state"));
@@ -400,13 +404,11 @@
},
};
- ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
- Some(())
+ Some(body)
}
/// Generate a `PartialEq` impl based on the fields and members of the target type.
-fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
- stdx::always!(func.name().is_some_and(|name| name.text() == "eq"));
+fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> {
fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
match expr {
Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)),
@@ -595,12 +597,10 @@
},
};
- ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
- Some(())
+ Some(body)
}
-fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
- stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp"));
+fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> {
fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
let mut arms = vec![];
@@ -686,8 +686,7 @@
},
};
- ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
- Some(())
+ Some(body)
}
fn make_discriminant() -> Option<ast::Expr> {
diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 975c2f0..bcf8c0e 100644
--- a/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -276,7 +276,7 @@
let assoc_item = assoc_item.clone_for_update();
// FIXME: Paths in nested macros are not handled well. See
// `macro_generated_assoc_item2` test.
- transform.apply(assoc_item.syntax());
+ let assoc_item = ast::AssocItem::cast(transform.apply(assoc_item.syntax()))?;
assoc_item.remove_attrs_and_docs();
Some(assoc_item)
}
@@ -301,7 +301,7 @@
let fn_ = fn_.clone_for_update();
// FIXME: Paths in nested macros are not handled well. See
// `macro_generated_assoc_item2` test.
- transform.apply(fn_.syntax());
+ let fn_ = ast::Fn::cast(transform.apply(fn_.syntax()))?;
fn_.remove_attrs_and_docs();
match async_ {
AsyncSugaring::Desugar => {
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 0ab880b..b7432d8 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -12,15 +12,16 @@
use syntax::{
NodeOrToken, SyntaxNode,
ast::{self, AstNode, HasGenericArgs, make},
- ted,
+ syntax_editor::{self, SyntaxEditor},
};
-#[derive(Default)]
+#[derive(Default, Debug)]
struct AstSubsts {
types_and_consts: Vec<TypeOrConst>,
lifetimes: Vec<ast::LifetimeArg>,
}
+#[derive(Debug)]
enum TypeOrConst {
Either(ast::TypeArg), // indistinguishable type or const param
Const(ast::ConstArg),
@@ -128,15 +129,18 @@
}
}
- pub fn apply(&self, syntax: &SyntaxNode) {
+ #[must_use]
+ pub fn apply(&self, syntax: &SyntaxNode) -> SyntaxNode {
self.build_ctx().apply(syntax)
}
- pub fn apply_all<'b>(&self, nodes: impl IntoIterator<Item = &'b SyntaxNode>) {
+ #[must_use]
+ pub fn apply_all<'b>(
+ &self,
+ nodes: impl IntoIterator<Item = &'b SyntaxNode>,
+ ) -> Vec<SyntaxNode> {
let ctx = self.build_ctx();
- for node in nodes {
- ctx.apply(node);
- }
+ nodes.into_iter().map(|node| ctx.apply(&node.clone())).collect()
}
fn prettify_target_node(&self, node: SyntaxNode) -> SyntaxNode {
@@ -236,7 +240,7 @@
Some((k.name(db).display(db, target_edition).to_string(), v.lifetime()?))
})
.collect();
- let ctx = Ctx {
+ let mut ctx = Ctx {
type_substs,
const_substs,
lifetime_substs,
@@ -272,42 +276,75 @@
}
impl Ctx<'_> {
- fn apply(&self, item: &SyntaxNode) {
+ fn apply(&self, item: &SyntaxNode) -> SyntaxNode {
// `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec
// so that such operation is safe.
- let paths = preorder_rev(item).filter_map(ast::Path::cast).collect::<Vec<_>>();
- for path in paths {
- self.transform_path(path);
- }
-
- preorder_rev(item).filter_map(ast::Lifetime::cast).for_each(|lifetime| {
+ let item = self.transform_path(item).clone_subtree();
+ let mut editor = SyntaxEditor::new(item.clone());
+ preorder_rev(&item).filter_map(ast::Lifetime::cast).for_each(|lifetime| {
if let Some(subst) = self.lifetime_substs.get(&lifetime.syntax().text().to_string()) {
- ted::replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax());
+ editor
+ .replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax());
}
});
+
+ editor.finish().new_root().clone()
}
- fn transform_default_values(&self, defaulted_params: Vec<DefaultedParam>) {
+ fn transform_default_values(&mut self, defaulted_params: Vec<DefaultedParam>) {
// By now the default values are simply copied from where they are declared
// and should be transformed. As any value is allowed to refer to previous
// generic (both type and const) parameters, they should be all iterated left-to-right.
for param in defaulted_params {
- let value = match param {
- Either::Left(k) => self.type_substs.get(&k).unwrap().syntax(),
- Either::Right(k) => self.const_substs.get(&k).unwrap(),
+ let value = match ¶m {
+ Either::Left(k) => self.type_substs.get(k).unwrap().syntax(),
+ Either::Right(k) => self.const_substs.get(k).unwrap(),
};
// `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec
// so that such operation is safe.
- let paths = preorder_rev(value).filter_map(ast::Path::cast).collect::<Vec<_>>();
- for path in paths {
- self.transform_path(path);
+ let new_value = self.transform_path(value);
+ match param {
+ Either::Left(k) => {
+ self.type_substs.insert(k, ast::Type::cast(new_value.clone()).unwrap());
+ }
+ Either::Right(k) => {
+ self.const_substs.insert(k, new_value.clone());
+ }
}
}
}
- fn transform_path(&self, path: ast::Path) -> Option<()> {
+ fn transform_path(&self, path: &SyntaxNode) -> SyntaxNode {
+ fn find_child_paths(root_path: &SyntaxNode) -> Vec<ast::Path> {
+ let mut result = Vec::new();
+ for child in root_path.children() {
+ if let Some(child_path) = ast::Path::cast(child.clone()) {
+ result.push(child_path);
+ } else {
+ result.extend(find_child_paths(&child));
+ }
+ }
+ result
+ }
+ let root_path = path.clone_subtree();
+ let result = find_child_paths(&root_path);
+ let mut editor = SyntaxEditor::new(root_path.clone());
+ for sub_path in result {
+ let new = self.transform_path(sub_path.syntax());
+ editor.replace(sub_path.syntax(), new);
+ }
+ let update_sub_item = editor.finish().new_root().clone().clone_subtree();
+ let item = find_child_paths(&update_sub_item);
+ let mut editor = SyntaxEditor::new(update_sub_item);
+ for sub_path in item {
+ self.transform_path_(&mut editor, &sub_path);
+ }
+ editor.finish().new_root().clone()
+ }
+
+ fn transform_path_(&self, editor: &mut SyntaxEditor, path: &ast::Path) -> Option<()> {
if path.qualifier().is_some() {
return None;
}
@@ -319,8 +356,7 @@
// don't try to qualify sole `self` either, they are usually locals, but are returned as modules due to namespace clashing
return None;
}
-
- let resolution = self.source_scope.speculative_resolve(&path)?;
+ let resolution = self.source_scope.speculative_resolve(path)?;
match resolution {
hir::PathResolution::TypeParam(tp) => {
@@ -360,12 +396,12 @@
let segment = make::path_segment_ty(subst.clone(), trait_ref);
let qualified = make::path_from_segments(std::iter::once(segment), false);
- ted::replace(path.syntax(), qualified.clone_for_update().syntax());
+ editor.replace(path.syntax(), qualified.clone_for_update().syntax());
} else if let Some(path_ty) = ast::PathType::cast(parent) {
let old = path_ty.syntax();
if old.parent().is_some() {
- ted::replace(old, subst.clone_subtree().clone_for_update().syntax());
+ editor.replace(old, subst.clone_subtree().clone_for_update().syntax());
} else {
// Some `path_ty` has no parent, especially ones made for default value
// of type parameters.
@@ -377,13 +413,13 @@
}
let start = path_ty.syntax().first_child().map(NodeOrToken::Node)?;
let end = path_ty.syntax().last_child().map(NodeOrToken::Node)?;
- ted::replace_all(
+ editor.replace_all(
start..=end,
new.syntax().children().map(NodeOrToken::Node).collect::<Vec<_>>(),
);
}
} else {
- ted::replace(
+ editor.replace(
path.syntax(),
subst.clone_subtree().clone_for_update().syntax(),
);
@@ -409,17 +445,28 @@
};
let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?;
let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update();
+ let mut res_editor = SyntaxEditor::new(res.syntax().clone_subtree());
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() {
- let old = segment.get_or_create_generic_arg_list();
- ted::replace(old.syntax(), args.clone_subtree().syntax().clone_for_update())
+ if let Some(old) = segment.generic_arg_list() {
+ res_editor.replace(
+ old.syntax(),
+ args.clone_subtree().syntax().clone_for_update(),
+ )
+ } else {
+ res_editor.insert(
+ syntax_editor::Position::last_child_of(segment.syntax()),
+ args.clone_subtree().syntax().clone_for_update(),
+ );
+ }
}
}
- ted::replace(path.syntax(), res.syntax())
+ let res = res_editor.finish().new_root().clone();
+ editor.replace(path.syntax().clone(), res);
}
hir::PathResolution::ConstParam(cp) => {
if let Some(subst) = self.const_substs.get(&cp) {
- ted::replace(path.syntax(), subst.clone_subtree().clone_for_update());
+ editor.replace(path.syntax(), subst.clone_subtree().clone_for_update());
}
}
hir::PathResolution::SelfType(imp) => {
@@ -456,13 +503,13 @@
mod_path_to_ast(&found_path, self.target_edition).qualifier()
{
let res = make::path_concat(qual, path_ty.path()?).clone_for_update();
- ted::replace(path.syntax(), res.syntax());
+ editor.replace(path.syntax(), res.syntax());
return Some(());
}
}
}
- ted::replace(path.syntax(), ast_ty.syntax());
+ editor.replace(path.syntax(), ast_ty.syntax());
}
hir::PathResolution::Local(_)
| hir::PathResolution::Def(_)
diff --git a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
index f20b6de..e31367f 100644
--- a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
+++ b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
@@ -131,4 +131,28 @@
"#,
);
}
+
+ #[test]
+ fn regression_20259() {
+ check_diagnostics(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct Foo<T>(T);
+
+impl<T> Deref for Foo<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test(x: Foo<(i32, bool)>) {
+ let (_a, _b): &(i32, bool) = &x;
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 698fd14..1901bcc 100755
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -73,11 +73,13 @@
}
if fn_node.body().is_some() {
+ // Get the actual start of the function (excluding doc comments)
+ let fn_start = fn_node
+ .fn_token()
+ .map(|token| token.text_range().start())
+ .unwrap_or(node.text_range().start());
res.push(Fold {
- range: TextRange::new(
- node.text_range().start(),
- node.text_range().end(),
- ),
+ range: TextRange::new(fn_start, node.text_range().end()),
kind: FoldKind::Function,
});
continue;
@@ -688,4 +690,21 @@
"#,
)
}
+
+ #[test]
+ fn test_fold_doc_comments_with_multiline_paramlist_function() {
+ check(
+ r#"
+<fold comment>/// A very very very very very very very very very very very very very very very
+/// very very very long description</fold>
+<fold function>fn foo<fold arglist>(
+ very_long_parameter_name: u32,
+ another_very_long_parameter_name: u32,
+ third_very_long_param: u32,
+)</fold> <fold block>{
+ todo!()
+}</fold></fold>
+"#,
+ );
+ }
}
diff --git a/crates/ide/src/inlay_hints/lifetime.rs b/crates/ide/src/inlay_hints/lifetime.rs
index 0069452..49fec0a 100644
--- a/crates/ide/src/inlay_hints/lifetime.rs
+++ b/crates/ide/src/inlay_hints/lifetime.rs
@@ -77,17 +77,18 @@
return None;
}
- let parent_for_type = func
+ let parent_for_binder = func
.syntax()
.ancestors()
.skip(1)
.take_while(|it| matches!(it.kind(), SyntaxKind::PAREN_TYPE | SyntaxKind::FOR_TYPE))
- .find_map(ast::ForType::cast);
+ .find_map(ast::ForType::cast)
+ .and_then(|it| it.for_binder());
let param_list = func.param_list()?;
- let generic_param_list = parent_for_type.as_ref().and_then(|it| it.generic_param_list());
+ let generic_param_list = parent_for_binder.as_ref().and_then(|it| it.generic_param_list());
let ret_type = func.ret_type();
- let for_kw = parent_for_type.as_ref().and_then(|it| it.for_token());
+ let for_kw = parent_for_binder.as_ref().and_then(|it| it.for_token());
hints_(
acc,
ctx,
@@ -143,15 +144,16 @@
// FIXME: Support general path types
let (param_list, ret_type) = func.path().as_ref().and_then(path_as_fn)?;
- let parent_for_type = func
+ let parent_for_binder = func
.syntax()
.ancestors()
.skip(1)
.take_while(|it| matches!(it.kind(), SyntaxKind::PAREN_TYPE | SyntaxKind::FOR_TYPE))
- .find_map(ast::ForType::cast);
+ .find_map(ast::ForType::cast)
+ .and_then(|it| it.for_binder());
- let generic_param_list = parent_for_type.as_ref().and_then(|it| it.generic_param_list());
- let for_kw = parent_for_type.as_ref().and_then(|it| it.for_token());
+ let generic_param_list = parent_for_binder.as_ref().and_then(|it| it.generic_param_list());
+ let for_kw = parent_for_binder.as_ref().and_then(|it| it.for_token());
hints_(
acc,
ctx,
diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs
index fb84e8e..a07c647 100644
--- a/crates/ide/src/rename.rs
+++ b/crates/ide/src/rename.rs
@@ -12,6 +12,7 @@
source_change::SourceChangeBuilder,
};
use itertools::Itertools;
+use std::fmt::Write;
use stdx::{always, never};
use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast};
@@ -459,35 +460,22 @@
}
fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: String) -> Option<TextEdit> {
- fn target_type_name(impl_def: &ast::Impl) -> Option<String> {
- if let Some(ast::Type::PathType(p)) = impl_def.self_ty() {
- return Some(p.path()?.segment()?.name_ref()?.text().to_string());
- }
- None
+ let mut replacement_text = new_name;
+ replacement_text.push_str(": ");
+
+ if self_param.amp_token().is_some() {
+ replacement_text.push('&');
+ }
+ if let Some(lifetime) = self_param.lifetime() {
+ write!(replacement_text, "{lifetime} ").unwrap();
+ }
+ if self_param.amp_token().and(self_param.mut_token()).is_some() {
+ replacement_text.push_str("mut ");
}
- match self_param.syntax().ancestors().find_map(ast::Impl::cast) {
- Some(impl_def) => {
- let type_name = target_type_name(&impl_def)?;
+ replacement_text.push_str("Self");
- let mut replacement_text = new_name;
- replacement_text.push_str(": ");
- match (self_param.amp_token(), self_param.mut_token()) {
- (Some(_), None) => replacement_text.push('&'),
- (Some(_), Some(_)) => replacement_text.push_str("&mut "),
- (_, _) => (),
- };
- replacement_text.push_str(type_name.as_str());
-
- Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
- }
- None => {
- cov_mark::hit!(rename_self_outside_of_methods);
- let mut replacement_text = new_name;
- replacement_text.push_str(": _");
- Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
- }
- }
+ Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
}
#[cfg(test)]
@@ -2069,7 +2057,7 @@
struct Foo { i: i32 }
impl Foo {
- fn f(foo: &mut Foo) -> i32 {
+ fn f(foo: &mut Self) -> i32 {
foo.i
}
}
@@ -2095,7 +2083,33 @@
struct Foo { i: i32 }
impl Foo {
- fn f(foo: Foo) -> i32 {
+ fn f(foo: Self) -> i32 {
+ foo.i
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_owned_self_to_parameter_with_lifetime() {
+ cov_mark::check!(rename_self_to_param);
+ check(
+ "foo",
+ r#"
+struct Foo<'a> { i: &'a i32 }
+
+impl<'a> Foo<'a> {
+ fn f(&'a $0self) -> i32 {
+ self.i
+ }
+}
+"#,
+ r#"
+struct Foo<'a> { i: &'a i32 }
+
+impl<'a> Foo<'a> {
+ fn f(foo: &'a Self) -> i32 {
foo.i
}
}
@@ -2105,7 +2119,6 @@
#[test]
fn test_self_outside_of_methods() {
- cov_mark::check!(rename_self_outside_of_methods);
check(
"foo",
r#"
@@ -2114,7 +2127,7 @@
}
"#,
r#"
-fn f(foo: _) -> i32 {
+fn f(foo: Self) -> i32 {
foo.i
}
"#,
@@ -2159,7 +2172,7 @@
struct Foo { i: i32 }
impl Foo {
- fn f(foo: &Foo) -> i32 {
+ fn f(foo: &Self) -> i32 {
let self_var = 1;
foo.i
}
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index 7665656..ed8a91c 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -572,9 +572,7 @@
// test closure_binder
// fn main() { for<'a> || (); }
if p.at(T![for]) {
- let b = p.start();
types::for_binder(p);
- b.complete(p, CLOSURE_BINDER);
}
// test const_closure
// fn main() { let cl = const || _ = 0; }
diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs
index 55c5dc4..cb1b59f 100644
--- a/crates/parser/src/grammar/generic_params.rs
+++ b/crates/parser/src/grammar/generic_params.rs
@@ -13,7 +13,7 @@
// test_err generic_param_list_recover
// fn f<T: Clone,, U:, V>() {}
-fn generic_param_list(p: &mut Parser<'_>) {
+pub(super) fn generic_param_list(p: &mut Parser<'_>) {
assert!(p.at(T![<]));
let m = p.start();
delimited(
@@ -147,7 +147,15 @@
let has_paren = p.eat(T!['(']);
match p.current() {
LIFETIME_IDENT => lifetime(p),
- T![for] => types::for_type(p, false),
+ // test for_binder_bound
+ // fn foo<T: for<'a> [const] async Trait>() {}
+ T![for] => {
+ types::for_binder(p);
+ if path_type_bound(p).is_err() {
+ m.abandon(p);
+ return false;
+ }
+ }
// test precise_capturing
// fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T, Self> {}
@@ -180,44 +188,8 @@
p.bump_any();
types::for_type(p, false)
}
- current => {
- match current {
- T![?] => p.bump_any(),
- T![~] => {
- p.bump_any();
- p.expect(T![const]);
- }
- T!['['] => {
- p.bump_any();
- p.expect(T![const]);
- p.expect(T![']']);
- }
- // test const_trait_bound
- // const fn foo(_: impl const Trait) {}
- T![const] => {
- p.bump_any();
- }
- // test async_trait_bound
- // fn async_foo(_: impl async Fn(&i32)) {}
- T![async] => {
- p.bump_any();
- }
- _ => (),
- }
- if paths::is_use_path_start(p) {
- types::path_type_bounds(p, false);
- // test_err type_bounds_macro_call_recovery
- // fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {}
- if p.at(T![!]) {
- let m = p.start();
- p.bump(T![!]);
- p.error("unexpected `!` in type path, macro calls are not allowed here");
- if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) {
- items::token_tree(p);
- }
- m.complete(p, ERROR);
- }
- } else {
+ _ => {
+ if path_type_bound(p).is_err() {
m.abandon(p);
return false;
}
@@ -231,6 +203,43 @@
true
}
+fn path_type_bound(p: &mut Parser<'_>) -> Result<(), ()> {
+ if p.eat(T![~]) {
+ p.expect(T![const]);
+ } else if p.eat(T!['[']) {
+ // test maybe_const_trait_bound
+ // const fn foo(_: impl [const] Trait) {}
+ p.expect(T![const]);
+ p.expect(T![']']);
+ } else {
+ // test const_trait_bound
+ // const fn foo(_: impl const Trait) {}
+ p.eat(T![const]);
+ }
+ // test async_trait_bound
+ // fn async_foo(_: impl async Fn(&i32)) {}
+ p.eat(T![async]);
+ p.eat(T![?]);
+
+ if paths::is_use_path_start(p) {
+ types::path_type_bounds(p, false);
+ // test_err type_bounds_macro_call_recovery
+ // fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {}
+ if p.at(T![!]) {
+ let m = p.start();
+ p.bump(T![!]);
+ p.error("unexpected `!` in type path, macro calls are not allowed here");
+ if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) {
+ items::token_tree(p);
+ }
+ m.complete(p, ERROR);
+ }
+ Ok(())
+ } else {
+ Err(())
+ }
+}
+
// test where_clause
// fn foo()
// where
diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs
index 908440b..a7e97c5 100644
--- a/crates/parser/src/grammar/types.rs
+++ b/crates/parser/src/grammar/types.rs
@@ -249,13 +249,14 @@
}
pub(super) fn for_binder(p: &mut Parser<'_>) {
- assert!(p.at(T![for]));
+ let m = p.start();
p.bump(T![for]);
if p.at(T![<]) {
- generic_params::opt_generic_param_list(p);
+ generic_params::generic_param_list(p);
} else {
p.error("expected `<`");
}
+ m.complete(p, FOR_BINDER);
}
// test for_type
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index 12a13ca..3a8041d 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -185,7 +185,6 @@
BREAK_EXPR,
CALL_EXPR,
CAST_EXPR,
- CLOSURE_BINDER,
CLOSURE_EXPR,
CONST,
CONST_ARG,
@@ -203,6 +202,7 @@
FN_PTR_TYPE,
FORMAT_ARGS_ARG,
FORMAT_ARGS_EXPR,
+ FOR_BINDER,
FOR_EXPR,
FOR_TYPE,
GENERIC_ARG_LIST,
@@ -358,7 +358,6 @@
| BREAK_EXPR
| CALL_EXPR
| CAST_EXPR
- | CLOSURE_BINDER
| CLOSURE_EXPR
| CONST
| CONST_ARG
@@ -376,6 +375,7 @@
| FN_PTR_TYPE
| FORMAT_ARGS_ARG
| FORMAT_ARGS_EXPR
+ | FOR_BINDER
| FOR_EXPR
| FOR_TYPE
| GENERIC_ARG_LIST
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index cef7b0e..c642e1a 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -253,6 +253,10 @@
run_and_expect_no_errors("test_data/parser/inline/ok/fn_pointer_unnamed_arg.rs");
}
#[test]
+ fn for_binder_bound() {
+ run_and_expect_no_errors("test_data/parser/inline/ok/for_binder_bound.rs");
+ }
+ #[test]
fn for_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/for_expr.rs"); }
#[test]
fn for_range_from() {
@@ -402,6 +406,10 @@
#[test]
fn match_guard() { run_and_expect_no_errors("test_data/parser/inline/ok/match_guard.rs"); }
#[test]
+ fn maybe_const_trait_bound() {
+ run_and_expect_no_errors("test_data/parser/inline/ok/maybe_const_trait_bound.rs");
+ }
+ #[test]
fn metas() { run_and_expect_no_errors("test_data/parser/inline/ok/metas.rs"); }
#[test]
fn method_call_expr() {
diff --git a/crates/parser/test_data/parser/err/0024_many_type_parens.rast b/crates/parser/test_data/parser/err/0024_many_type_parens.rast
index 025c12e..2fd1725 100644
--- a/crates/parser/test_data/parser/err/0024_many_type_parens.rast
+++ b/crates/parser/test_data/parser/err/0024_many_type_parens.rast
@@ -37,7 +37,7 @@
WHITESPACE " "
TYPE_BOUND
L_PAREN "("
- FOR_TYPE
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@@ -45,18 +45,18 @@
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Trait"
- GENERIC_ARG_LIST
- L_ANGLE "<"
- LIFETIME_ARG
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
R_PAREN ")"
R_ANGLE ">"
PARAM_LIST
@@ -124,7 +124,7 @@
WHITESPACE " "
TYPE_BOUND
L_PAREN "("
- FOR_TYPE
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@@ -132,18 +132,18 @@
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Trait"
- GENERIC_ARG_LIST
- L_ANGLE "<"
- LIFETIME_ARG
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
R_PAREN ")"
ERROR
R_ANGLE ">"
@@ -186,7 +186,7 @@
TUPLE_EXPR
L_PAREN "("
CLOSURE_EXPR
- CLOSURE_BINDER
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@@ -243,13 +243,14 @@
PAREN_TYPE
L_PAREN "("
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
diff --git a/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast b/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast
index 674c8d5..3768a55 100644
--- a/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast
+++ b/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast
@@ -12,13 +12,14 @@
WHERE_KW "where"
WHITESPACE " "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE "\n"
BLOCK_EXPR
STMT_LIST
diff --git a/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast b/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast
index cb4fb16..9c4ee6f 100644
--- a/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast
+++ b/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast
@@ -8,13 +8,14 @@
EQ "="
WHITESPACE " "
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
REF_TYPE
AMP "&"
@@ -37,13 +38,14 @@
EQ "="
WHITESPACE " "
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
TUPLE_TYPE
L_PAREN "("
@@ -70,13 +72,14 @@
EQ "="
WHITESPACE " "
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
SLICE_TYPE
L_BRACK "["
@@ -97,22 +100,24 @@
EQ "="
WHITESPACE " "
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
- WHITESPACE " "
- FOR_TYPE
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
- LIFETIME_IDENT "'b"
+ LIFETIME_IDENT "'a"
R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"
@@ -164,31 +169,34 @@
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
- WHITESPACE " "
- FOR_TYPE
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
- LIFETIME_IDENT "'b"
+ LIFETIME_IDENT "'a"
R_ANGLE ">"
- WHITESPACE " "
- FOR_TYPE
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
- LIFETIME_IDENT "'c"
+ LIFETIME_IDENT "'b"
R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'c"
+ R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"
diff --git a/crates/parser/test_data/parser/inline/ok/closure_binder.rast b/crates/parser/test_data/parser/inline/ok/closure_binder.rast
index c04dbe1..c96ccf7 100644
--- a/crates/parser/test_data/parser/inline/ok/closure_binder.rast
+++ b/crates/parser/test_data/parser/inline/ok/closure_binder.rast
@@ -14,7 +14,7 @@
WHITESPACE " "
EXPR_STMT
CLOSURE_EXPR
- CLOSURE_BINDER
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
diff --git a/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast b/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast
index dcc66dc..6578809 100644
--- a/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast
+++ b/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast
@@ -103,7 +103,7 @@
WHITESPACE " "
TYPE_BOUND_LIST
TYPE_BOUND
- FOR_TYPE
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@@ -111,12 +111,12 @@
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Path"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Path"
SEMICOLON ";"
WHITESPACE "\n"
TYPE_ALIAS
diff --git a/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast b/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast
new file mode 100644
index 0000000..17dbbf3
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast
@@ -0,0 +1,45 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ L_BRACK "["
+ CONST_KW "const"
+ R_BRACK "]"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs b/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs
new file mode 100644
index 0000000..427cf55
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs
@@ -0,0 +1 @@
+fn foo<T: for<'a> [const] async Trait>() {}
diff --git a/crates/parser/test_data/parser/inline/ok/for_type.rast b/crates/parser/test_data/parser/inline/ok/for_type.rast
index 7600457..5862305 100644
--- a/crates/parser/test_data/parser/inline/ok/for_type.rast
+++ b/crates/parser/test_data/parser/inline/ok/for_type.rast
@@ -8,13 +8,14 @@
EQ "="
WHITESPACE " "
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"
@@ -39,13 +40,14 @@
EQ "="
WHITESPACE " "
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
UNSAFE_KW "unsafe"
@@ -86,13 +88,14 @@
EQ "="
WHITESPACE " "
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
diff --git a/crates/parser/test_data/parser/inline/ok/lambda_expr.rast b/crates/parser/test_data/parser/inline/ok/lambda_expr.rast
index ea401d2..bf24a57 100644
--- a/crates/parser/test_data/parser/inline/ok/lambda_expr.rast
+++ b/crates/parser/test_data/parser/inline/ok/lambda_expr.rast
@@ -202,7 +202,7 @@
WHITESPACE "\n "
EXPR_STMT
CLOSURE_EXPR
- CLOSURE_BINDER
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@@ -223,7 +223,7 @@
WHITESPACE "\n "
EXPR_STMT
CLOSURE_EXPR
- CLOSURE_BINDER
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
diff --git a/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast
new file mode 100644
index 0000000..8d12f81
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ CONST_KW "const"
+ WHITESPACE " "
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ COLON ":"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ L_BRACK "["
+ CONST_KW "const"
+ R_BRACK "]"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs
new file mode 100644
index 0000000..e1da920
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs
@@ -0,0 +1 @@
+const fn foo(_: impl [const] Trait) {}
diff --git a/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast b/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast
index 30a2842..6afa061 100644
--- a/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast
+++ b/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast
@@ -11,13 +11,14 @@
TYPE_BOUND_LIST
TYPE_BOUND
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
diff --git a/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast
index 56e2d10..cb29615 100644
--- a/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast
+++ b/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast
@@ -29,10 +29,11 @@
TYPE_BOUND
QUESTION "?"
FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
diff --git a/crates/parser/test_data/parser/inline/ok/where_pred_for.rast b/crates/parser/test_data/parser/inline/ok/where_pred_for.rast
index 0cc365e..b10b953 100644
--- a/crates/parser/test_data/parser/inline/ok/where_pred_for.rast
+++ b/crates/parser/test_data/parser/inline/ok/where_pred_for.rast
@@ -18,13 +18,14 @@
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
diff --git a/crates/parser/test_data/parser/ok/0032_where_for.rast b/crates/parser/test_data/parser/ok/0032_where_for.rast
index 86f6af9..dcaf58f 100644
--- a/crates/parser/test_data/parser/ok/0032_where_for.rast
+++ b/crates/parser/test_data/parser/ok/0032_where_for.rast
@@ -36,7 +36,7 @@
PLUS "+"
WHITESPACE " "
TYPE_BOUND
- FOR_TYPE
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
@@ -44,18 +44,18 @@
LIFETIME
LIFETIME_IDENT "'de"
R_ANGLE ">"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Deserialize"
- GENERIC_ARG_LIST
- L_ANGLE "<"
- LIFETIME_ARG
- LIFETIME
- LIFETIME_IDENT "'de"
- R_ANGLE ">"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Deserialize"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ LIFETIME_ARG
+ LIFETIME
+ LIFETIME_IDENT "'de"
+ R_ANGLE ">"
WHITESPACE " "
PLUS "+"
WHITESPACE " "
diff --git a/crates/parser/test_data/parser/ok/0067_where_for_pred.rast b/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
index 8bf1090..5cef4df 100644
--- a/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
+++ b/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
@@ -18,13 +18,14 @@
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
@@ -81,13 +82,14 @@
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
REF_TYPE
AMP "&"
@@ -135,13 +137,14 @@
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
PAREN_TYPE
L_PAREN "("
@@ -206,13 +209,14 @@
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
SLICE_TYPE
L_BRACK "["
@@ -276,13 +280,14 @@
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
PATH_TYPE
PATH
@@ -349,22 +354,24 @@
WHERE_KW "where"
WHITESPACE "\n "
WHERE_PRED
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
- WHITESPACE " "
- FOR_TYPE
+ FOR_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
- LIFETIME_IDENT "'b"
+ LIFETIME_IDENT "'a"
R_ANGLE ">"
+ WHITESPACE " "
+ FOR_TYPE
+ FOR_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'b"
+ R_ANGLE ">"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"
diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml
index 27fe9f7..0dbb309 100644
--- a/crates/project-model/Cargo.toml
+++ b/crates/project-model/Cargo.toml
@@ -20,6 +20,7 @@
serde_json.workspace = true
serde.workspace = true
serde_derive.workspace = true
+temp-dir.workspace = true
tracing.workspace = true
triomphe.workspace = true
la-arena.workspace = true
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index 499caa6..5bea74b 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -16,11 +16,13 @@
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::Deserialize as _;
+use stdx::never;
use toolchain::Tool;
use crate::{
CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot,
- TargetKind, utf8_stdout,
+ TargetKind, cargo_config_file::make_lockfile_copy,
+ cargo_workspace::MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH, utf8_stdout,
};
/// Output of the build script and proc-macro building steps for a workspace.
@@ -30,6 +32,15 @@
error: Option<String>,
}
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub enum ProcMacroDylibPath {
+ Path(AbsPathBuf),
+ DylibNotFound,
+ NotProcMacro,
+ #[default]
+ NotBuilt,
+}
+
/// Output of the build script and proc-macro building step for a concrete package.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct BuildScriptOutput {
@@ -43,7 +54,7 @@
/// Directory where a build script might place its output.
pub(crate) out_dir: Option<AbsPathBuf>,
/// Path to the proc-macro library file if this package exposes proc-macros.
- pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+ pub(crate) proc_macro_dylib_path: ProcMacroDylibPath,
}
impl BuildScriptOutput {
@@ -51,7 +62,10 @@
self.cfgs.is_empty()
&& self.envs.is_empty()
&& self.out_dir.is_none()
- && self.proc_macro_dylib_path.is_none()
+ && matches!(
+ self.proc_macro_dylib_path,
+ ProcMacroDylibPath::NotBuilt | ProcMacroDylibPath::NotProcMacro
+ )
}
}
@@ -67,7 +81,7 @@
let current_dir = workspace.workspace_root();
let allowed_features = workspace.workspace_features();
- let cmd = Self::build_command(
+ let (_guard, cmd) = Self::build_command(
config,
&allowed_features,
workspace.manifest_path(),
@@ -88,7 +102,7 @@
) -> io::Result<Vec<WorkspaceBuildScripts>> {
assert_eq!(config.invocation_strategy, InvocationStrategy::Once);
- let cmd = Self::build_command(
+ let (_guard, cmd) = Self::build_command(
config,
&Default::default(),
// This is not gonna be used anyways, so just construct a dummy here
@@ -126,6 +140,8 @@
|package, cb| {
if let Some(&(package, workspace)) = by_id.get(package) {
cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
+ } else {
+ never!("Received compiler message for unknown package: {}", package);
}
},
progress,
@@ -140,12 +156,9 @@
if tracing::enabled!(tracing::Level::INFO) {
for (idx, workspace) in workspaces.iter().enumerate() {
for package in workspace.packages() {
- let package_build_data = &mut res[idx].outputs[package];
+ let package_build_data: &mut BuildScriptOutput = &mut res[idx].outputs[package];
if !package_build_data.is_empty() {
- tracing::info!(
- "{}: {package_build_data:?}",
- workspace[package].manifest.parent(),
- );
+ tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,);
}
}
}
@@ -198,10 +211,33 @@
let path = dir_entry.path();
let extension = path.extension()?;
if extension == std::env::consts::DLL_EXTENSION {
- let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned();
- let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?)
- .ok()?;
- return Some((name, path));
+ let name = path
+ .file_stem()?
+ .to_str()?
+ .split_once('-')?
+ .0
+ .trim_start_matches("lib")
+ .to_owned();
+ let path = match Utf8PathBuf::from_path_buf(path) {
+ Ok(path) => path,
+ Err(path) => {
+ tracing::warn!(
+ "Proc-macro dylib path contains non-UTF8 characters: {:?}",
+ path.display()
+ );
+ return None;
+ }
+ };
+ return match AbsPathBuf::try_from(path) {
+ Ok(path) => Some((name, path)),
+ Err(path) => {
+ tracing::error!(
+ "proc-macro dylib path is not absolute: {:?}",
+ path
+ );
+ None
+ }
+ };
}
}
None
@@ -209,28 +245,24 @@
.collect();
for p in rustc.packages() {
let package = &rustc[p];
- if package
- .targets
- .iter()
- .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true }))
- {
- if let Some((_, path)) = proc_macro_dylibs
- .iter()
- .find(|(name, _)| *name.trim_start_matches("lib") == package.name)
- {
- bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
+ bs.outputs[p].proc_macro_dylib_path =
+ if package.targets.iter().any(|&it| {
+ matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })
+ }) {
+ match proc_macro_dylibs.iter().find(|(name, _)| *name == package.name) {
+ Some((_, path)) => ProcMacroDylibPath::Path(path.clone()),
+ _ => ProcMacroDylibPath::DylibNotFound,
+ }
+ } else {
+ ProcMacroDylibPath::NotProcMacro
}
- }
}
if tracing::enabled!(tracing::Level::INFO) {
for package in rustc.packages() {
let package_build_data = &bs.outputs[package];
if !package_build_data.is_empty() {
- tracing::info!(
- "{}: {package_build_data:?}",
- rustc[package].manifest.parent(),
- );
+ tracing::info!("{}: {package_build_data:?}", rustc[package].manifest,);
}
}
}
@@ -263,6 +295,12 @@
|package, cb| {
if let Some(&package) = by_id.get(package) {
cb(&workspace[package].name, &mut outputs[package]);
+ } else {
+ never!(
+ "Received compiler message for unknown package: {}\n {}",
+ package,
+ by_id.keys().join(", ")
+ );
}
},
progress,
@@ -272,10 +310,7 @@
for package in workspace.packages() {
let package_build_data = &outputs[package];
if !package_build_data.is_empty() {
- tracing::info!(
- "{}: {package_build_data:?}",
- workspace[package].manifest.parent(),
- );
+ tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,);
}
}
}
@@ -348,15 +383,23 @@
progress(format!(
"building compile-time-deps: proc-macro {name} built"
));
- if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
+ if data.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt {
+ data.proc_macro_dylib_path = ProcMacroDylibPath::NotProcMacro;
+ }
+ if !matches!(data.proc_macro_dylib_path, ProcMacroDylibPath::Path(_))
+ && message
+ .target
+ .kind
+ .contains(&cargo_metadata::TargetKind::ProcMacro)
{
- // Skip rmeta file
- if let Some(filename) =
- message.filenames.iter().find(|file| is_dylib(file))
- {
- let filename = AbsPath::assert(filename);
- data.proc_macro_dylib_path = Some(filename.to_owned());
- }
+ data.proc_macro_dylib_path =
+ match message.filenames.iter().find(|file| is_dylib(file)) {
+ Some(filename) => {
+ let filename = AbsPath::assert(filename);
+ ProcMacroDylibPath::Path(filename.to_owned())
+ }
+ None => ProcMacroDylibPath::DylibNotFound,
+ };
}
});
}
@@ -393,14 +436,15 @@
current_dir: &AbsPath,
sysroot: &Sysroot,
toolchain: Option<&semver::Version>,
- ) -> io::Result<Command> {
+ ) -> io::Result<(Option<temp_dir::TempDir>, Command)> {
match config.run_build_script_command.as_deref() {
Some([program, args @ ..]) => {
let mut cmd = toolchain::command(program, current_dir, &config.extra_env);
cmd.args(args);
- Ok(cmd)
+ Ok((None, cmd))
}
_ => {
+ let mut requires_unstable_options = false;
let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
@@ -416,7 +460,19 @@
if let Some(target) = &config.target {
cmd.args(["--target", target]);
}
-
+ let mut temp_dir_guard = None;
+ if toolchain
+ .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
+ {
+ let lockfile_path =
+ <_ as AsRef<Utf8Path>>::as_ref(manifest_path).with_extension("lock");
+ if let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile_path) {
+ requires_unstable_options = true;
+ temp_dir_guard = Some(temp_dir);
+ cmd.arg("--lockfile-path");
+ cmd.arg(target_lockfile.as_str());
+ }
+ }
match &config.features {
CargoFeatures::All => {
cmd.arg("--all-features");
@@ -438,6 +494,7 @@
}
if manifest_path.is_rust_manifest() {
+ requires_unstable_options = true;
cmd.arg("-Zscript");
}
@@ -457,8 +514,7 @@
toolchain.is_some_and(|v| *v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION);
if cargo_comp_time_deps_available {
- cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
- cmd.arg("-Zunstable-options");
+ requires_unstable_options = true;
cmd.arg("--compile-time-deps");
// we can pass this unconditionally, because we won't actually build the
// binaries, and as such, this will succeed even on targets without libtest
@@ -481,7 +537,11 @@
cmd.env("RA_RUSTC_WRAPPER", "1");
}
}
- Ok(cmd)
+ if requires_unstable_options {
+ cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
+ cmd.arg("-Zunstable-options");
+ }
+ Ok((temp_dir_guard, cmd))
}
}
}
diff --git a/crates/project-model/src/cargo_config_file.rs b/crates/project-model/src/cargo_config_file.rs
index 7966f74..a1e7ed0 100644
--- a/crates/project-model/src/cargo_config_file.rs
+++ b/crates/project-model/src/cargo_config_file.rs
@@ -1,4 +1,5 @@
//! Read `.cargo/config.toml` as a JSON object
+use paths::{Utf8Path, Utf8PathBuf};
use rustc_hash::FxHashMap;
use toolchain::Tool;
@@ -32,3 +33,24 @@
Some(json)
}
+
+pub(crate) fn make_lockfile_copy(
+ lockfile_path: &Utf8Path,
+) -> Option<(temp_dir::TempDir, Utf8PathBuf)> {
+ let temp_dir = temp_dir::TempDir::with_prefix("rust-analyzer").ok()?;
+ let target_lockfile = temp_dir.path().join("Cargo.lock").try_into().ok()?;
+ match std::fs::copy(lockfile_path, &target_lockfile) {
+ Ok(_) => {
+ tracing::debug!("Copied lock file from `{}` to `{}`", lockfile_path, target_lockfile);
+ Some((temp_dir, target_lockfile))
+ }
+ // lockfile does not yet exist, so we can just create a new one in the temp dir
+ Err(e) if e.kind() == std::io::ErrorKind::NotFound => Some((temp_dir, target_lockfile)),
+ Err(e) => {
+ tracing::warn!(
+ "Failed to copy lock file from `{lockfile_path}` to `{target_lockfile}`: {e}",
+ );
+ None
+ }
+ }
+}
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index daadcd9..e613fd5 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -15,16 +15,18 @@
use stdx::process::spawn_with_streaming_output;
use toolchain::Tool;
+use crate::cargo_config_file::make_lockfile_copy;
use crate::{CfgOverrides, InvocationStrategy};
use crate::{ManifestPath, Sysroot};
-const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = semver::Version {
- major: 1,
- minor: 82,
- patch: 0,
- pre: semver::Prerelease::EMPTY,
- build: semver::BuildMetadata::EMPTY,
-};
+pub(crate) const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version =
+ semver::Version {
+ major: 1,
+ minor: 82,
+ patch: 0,
+ pre: semver::Prerelease::EMPTY,
+ build: semver::BuildMetadata::EMPTY,
+ };
/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
/// workspace. It pretty closely mirrors `cargo metadata` output.
@@ -245,7 +247,7 @@
}
impl TargetKind {
- fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
+ pub fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
for kind in kinds {
return match kind {
cargo_metadata::TargetKind::Bin => TargetKind::Bin,
@@ -552,7 +554,10 @@
pub(crate) struct FetchMetadata {
command: cargo_metadata::MetadataCommand,
+ #[expect(dead_code)]
+ manifest_path: ManifestPath,
lockfile_path: Option<Utf8PathBuf>,
+ #[expect(dead_code)]
kind: &'static str,
no_deps: bool,
no_deps_result: anyhow::Result<cargo_metadata::Metadata>,
@@ -596,25 +601,22 @@
}
command.current_dir(current_dir);
- let mut needs_nightly = false;
let mut other_options = vec![];
// cargo metadata only supports a subset of flags of what cargo usually accepts, and usually
// the only relevant flags for metadata here are unstable ones, so we pass those along
// but nothing else
let mut extra_args = config.extra_args.iter();
while let Some(arg) = extra_args.next() {
- if arg == "-Z" {
- if let Some(arg) = extra_args.next() {
- needs_nightly = true;
- other_options.push("-Z".to_owned());
- other_options.push(arg.to_owned());
- }
+ if arg == "-Z"
+ && let Some(arg) = extra_args.next()
+ {
+ other_options.push("-Z".to_owned());
+ other_options.push(arg.to_owned());
}
}
let mut lockfile_path = None;
if cargo_toml.is_rust_manifest() {
- needs_nightly = true;
other_options.push("-Zscript".to_owned());
} else if config
.toolchain_version
@@ -632,10 +634,6 @@
command.other_options(other_options.clone());
- if needs_nightly {
- command.env("RUSTC_BOOTSTRAP", "1");
- }
-
// Pre-fetch basic metadata using `--no-deps`, which:
// - avoids fetching registries like crates.io,
// - skips dependency resolution and does not modify lockfiles,
@@ -655,7 +653,15 @@
}
.with_context(|| format!("Failed to run `{cargo_command:?}`"));
- Self { command, lockfile_path, kind: config.kind, no_deps, no_deps_result, other_options }
+ Self {
+ manifest_path: cargo_toml.clone(),
+ command,
+ lockfile_path,
+ kind: config.kind,
+ no_deps,
+ no_deps_result,
+ other_options,
+ }
}
pub(crate) fn no_deps_metadata(&self) -> Option<&cargo_metadata::Metadata> {
@@ -672,40 +678,34 @@
locked: bool,
progress: &dyn Fn(String),
) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
- let Self { mut command, lockfile_path, kind, no_deps, no_deps_result, mut other_options } =
- self;
+ _ = target_dir;
+ let Self {
+ mut command,
+ manifest_path: _,
+ lockfile_path,
+ kind: _,
+ no_deps,
+ no_deps_result,
+ mut other_options,
+ } = self;
if no_deps {
return no_deps_result.map(|m| (m, None));
}
let mut using_lockfile_copy = false;
- // The manifest is a rust file, so this means its a script manifest
- if let Some(lockfile) = lockfile_path {
- let target_lockfile =
- target_dir.join("rust-analyzer").join("metadata").join(kind).join("Cargo.lock");
- match std::fs::copy(&lockfile, &target_lockfile) {
- Ok(_) => {
- using_lockfile_copy = true;
- other_options.push("--lockfile-path".to_owned());
- other_options.push(target_lockfile.to_string());
- }
- Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
- // There exists no lockfile yet
- using_lockfile_copy = true;
- other_options.push("--lockfile-path".to_owned());
- other_options.push(target_lockfile.to_string());
- }
- Err(e) => {
- tracing::warn!(
- "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
- );
- }
- }
+ let mut _temp_dir_guard;
+ if let Some(lockfile) = lockfile_path
+ && let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile)
+ {
+ _temp_dir_guard = temp_dir;
+ other_options.push("--lockfile-path".to_owned());
+ other_options.push(target_lockfile.to_string());
+ using_lockfile_copy = true;
}
- if using_lockfile_copy {
+ if using_lockfile_copy || other_options.iter().any(|it| it.starts_with("-Z")) {
+ command.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
other_options.push("-Zunstable-options".to_owned());
- command.env("RUSTC_BOOTSTRAP", "1");
}
// No need to lock it if we copied the lockfile, we won't modify the original after all/
// This way cargo cannot error out on us if the lockfile requires updating.
@@ -714,13 +714,11 @@
}
command.other_options(other_options);
- // FIXME: Fetching metadata is a slow process, as it might require
- // calling crates.io. We should be reporting progress here, but it's
- // unclear whether cargo itself supports it.
progress("cargo metadata: started".to_owned());
let res = (|| -> anyhow::Result<(_, _)> {
let mut errored = false;
+ tracing::debug!("Running `{:?}`", command.cargo_command());
let output =
spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| {
errored = errored || line.starts_with("error") || line.starts_with("warning");
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs
index 3bf3d06..d39781b 100644
--- a/crates/project-model/src/lib.rs
+++ b/crates/project-model/src/lib.rs
@@ -59,7 +59,7 @@
use rustc_hash::FxHashSet;
pub use crate::{
- build_dependencies::WorkspaceBuildScripts,
+ build_dependencies::{ProcMacroDylibPath, WorkspaceBuildScripts},
cargo_workspace::{
CargoConfig, CargoFeatures, CargoMetadataConfig, CargoWorkspace, Package, PackageData,
PackageDependency, RustLibSource, Target, TargetData, TargetKind,
@@ -139,21 +139,22 @@
}
fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option<ManifestPath> {
- if path.file_name().unwrap_or_default() == target_file_name {
- if let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) {
- return Some(manifest);
- }
+ if path.file_name().unwrap_or_default() == target_file_name
+ && let Ok(manifest) = ManifestPath::try_from(path.to_path_buf())
+ {
+ return Some(manifest);
}
let mut curr = Some(path);
while let Some(path) = curr {
let candidate = path.join(target_file_name);
- if fs::metadata(&candidate).is_ok() {
- if let Ok(manifest) = ManifestPath::try_from(candidate) {
- return Some(manifest);
- }
+ if fs::metadata(&candidate).is_ok()
+ && let Ok(manifest) = ManifestPath::try_from(candidate)
+ {
+ return Some(manifest);
}
+
curr = path.parent();
}
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 9781c46..c0a5009 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -143,12 +143,11 @@
Some(root) => {
// special case rustc, we can look that up directly in the sysroot's bin folder
// as it should never invoke another cargo binary
- if let Tool::Rustc = tool {
- if let Some(path) =
+ if let Tool::Rustc = tool
+ && let Some(path) =
probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into())
- {
- return toolchain::command(path, current_dir, envs);
- }
+ {
+ return toolchain::command(path, current_dir, envs);
}
let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir, envs);
@@ -291,29 +290,26 @@
pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) {
self.workspace = workspace;
- if self.error.is_none() {
- if let Some(src_root) = &self.rust_lib_src_root {
- let has_core = match &self.workspace {
- RustLibSrcWorkspace::Workspace(ws) => {
- ws.packages().any(|p| ws[p].name == "core")
- }
- RustLibSrcWorkspace::Json(project_json) => project_json
- .crates()
- .filter_map(|(_, krate)| krate.display_name.clone())
- .any(|name| name.canonical_name().as_str() == "core"),
- RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
- RustLibSrcWorkspace::Empty => true,
+ if self.error.is_none()
+ && let Some(src_root) = &self.rust_lib_src_root
+ {
+ let has_core = match &self.workspace {
+ RustLibSrcWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
+ RustLibSrcWorkspace::Json(project_json) => project_json
+ .crates()
+ .filter_map(|(_, krate)| krate.display_name.clone())
+ .any(|name| name.canonical_name().as_str() == "core"),
+ RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
+ RustLibSrcWorkspace::Empty => true,
+ };
+ if !has_core {
+ let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
+ " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
+ } else {
+ ", try running `rustup component add rust-src` to possibly fix this"
};
- if !has_core {
- let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
- " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
- } else {
- ", try running `rustup component add rust-src` to possibly fix this"
- };
- self.error = Some(format!(
- "sysroot at `{src_root}` is missing a `core` library{var_note}",
- ));
- }
+ self.error =
+ Some(format!("sysroot at `{src_root}` is missing a `core` library{var_note}",));
}
}
}
diff --git a/crates/project-model/src/toolchain_info/rustc_cfg.rs b/crates/project-model/src/toolchain_info/rustc_cfg.rs
index 6e06e88..ab69c8e 100644
--- a/crates/project-model/src/toolchain_info/rustc_cfg.rs
+++ b/crates/project-model/src/toolchain_info/rustc_cfg.rs
@@ -65,6 +65,7 @@
let (sysroot, current_dir) = match config {
QueryConfig::Cargo(sysroot, cargo_toml, _) => {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
+ cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS);
if let Some(target) = target {
cmd.args(["--target", target]);
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 677f29e..5b36e10 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -24,7 +24,7 @@
CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package,
ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
WorkspaceBuildScripts,
- build_dependencies::BuildScriptOutput,
+ build_dependencies::{BuildScriptOutput, ProcMacroDylibPath},
cargo_config_file,
cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource},
env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
@@ -424,12 +424,12 @@
sysroot.set_workspace(loaded_sysroot);
}
- if !cargo.requires_rustc_private() {
- if let Err(e) = &mut rustc {
- // We don't need the rustc sources here,
- // so just discard the error.
- _ = e.take();
- }
+ if !cargo.requires_rustc_private()
+ && let Err(e) = &mut rustc
+ {
+ // We don't need the rustc sources here,
+ // so just discard the error.
+ _ = e.take();
}
Ok(ProjectWorkspace {
@@ -1163,17 +1163,15 @@
crate = display_name.as_ref().map(|name| name.canonical_name().as_str()),
"added root to crate graph"
);
- if *is_proc_macro {
- if let Some(path) = proc_macro_dylib_path.clone() {
- let node = Ok((
- display_name
- .as_ref()
- .map(|it| it.canonical_name().as_str().to_owned())
- .unwrap_or_else(|| format!("crate{}", idx.0)),
- path,
- ));
- proc_macros.insert(crate_graph_crate_id, node);
- }
+ if *is_proc_macro && let Some(path) = proc_macro_dylib_path.clone() {
+ let node = Ok((
+ display_name
+ .as_ref()
+ .map(|it| it.canonical_name().as_str().to_owned())
+ .unwrap_or_else(|| format!("crate{}", idx.0)),
+ path,
+ ));
+ proc_macros.insert(crate_graph_crate_id, node);
}
(idx, crate_graph_crate_id)
},
@@ -1318,16 +1316,17 @@
public_deps.add_to_crate_graph(crate_graph, from);
// Add dep edge of all targets to the package's lib target
- if let Some((to, name)) = lib_tgt.clone() {
- if to != from && kind != TargetKind::BuildScript {
- // (build script can not depend on its library target)
+ if let Some((to, name)) = lib_tgt.clone()
+ && to != from
+ && kind != TargetKind::BuildScript
+ {
+ // (build script can not depend on its library target)
- // For root projects with dashes in their name,
- // cargo metadata does not do any normalization,
- // so we do it ourselves currently
- let name = CrateName::normalize_dashes(&name);
- add_dep(crate_graph, from, name, to);
- }
+ // For root projects with dashes in their name,
+ // cargo metadata does not do any normalization,
+ // so we do it ourselves currently
+ let name = CrateName::normalize_dashes(&name);
+ add_dep(crate_graph, from, name, to);
}
}
}
@@ -1638,9 +1637,19 @@
let proc_macro = match build_data {
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
- Some(path) => Ok((cargo_name.to_owned(), path.clone())),
- None if has_errors => Err(ProcMacroLoadingError::FailedToBuild),
- None => Err(ProcMacroLoadingError::MissingDylibPath),
+ ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())),
+ ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt),
+ ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound
+ if has_errors =>
+ {
+ Err(ProcMacroLoadingError::FailedToBuild)
+ }
+ ProcMacroDylibPath::NotProcMacro => {
+ Err(ProcMacroLoadingError::ExpectedProcMacroArtifact)
+ }
+ ProcMacroDylibPath::DylibNotFound => {
+ Err(ProcMacroLoadingError::MissingDylibPath)
+ }
}
}
None => Err(ProcMacroLoadingError::NotYetBuilt),
@@ -1905,7 +1914,8 @@
meta.manifest_path(manifest);
// `--no-deps` doesn't (over)write lockfiles as it doesn't do any package resolve.
// So we can use it to get `target_directory` before copying lockfiles
- let mut other_options = vec!["--no-deps".to_owned()];
+ meta.no_deps();
+ let mut other_options = vec![];
if manifest.is_rust_manifest() {
meta.env("RUSTC_BOOTSTRAP", "1");
other_options.push("-Zscript".to_owned());
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index fc89f48..4f75d14 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -656,22 +656,26 @@
let mut sw = self.stop_watch();
let mut all = 0;
let mut fail = 0;
- for &body in bodies {
- if matches!(body, DefWithBody::Variant(_)) {
+ for &body_id in bodies {
+ if matches!(body_id, DefWithBody::Variant(_)) {
continue;
}
+ let module = body_id.module(db);
+ if !self.should_process(db, body_id, module) {
+ continue;
+ }
+
all += 1;
- let Err(e) = db.mir_body(body.into()) else {
+ let Err(e) = db.mir_body(body_id.into()) else {
continue;
};
if verbosity.is_spammy() {
- let full_name = body
- .module(db)
+ let full_name = module
.path_to_root(db)
.into_iter()
.rev()
.filter_map(|it| it.name(db))
- .chain(Some(body.name(db).unwrap_or_else(Name::missing)))
+ .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
.map(|it| it.display(db, Edition::LATEST).to_string())
.join("::");
bar.println(format!("Mir body for {full_name} failed due {e:?}"));
@@ -727,26 +731,9 @@
let name = body_id.name(db).unwrap_or_else(Name::missing);
let module = body_id.module(db);
let display_target = module.krate().to_display_target(db);
- let full_name = move || {
- module
- .krate()
- .display_name(db)
- .map(|it| it.canonical_name().as_str().to_owned())
- .into_iter()
- .chain(
- module
- .path_to_root(db)
- .into_iter()
- .filter_map(|it| it.name(db))
- .rev()
- .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
- .map(|it| it.display(db, Edition::LATEST).to_string()),
- )
- .join("::")
- };
if let Some(only_name) = self.only.as_deref() {
if name.display(db, Edition::LATEST).to_string() != only_name
- && full_name() != only_name
+ && full_name(db, body_id, module) != only_name
{
continue;
}
@@ -763,12 +750,17 @@
let original_file = src.file_id.original_file(db);
let path = vfs.file_path(original_file.file_id(db));
let syntax_range = src.text_range();
- format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
+ format!(
+ "processing: {} ({} {:?})",
+ full_name(db, body_id, module),
+ path,
+ syntax_range
+ )
} else {
- format!("processing: {}", full_name())
+ format!("processing: {}", full_name(db, body_id, module))
}
} else {
- format!("processing: {}", full_name())
+ format!("processing: {}", full_name(db, body_id, module))
}
};
if verbosity.is_spammy() {
@@ -781,9 +773,11 @@
Ok(inference_result) => inference_result,
Err(p) => {
if let Some(s) = p.downcast_ref::<&str>() {
- eprintln!("infer panicked for {}: {}", full_name(), s);
+ eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
} else if let Some(s) = p.downcast_ref::<String>() {
- eprintln!("infer panicked for {}: {}", full_name(), s);
+ eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
+ } else {
+ eprintln!("infer panicked for {}", full_name(db, body_id, module));
}
panics += 1;
bar.inc(1);
@@ -890,7 +884,7 @@
if verbosity.is_spammy() {
bar.println(format!(
"In {}: {} exprs, {} unknown, {} partial",
- full_name(),
+ full_name(db, body_id, module),
num_exprs - previous_exprs,
num_exprs_unknown - previous_unknown,
num_exprs_partially_unknown - previous_partially_unknown
@@ -993,7 +987,7 @@
if verbosity.is_spammy() {
bar.println(format!(
"In {}: {} pats, {} unknown, {} partial",
- full_name(),
+ full_name(db, body_id, module),
num_pats - previous_pats,
num_pats_unknown - previous_unknown,
num_pats_partially_unknown - previous_partially_unknown
@@ -1049,34 +1043,8 @@
bar.tick();
for &body_id in bodies {
let module = body_id.module(db);
- let full_name = move || {
- module
- .krate()
- .display_name(db)
- .map(|it| it.canonical_name().as_str().to_owned())
- .into_iter()
- .chain(
- module
- .path_to_root(db)
- .into_iter()
- .filter_map(|it| it.name(db))
- .rev()
- .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
- .map(|it| it.display(db, Edition::LATEST).to_string()),
- )
- .join("::")
- };
- if let Some(only_name) = self.only.as_deref() {
- if body_id
- .name(db)
- .unwrap_or_else(Name::missing)
- .display(db, Edition::LATEST)
- .to_string()
- != only_name
- && full_name() != only_name
- {
- continue;
- }
+ if !self.should_process(db, body_id, module) {
+ continue;
}
let msg = move || {
if verbosity.is_verbose() {
@@ -1090,12 +1058,17 @@
let original_file = src.file_id.original_file(db);
let path = vfs.file_path(original_file.file_id(db));
let syntax_range = src.text_range();
- format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
+ format!(
+ "processing: {} ({} {:?})",
+ full_name(db, body_id, module),
+ path,
+ syntax_range
+ )
} else {
- format!("processing: {}", full_name())
+ format!("processing: {}", full_name(db, body_id, module))
}
} else {
- format!("processing: {}", full_name())
+ format!("processing: {}", full_name(db, body_id, module))
}
};
if verbosity.is_spammy() {
@@ -1205,11 +1178,42 @@
eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len());
}
+ fn should_process(&self, db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> bool {
+ if let Some(only_name) = self.only.as_deref() {
+ let name = body_id.name(db).unwrap_or_else(Name::missing);
+
+ if name.display(db, Edition::LATEST).to_string() != only_name
+ && full_name(db, body_id, module) != only_name
+ {
+ return false;
+ }
+ }
+ true
+ }
+
fn stop_watch(&self) -> StopWatch {
StopWatch::start()
}
}
+fn full_name(db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> String {
+ module
+ .krate()
+ .display_name(db)
+ .map(|it| it.canonical_name().as_str().to_owned())
+ .into_iter()
+ .chain(
+ module
+ .path_to_root(db)
+ .into_iter()
+ .filter_map(|it| it.name(db))
+ .rev()
+ .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
+ .map(|it| it.display(db, Edition::LATEST).to_string()),
+ )
+ .join("::")
+}
+
fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String {
let src = match sm.expr_syntax(expr_id) {
Ok(s) => s,
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 51d4c29..9456fd8 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -2162,6 +2162,7 @@
extra_test_bin_args: self.runnables_extraTestBinaryArgs(source_root).clone(),
extra_env: self.extra_env(source_root).clone(),
target_dir: self.target_dir_from_config(source_root),
+ set_test: true,
}
}
@@ -2219,6 +2220,7 @@
extra_test_bin_args: self.runnables_extraTestBinaryArgs(source_root).clone(),
extra_env: self.check_extra_env(source_root),
target_dir: self.target_dir_from_config(source_root),
+ set_test: *self.cfg_setTest(source_root),
},
ansi_color_output: self.color_diagnostic_output(),
},
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index 91d37bd..512ce0b 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -31,6 +31,7 @@
pub(crate) struct CargoOptions {
pub(crate) target_tuples: Vec<String>,
pub(crate) all_targets: bool,
+ pub(crate) set_test: bool,
pub(crate) no_default_features: bool,
pub(crate) all_features: bool,
pub(crate) features: Vec<String>,
@@ -54,7 +55,13 @@
cmd.args(["--target", target.as_str()]);
}
if self.all_targets {
- cmd.arg("--all-targets");
+ if self.set_test {
+ cmd.arg("--all-targets");
+ } else {
+ // No --benches unfortunately, as this implies --tests (see https://github.com/rust-lang/cargo/issues/6454),
+ // and users setting `cfg.setTest = false` probably prefer disabling benches to enabling tests.
+ cmd.args(["--lib", "--bins", "--examples"]);
+ }
}
if self.all_features {
cmd.arg("--all-features");
@@ -104,7 +111,18 @@
match self {
FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"),
FlycheckConfig::CustomCommand { command, args, .. } => {
- write!(f, "{command} {}", args.join(" "))
+ // Don't show `my_custom_check --foo $saved_file` literally to the user, as it
+ // looks like we've forgotten to substitute $saved_file.
+ //
+ // Instead, show `my_custom_check --foo ...`. The
+ // actual path is often too long to be worth showing
+ // in the IDE (e.g. in the VS Code status bar).
+ let display_args = args
+ .iter()
+ .map(|arg| if arg == SAVED_FILE_PLACEHOLDER { "..." } else { arg })
+ .collect::<Vec<_>>();
+
+ write!(f, "{command} {}", display_args.join(" "))
}
}
}
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index 4cbc88c..6d8a360 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -101,7 +101,7 @@
'where' predicates:(WherePred (',' WherePred)* ','?)
WherePred =
- ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
+ ForBinder? (Lifetime | Type) ':' TypeBoundList?
//*************************//
@@ -534,10 +534,10 @@
Attr* Expr '.' NameRef
ClosureExpr =
- Attr* ClosureBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType?
+ Attr* ForBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType?
body:Expr
-ClosureBinder =
+ForBinder =
'for' GenericParamList
IfExpr =
@@ -658,7 +658,7 @@
'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType?
ForType =
- 'for' GenericParamList Type
+ ForBinder Type
ImplTraitType =
'impl' TypeBoundList
@@ -671,7 +671,7 @@
TypeBound =
Lifetime
-| ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? Type
+| ForBinder? ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? Type
| 'use' UseBoundGenericArgs
UseBoundGenericArgs =
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index d787fd0..a9aeeed 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -393,8 +393,7 @@
let pred = predicates.next().unwrap();
let mut bounds = pred.type_bound_list().unwrap().bounds();
- assert!(pred.for_token().is_none());
- assert!(pred.generic_param_list().is_none());
+ assert!(pred.for_binder().is_none());
assert_eq!("T", pred.ty().unwrap().syntax().text().to_string());
assert_bound("Clone", bounds.next());
assert_bound("Copy", bounds.next());
@@ -432,8 +431,10 @@
let pred = predicates.next().unwrap();
let mut bounds = pred.type_bound_list().unwrap().bounds();
- assert!(pred.for_token().is_some());
- assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string());
+ assert_eq!(
+ "<'a>",
+ pred.for_binder().unwrap().generic_param_list().unwrap().syntax().text().to_string()
+ );
assert_eq!("F", pred.ty().unwrap().syntax().text().to_string());
assert_bound("Fn(&'a str)", bounds.next());
}
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs
index 37cb4a4..d97fdec 100644
--- a/crates/syntax/src/ast/edit.rs
+++ b/crates/syntax/src/ast/edit.rs
@@ -6,9 +6,12 @@
use crate::{
AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
ast::{self, AstNode, make},
+ syntax_editor::{SyntaxEditor, SyntaxMappingBuilder},
ted,
};
+use super::syntax_factory::SyntaxFactory;
+
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct IndentLevel(pub u8);
@@ -95,6 +98,24 @@
}
}
+ pub(super) fn clone_increase_indent(self, node: &SyntaxNode) -> SyntaxNode {
+ let node = node.clone_subtree();
+ let mut editor = SyntaxEditor::new(node.clone());
+ let tokens = node
+ .preorder_with_tokens()
+ .filter_map(|event| match event {
+ rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
+ _ => None,
+ })
+ .filter_map(ast::Whitespace::cast)
+ .filter(|ws| ws.text().contains('\n'));
+ for ws in tokens {
+ let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax()));
+ editor.replace(ws.syntax(), &new_ws);
+ }
+ editor.finish().new_root().clone()
+ }
+
pub(super) fn decrease_indent(self, node: &SyntaxNode) {
let tokens = node.preorder_with_tokens().filter_map(|event| match event {
rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
@@ -111,36 +132,54 @@
}
}
}
+
+ pub(super) fn clone_decrease_indent(self, node: &SyntaxNode) -> SyntaxNode {
+ let node = node.clone_subtree();
+ let mut editor = SyntaxEditor::new(node.clone());
+ let tokens = node
+ .preorder_with_tokens()
+ .filter_map(|event| match event {
+ rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
+ _ => None,
+ })
+ .filter_map(ast::Whitespace::cast)
+ .filter(|ws| ws.text().contains('\n'));
+ for ws in tokens {
+ let new_ws =
+ make::tokens::whitespace(&ws.syntax().text().replace(&format!("\n{self}"), "\n"));
+ editor.replace(ws.syntax(), &new_ws);
+ }
+ editor.finish().new_root().clone()
+ }
}
fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
iter::successors(Some(token), |token| token.prev_token())
}
-/// Soft-deprecated in favor of mutable tree editing API `edit_in_place::Ident`.
pub trait AstNodeEdit: AstNode + Clone + Sized {
fn indent_level(&self) -> IndentLevel {
IndentLevel::from_node(self.syntax())
}
#[must_use]
fn indent(&self, level: IndentLevel) -> Self {
- fn indent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
- let res = node.clone_subtree().clone_for_update();
- level.increase_indent(&res);
- res.clone_subtree()
+ Self::cast(level.clone_increase_indent(self.syntax())).unwrap()
+ }
+ #[must_use]
+ fn indent_with_mapping(&self, level: IndentLevel, make: &SyntaxFactory) -> Self {
+ let new_node = self.indent(level);
+ if let Some(mut mapping) = make.mappings() {
+ let mut builder = SyntaxMappingBuilder::new(new_node.syntax().clone());
+ for (old, new) in self.syntax().children().zip(new_node.syntax().children()) {
+ builder.map_node(old, new);
+ }
+ builder.finish(&mut mapping);
}
-
- Self::cast(indent_inner(self.syntax(), level)).unwrap()
+ new_node
}
#[must_use]
fn dedent(&self, level: IndentLevel) -> Self {
- fn dedent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
- let res = node.clone_subtree().clone_for_update();
- level.decrease_indent(&res);
- res.clone_subtree()
- }
-
- Self::cast(dedent_inner(self.syntax(), level)).unwrap()
+ Self::cast(level.clone_decrease_indent(self.syntax())).unwrap()
}
#[must_use]
fn reset_indent(&self) -> Self {
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index e902516..28b543e 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -644,7 +644,7 @@
impl ast::Impl {
pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList {
if self.assoc_item_list().is_none() {
- let assoc_item_list = make::assoc_item_list().clone_for_update();
+ let assoc_item_list = make::assoc_item_list(None).clone_for_update();
ted::append_child(self.syntax(), assoc_item_list.syntax());
}
self.assoc_item_list().unwrap()
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 2b86246..ceb2866 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -377,22 +377,13 @@
#[inline]
pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
}
-pub struct ClosureBinder {
- pub(crate) syntax: SyntaxNode,
-}
-impl ClosureBinder {
- #[inline]
- pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
- #[inline]
- pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
-}
pub struct ClosureExpr {
pub(crate) syntax: SyntaxNode,
}
impl ast::HasAttrs for ClosureExpr {}
impl ClosureExpr {
#[inline]
- pub fn closure_binder(&self) -> Option<ClosureBinder> { support::child(&self.syntax) }
+ pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) }
#[inline]
pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
#[inline]
@@ -615,6 +606,15 @@
#[inline]
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
}
+pub struct ForBinder {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ForBinder {
+ #[inline]
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ #[inline]
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+}
pub struct ForExpr {
pub(crate) syntax: SyntaxNode,
}
@@ -632,11 +632,9 @@
}
impl ForType {
#[inline]
- pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) }
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
- #[inline]
- pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
}
pub struct FormatArgsArg {
pub(crate) syntax: SyntaxNode,
@@ -1766,6 +1764,8 @@
}
impl TypeBound {
#[inline]
+ pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) }
+ #[inline]
pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
@@ -1938,13 +1938,11 @@
impl ast::HasTypeBounds for WherePred {}
impl WherePred {
#[inline]
- pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) }
#[inline]
pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
- #[inline]
- pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
}
pub struct WhileExpr {
pub(crate) syntax: SyntaxNode,
@@ -3239,42 +3237,6 @@
f.debug_struct("CastExpr").field("syntax", &self.syntax).finish()
}
}
-impl AstNode for ClosureBinder {
- #[inline]
- fn kind() -> SyntaxKind
- where
- Self: Sized,
- {
- CLOSURE_BINDER
- }
- #[inline]
- fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_BINDER }
- #[inline]
- fn cast(syntax: SyntaxNode) -> Option<Self> {
- if Self::can_cast(syntax.kind()) {
- Some(Self { syntax })
- } else {
- None
- }
- }
- #[inline]
- fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-impl hash::Hash for ClosureBinder {
- fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
-}
-impl Eq for ClosureBinder {}
-impl PartialEq for ClosureBinder {
- fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
-}
-impl Clone for ClosureBinder {
- fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
-}
-impl fmt::Debug for ClosureBinder {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("ClosureBinder").field("syntax", &self.syntax).finish()
- }
-}
impl AstNode for ClosureExpr {
#[inline]
fn kind() -> SyntaxKind
@@ -3815,6 +3777,42 @@
f.debug_struct("FnPtrType").field("syntax", &self.syntax).finish()
}
}
+impl AstNode for ForBinder {
+ #[inline]
+ fn kind() -> SyntaxKind
+ where
+ Self: Sized,
+ {
+ FOR_BINDER
+ }
+ #[inline]
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_BINDER }
+ #[inline]
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ #[inline]
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl hash::Hash for ForBinder {
+ fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for ForBinder {}
+impl PartialEq for ForBinder {
+ fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for ForBinder {
+ fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for ForBinder {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("ForBinder").field("syntax", &self.syntax).finish()
+ }
+}
impl AstNode for ForExpr {
#[inline]
fn kind() -> SyntaxKind
@@ -10146,11 +10144,6 @@
std::fmt::Display::fmt(self.syntax(), f)
}
}
-impl std::fmt::Display for ClosureBinder {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- std::fmt::Display::fmt(self.syntax(), f)
- }
-}
impl std::fmt::Display for ClosureExpr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
@@ -10226,6 +10219,11 @@
std::fmt::Display::fmt(self.syntax(), f)
}
}
+impl std::fmt::Display for ForBinder {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
impl std::fmt::Display for ForExpr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index d67f24f..2a7b51c 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -229,8 +229,18 @@
}
}
-pub fn assoc_item_list() -> ast::AssocItemList {
- ast_from_text("impl C for D {}")
+pub fn assoc_item_list(
+ body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
+) -> ast::AssocItemList {
+ let is_break_braces = body.is_some();
+ let body_newline = if is_break_braces { "\n".to_owned() } else { String::new() };
+ let body_indent = if is_break_braces { " ".to_owned() } else { String::new() };
+
+ let body = match body {
+ Some(bd) => bd.iter().map(|elem| elem.to_string()).join("\n\n "),
+ None => String::new(),
+ };
+ ast_from_text(&format!("impl C for D {{{body_newline}{body_indent}{body}{body_newline}}}"))
}
fn merge_gen_params(
@@ -273,7 +283,7 @@
generic_args: Option<ast::GenericArgList>,
path_type: ast::Type,
where_clause: Option<ast::WhereClause>,
- body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
+ body: Option<ast::AssocItemList>,
) -> ast::Impl {
let gen_args = generic_args.map_or_else(String::new, |it| it.to_string());
@@ -281,20 +291,13 @@
let body_newline =
if where_clause.is_some() && body.is_none() { "\n".to_owned() } else { String::new() };
-
let where_clause = match where_clause {
Some(pr) => format!("\n{pr}\n"),
None => " ".to_owned(),
};
- let body = match body {
- Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),
- None => String::new(),
- };
-
- ast_from_text(&format!(
- "impl{gen_params} {path_type}{gen_args}{where_clause}{{{body_newline}{body}}}"
- ))
+ let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string());
+ ast_from_text(&format!("impl{gen_params} {path_type}{gen_args}{where_clause}{body}"))
}
pub fn impl_trait(
@@ -308,7 +311,7 @@
ty: ast::Type,
trait_where_clause: Option<ast::WhereClause>,
ty_where_clause: Option<ast::WhereClause>,
- body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
+ body: Option<ast::AssocItemList>,
) -> ast::Impl {
let is_unsafe = if is_unsafe { "unsafe " } else { "" };
@@ -330,13 +333,10 @@
let where_clause = merge_where_clause(ty_where_clause, trait_where_clause)
.map_or_else(|| " ".to_owned(), |wc| format!("\n{wc}\n"));
- let body = match body {
- Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),
- None => String::new(),
- };
+ let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string());
ast_from_text(&format!(
- "{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{body_newline}{body}}}"
+ "{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{body}"
))
}
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index f5530c5..62a7d4d 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -805,9 +805,7 @@
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum TypeBoundKind {
/// Trait
- PathType(ast::PathType),
- /// for<'a> ...
- ForType(ast::ForType),
+ PathType(Option<ast::ForBinder>, ast::PathType),
/// use
Use(ast::UseBoundGenericArgs),
/// 'a
@@ -817,9 +815,7 @@
impl ast::TypeBound {
pub fn kind(&self) -> TypeBoundKind {
if let Some(path_type) = support::children(self.syntax()).next() {
- TypeBoundKind::PathType(path_type)
- } else if let Some(for_type) = support::children(self.syntax()).next() {
- TypeBoundKind::ForType(for_type)
+ TypeBoundKind::PathType(self.for_binder(), path_type)
} else if let Some(args) = self.use_bound_generic_args() {
TypeBoundKind::Use(args)
} else if let Some(lifetime) = self.lifetime() {
diff --git a/crates/syntax/src/ast/syntax_factory.rs b/crates/syntax/src/ast/syntax_factory.rs
index 7142e4f..f3ae754 100644
--- a/crates/syntax/src/ast/syntax_factory.rs
+++ b/crates/syntax/src/ast/syntax_factory.rs
@@ -38,7 +38,7 @@
self.mappings.as_ref().map(|mappings| mappings.take()).unwrap_or_default()
}
- fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> {
+ pub(crate) fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> {
self.mappings.as_ref().map(|it| it.borrow_mut())
}
}
diff --git a/crates/syntax/src/syntax_editor.rs b/crates/syntax/src/syntax_editor.rs
index 3fa5848..5107754 100644
--- a/crates/syntax/src/syntax_editor.rs
+++ b/crates/syntax/src/syntax_editor.rs
@@ -5,7 +5,7 @@
//! [`SyntaxEditor`]: https://github.com/dotnet/roslyn/blob/43b0b05cc4f492fd5de00f6f6717409091df8daa/src/Workspaces/Core/Portable/Editing/SyntaxEditor.cs
use std::{
- fmt,
+ fmt, iter,
num::NonZeroU32,
ops::RangeInclusive,
sync::atomic::{AtomicU32, Ordering},
@@ -41,6 +41,15 @@
self.annotations.push((element.syntax_element(), annotation))
}
+ pub fn add_annotation_all(
+ &mut self,
+ elements: Vec<impl Element>,
+ annotation: SyntaxAnnotation,
+ ) {
+ self.annotations
+ .extend(elements.into_iter().map(|e| e.syntax_element()).zip(iter::repeat(annotation)));
+ }
+
pub fn merge(&mut self, mut other: SyntaxEditor) {
debug_assert!(
self.root == other.root || other.root.ancestors().any(|node| node == self.root),
diff --git a/crates/syntax/src/syntax_editor/edits.rs b/crates/syntax/src/syntax_editor/edits.rs
index d66ea8a..840e769 100644
--- a/crates/syntax/src/syntax_editor/edits.rs
+++ b/crates/syntax/src/syntax_editor/edits.rs
@@ -92,6 +92,42 @@
}
}
+impl ast::AssocItemList {
+ /// Adds new associated items after all of the existing associated items.
+ ///
+ /// Attention! This function does align the first line of each item in `items` with
+ /// respect to `self`, but it does _not_ change indentation of other lines (if any).
+ pub fn add_items(&self, editor: &mut SyntaxEditor, items: Vec<ast::AssocItem>) {
+ let (indent, position, whitespace) = match self.assoc_items().last() {
+ Some(last_item) => (
+ IndentLevel::from_node(last_item.syntax()),
+ Position::after(last_item.syntax()),
+ "\n\n",
+ ),
+ None => match self.l_curly_token() {
+ Some(l_curly) => {
+ normalize_ws_between_braces(editor, self.syntax());
+ (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
+ }
+ None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
+ },
+ };
+
+ let elements: Vec<SyntaxElement> = items
+ .into_iter()
+ .enumerate()
+ .flat_map(|(i, item)| {
+ let whitespace = if i != 0 { "\n\n" } else { whitespace };
+ vec![
+ make::tokens::whitespace(&format!("{whitespace}{indent}")).into(),
+ item.syntax().clone().into(),
+ ]
+ })
+ .collect();
+ editor.insert_all(position, elements);
+ }
+}
+
impl ast::VariantList {
pub fn add_variant(&self, editor: &mut SyntaxEditor, variant: &ast::Variant) {
let make = SyntaxFactory::without_mappings();
diff --git a/docs/book/src/contributing/README.md b/docs/book/src/contributing/README.md
index beb94cd..57c7a9c 100644
--- a/docs/book/src/contributing/README.md
+++ b/docs/book/src/contributing/README.md
@@ -252,18 +252,8 @@
4. Commit & push the changelog.
5. Run `cargo xtask publish-release-notes <CHANGELOG>` -- this will convert the changelog entry in AsciiDoc to Markdown and update the body of GitHub Releases entry.
6. Tweet.
-7. Make a new branch and run `cargo xtask rustc-pull`, open a PR, and merge it.
- This will pull any changes from `rust-lang/rust` into `rust-analyzer`.
-8. Switch to `master`, pull, then run `cargo xtask rustc-push --rust-path ../rust-rust-analyzer --rust-fork matklad/rust`.
- Replace `matklad/rust` with your own fork of `rust-lang/rust`.
- You can use the token to authenticate when you get prompted for a password, since `josh` will push over HTTPS, not SSH.
- This will push the `rust-analyzer` changes to your fork.
- You can then open a PR against `rust-lang/rust`.
-
-Note: besides the `rust-rust-analyzer` clone, the Josh cache (stored under `~/.cache/rust-analyzer-josh`) will contain a bare clone of `rust-lang/rust`.
-This currently takes about 3.5 GB.
-
-This [HackMD](https://hackmd.io/7pOuxnkdQDaL1Y1FQr65xg) has details about how `josh` syncs work.
+7. Perform a subtree [pull](#performing-a-pull).
+8. Perform a subtree [push](#performing-a-push).
If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console.
If it fails because of something that needs to be fixed, remove the release tag (if needed), fix the problem, then start over.
@@ -288,3 +278,43 @@
If you don't feel like reviewing for whatever reason, someone else will pick the review up (but please speak up if you don't feel like it)!
* The [rust-lang](https://github.com/rust-lang) team [t-rust-analyzer-contributors]([https://github.com/orgs/rust-analyzer/teams/triage](https://github.com/rust-lang/team/blob/master/teams/rust-analyzer-contributors.toml)).
This team has general triaging permissions allowing to label, close and re-open issues.
+
+## Synchronizing subtree changes
+`rust-analyzer` is a [josh](https://josh-project.github.io/josh/intro.html) subtree of the [rust-lang/rust](https://github.com/rust-lang/rust)
+repository. We use the [rustc-josh-sync](https://github.com/rust-lang/josh-sync) tool to perform synchronization between these two
+repositories. You can find documentation of the tool [here](https://github.com/rust-lang/josh-sync).
+
+You can install the synchronization tool using the following command:
+```
+cargo install --locked --git https://github.com/rust-lang/josh-sync
+```
+
+Both pulls (synchronizing changes from rust-lang/rust into rust-analyzer) and pushes (synchronizing
+changes from rust-analyzer to rust-lang/rust) are performed from this repository.
+
+Usually we first perform a pull, wait for it to be merged, and then perform a push.
+
+### Performing a pull
+1) Checkout a new branch that will be used to create a PR against rust-analyzer
+2) Run the pull command
+ ```
+ rustc-josh-sync pull
+ ```
+3) Push the branch to your fork of `rust-analyzer` and create a PR
+ - If you have the `gh` CLI installed, `rustc-josh-sync` can create the PR for you.
+
+### Performing a push
+
+Wait for the previous pull to be merged.
+
+1) Switch to `master` and pull
+2) Run the push command to create a branch named `<branch-name>` in a `rustc` fork under the `<gh-username>` account
+ ```
+ rustc-josh-sync push <branch-name> <gh-username>
+ ```
+ - The push will ask you to download a checkout of the `rust-lang/rust` repository.
+ - If you get prompted for a password, see [this](https://github.com/rust-lang/josh-sync?tab=readme-ov-file#git-peculiarities).
+3) Create a PR from `<branch-name>` into `rust-lang/rust`
+
+> Besides the `rust` checkout, the Josh cache (stored under `~/.cache/rustc-josh`) will contain a bare clone of `rust-lang/rust`. This currently takes several GBs.
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 57d67a6..534c24b 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -3336,15 +3336,16 @@
}
},
"node_modules/form-data": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz",
- "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==",
+ "version": "4.0.4",
+ "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
+ "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"dev": true,
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
+ "hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index d2dc740..3b1b076 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -8,10 +8,9 @@
export type RunnableEnvCfgItem = {
mask?: string;
- env: Record<string, string>;
+ env: { [key: string]: { toString(): string } | null };
platform?: string | string[];
};
-export type RunnableEnvCfg = Record<string, string> | RunnableEnvCfgItem[];
type ShowStatusBar = "always" | "never" | { documentSelector: vscode.DocumentSelector };
@@ -261,18 +260,13 @@
return this.get<boolean | undefined>("testExplorer");
}
- runnablesExtraEnv(label: string): Record<string, string> | undefined {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const item = this.get<any>("runnables.extraEnv") ?? this.get<any>("runnableEnv");
- if (!item) return undefined;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const fixRecord = (r: Record<string, any>) => {
- for (const key in r) {
- if (typeof r[key] !== "string") {
- r[key] = String(r[key]);
- }
- }
- };
+ runnablesExtraEnv(label: string): Env {
+ const serverEnv = this.serverExtraEnv;
+ let extraEnv =
+ this.get<
+ RunnableEnvCfgItem[] | { [key: string]: { toString(): string } | null } | null
+ >("runnables.extraEnv") ?? {};
+ if (!extraEnv) return serverEnv;
const platform = process.platform;
const checkPlatform = (it: RunnableEnvCfgItem) => {
@@ -283,19 +277,25 @@
return true;
};
- if (item instanceof Array) {
+ if (extraEnv instanceof Array) {
const env = {};
- for (const it of item) {
+ for (const it of extraEnv) {
const masked = !it.mask || new RegExp(it.mask).test(label);
if (masked && checkPlatform(it)) {
Object.assign(env, it.env);
}
}
- fixRecord(env);
- return env;
+ extraEnv = env;
}
- fixRecord(item);
- return item;
+ const runnableExtraEnv = substituteVariablesInEnv(
+ Object.fromEntries(
+ Object.entries(extraEnv).map(([k, v]) => [
+ k,
+ typeof v === "string" ? v : v?.toString(),
+ ]),
+ ),
+ );
+ return { ...runnableExtraEnv, ...serverEnv };
}
get restartServerOnConfigChange() {
diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts
index adb75c2..24f8d90 100644
--- a/editors/code/src/debug.ts
+++ b/editors/code/src/debug.ts
@@ -6,7 +6,14 @@
import { Cargo } from "./toolchain";
import type { Ctx } from "./ctx";
import { createTaskFromRunnable, prepareEnv } from "./run";
-import { execute, isCargoRunnableArgs, unwrapUndefinable, log, normalizeDriveLetter } from "./util";
+import {
+ execute,
+ isCargoRunnableArgs,
+ unwrapUndefinable,
+ log,
+ normalizeDriveLetter,
+ Env,
+} from "./util";
import type { Config } from "./config";
// Here we want to keep track on everything that's currently running
@@ -206,10 +213,7 @@
destination: string;
};
-async function discoverSourceFileMap(
- env: Record<string, string>,
- cwd: string,
-): Promise<SourceFileMap | undefined> {
+async function discoverSourceFileMap(env: Env, cwd: string): Promise<SourceFileMap | undefined> {
const sysroot = env["RUSTC_TOOLCHAIN"];
if (sysroot) {
// let's try to use the default toolchain
@@ -232,7 +236,7 @@
type DebugConfigProvider<Type extends string, DebugConfig extends BaseDebugConfig<Type>> = {
executableProperty: keyof DebugConfig;
- environmentProperty: PropertyFetcher<DebugConfig, Record<string, string>, keyof DebugConfig>;
+ environmentProperty: PropertyFetcher<DebugConfig, Env, keyof DebugConfig>;
runnableArgsProperty: PropertyFetcher<DebugConfig, ra.CargoRunnableArgs, keyof DebugConfig>;
sourceFileMapProperty?: keyof DebugConfig;
type: Type;
@@ -276,7 +280,7 @@
"environment",
Object.entries(env).map((entry) => ({
name: entry[0],
- value: entry[1],
+ value: entry[1] ?? "",
})),
],
runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [
@@ -304,10 +308,7 @@
},
};
-async function getDebugExecutable(
- runnableArgs: ra.CargoRunnableArgs,
- env: Record<string, string>,
-): Promise<string> {
+async function getDebugExecutable(runnableArgs: ra.CargoRunnableArgs, env: Env): Promise<string> {
const cargo = new Cargo(runnableArgs.workspaceRoot || ".", env);
const executable = await cargo.executableFromArgs(runnableArgs);
@@ -328,7 +329,7 @@
runnable: ra.Runnable,
runnableArgs: ra.CargoRunnableArgs,
executable: string,
- env: Record<string, string>,
+ env: Env,
sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration {
const {
@@ -380,14 +381,14 @@
args: string[];
sourceMap: Record<string, string> | undefined;
sourceLanguages: ["rust"];
- env: Record<string, string>;
+ env: Env;
} & BaseDebugConfig<"lldb">;
type NativeDebugConfig = {
target: string;
// See https://github.com/WebFreak001/code-debug/issues/359
arguments: string;
- env: Record<string, string>;
+ env: Env;
valuesFormatting: "prettyPrinters";
} & BaseDebugConfig<"gdb">;
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts
index 95166c4..87c1d52 100644
--- a/editors/code/src/run.ts
+++ b/editors/code/src/run.ts
@@ -7,7 +7,7 @@
import { makeDebugConfig } from "./debug";
import type { Config } from "./config";
import type { LanguageClient } from "vscode-languageclient/node";
-import { log, unwrapUndefinable, type RustEditor } from "./util";
+import { Env, log, unwrapUndefinable, type RustEditor } from "./util";
const quickPickButtons = [
{ iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." },
@@ -122,11 +122,8 @@
}
}
-export function prepareBaseEnv(
- inheritEnv: boolean,
- base?: Record<string, string>,
-): Record<string, string> {
- const env: Record<string, string> = { RUST_BACKTRACE: "short" };
+export function prepareBaseEnv(inheritEnv: boolean, base?: Env): Env {
+ const env: Env = { RUST_BACKTRACE: "short" };
if (inheritEnv) {
Object.assign(env, process.env);
}
@@ -136,11 +133,7 @@
return env;
}
-export function prepareEnv(
- inheritEnv: boolean,
- runnableEnv?: Record<string, string>,
- runnableEnvCfg?: Record<string, string>,
-): Record<string, string> {
+export function prepareEnv(inheritEnv: boolean, runnableEnv?: Env, runnableEnvCfg?: Env): Env {
const env = prepareBaseEnv(inheritEnv, runnableEnv);
if (runnableEnvCfg) {
diff --git a/editors/code/src/tasks.ts b/editors/code/src/tasks.ts
index 730ec6d..eb0748a 100644
--- a/editors/code/src/tasks.ts
+++ b/editors/code/src/tasks.ts
@@ -1,6 +1,7 @@
import * as vscode from "vscode";
import type { Config } from "./config";
import * as toolchain from "./toolchain";
+import { Env } from "./util";
// This ends up as the `type` key in tasks.json. RLS also uses `cargo` and
// our configuration should be compatible with it so use the same key.
@@ -117,8 +118,8 @@
export async function targetToExecution(
definition: TaskDefinition,
options?: {
- env?: { [key: string]: string };
cwd?: string;
+ env?: Env;
},
cargo?: string,
): Promise<vscode.ProcessExecution | vscode.ShellExecution> {
@@ -131,7 +132,12 @@
command = definition.command;
args = definition.args || [];
}
- return new vscode.ProcessExecution(command, args, options);
+ return new vscode.ProcessExecution(command, args, {
+ cwd: options?.cwd,
+ env: Object.fromEntries(
+ Object.entries(options?.env ?? {}).map(([key, value]) => [key, value ?? ""]),
+ ),
+ });
}
export function activateTaskProvider(config: Config): vscode.Disposable {
diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts
index a859ce6..06f75a8 100644
--- a/editors/code/src/toolchain.ts
+++ b/editors/code/src/toolchain.ts
@@ -3,7 +3,7 @@
import * as path from "path";
import * as readline from "readline";
import * as vscode from "vscode";
-import { log, memoizeAsync, unwrapUndefinable } from "./util";
+import { Env, log, memoizeAsync, unwrapUndefinable } from "./util";
import type { CargoRunnableArgs } from "./lsp_ext";
interface CompilationArtifact {
@@ -37,7 +37,7 @@
export class Cargo {
constructor(
readonly rootFolder: string,
- readonly env: Record<string, string>,
+ readonly env: Env,
) {}
// Made public for testing purposes
@@ -156,7 +156,7 @@
/** Mirrors `toolchain::cargo()` implementation */
// FIXME: The server should provide this
-export function cargoPath(env?: Record<string, string>): Promise<string> {
+export function cargoPath(env?: Env): Promise<string> {
if (env?.["RUSTC_TOOLCHAIN"]) {
return Promise.resolve("cargo");
}
diff --git a/josh-sync.toml b/josh-sync.toml
new file mode 100644
index 0000000..51ff0d7
--- /dev/null
+++ b/josh-sync.toml
@@ -0,0 +1,2 @@
+repo = "rust-analyzer"
+filter = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer"
diff --git a/rust-version b/rust-version
index c2b1c15..2178caf 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-e05ab47e6c418fb2b9faa2eae9a7e70c65c98eaa
+733dab558992d902d6d17576de1da768094e2cf3
diff --git a/triagebot.toml b/triagebot.toml
index 2201b5a..27fdb67 100644
--- a/triagebot.toml
+++ b/triagebot.toml
@@ -17,6 +17,7 @@
"sync from downstream",
"Sync from rust",
"sync from rust",
+ "Rustc pull update",
]
labels = ["has-merge-commits", "S-waiting-on-author"]
@@ -27,3 +28,6 @@
# Prevents mentions in commits to avoid users being spammed
[no-mentions]
+
+# Automatically close and reopen PRs made by bots to run CI on them
+[bot-pull-requests]
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index 8cd5811..9d8a195 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -8,7 +8,6 @@
[dependencies]
anyhow.workspace = true
-directories = "6.0"
flate2 = "1.1.2"
write-json = "0.1.4"
xshell.workspace = true
diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs
index 2fd471b..72f6215 100644
--- a/xtask/src/flags.rs
+++ b/xtask/src/flags.rs
@@ -59,20 +59,6 @@
optional --dry-run
}
- cmd rustc-pull {
- /// rustc commit to pull.
- optional --commit refspec: String
- }
-
- cmd rustc-push {
- /// rust local path, e.g. `../rust-rust-analyzer`.
- required --rust-path rust_path: String
- /// rust fork name, e.g. `matklad/rust`.
- required --rust-fork rust_fork: String
- /// branch name.
- optional --branch branch: String
- }
-
cmd dist {
/// Use mimalloc allocator for server
optional --mimalloc
@@ -121,8 +107,6 @@
Install(Install),
FuzzTests(FuzzTests),
Release(Release),
- RustcPull(RustcPull),
- RustcPush(RustcPush),
Dist(Dist),
PublishReleaseNotes(PublishReleaseNotes),
Metrics(Metrics),
@@ -152,18 +136,6 @@
}
#[derive(Debug)]
-pub struct RustcPull {
- pub commit: Option<String>,
-}
-
-#[derive(Debug)]
-pub struct RustcPush {
- pub rust_path: String,
- pub rust_fork: String,
- pub branch: Option<String>,
-}
-
-#[derive(Debug)]
pub struct Dist {
pub mimalloc: bool,
pub jemalloc: bool,
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index aaa8d0e..c5ad49c 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -42,8 +42,6 @@
flags::XtaskCmd::Install(cmd) => cmd.run(sh),
flags::XtaskCmd::FuzzTests(_) => run_fuzzer(sh),
flags::XtaskCmd::Release(cmd) => cmd.run(sh),
- flags::XtaskCmd::RustcPull(cmd) => cmd.run(sh),
- flags::XtaskCmd::RustcPush(cmd) => cmd.run(sh),
flags::XtaskCmd::Dist(cmd) => cmd.run(sh),
flags::XtaskCmd::PublishReleaseNotes(cmd) => cmd.run(sh),
flags::XtaskCmd::Metrics(cmd) => cmd.run(sh),
diff --git a/xtask/src/release.rs b/xtask/src/release.rs
index e41f4ce..d06a25c 100644
--- a/xtask/src/release.rs
+++ b/xtask/src/release.rs
@@ -1,12 +1,5 @@
mod changelog;
-use std::process::{Command, Stdio};
-use std::thread;
-use std::time::Duration;
-
-use anyhow::{Context as _, bail};
-use directories::ProjectDirs;
-use stdx::JodChild;
use xshell::{Shell, cmd};
use crate::{date_iso, flags, is_release_tag, project_root};
@@ -59,171 +52,3 @@
Ok(())
}
}
-
-// git sync implementation adapted from https://github.com/rust-lang/miri/blob/62039ac/miri-script/src/commands.rs
-impl flags::RustcPull {
- pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
- sh.change_dir(project_root());
- let commit = self.commit.map(Result::Ok).unwrap_or_else(|| {
- let rust_repo_head =
- cmd!(sh, "git ls-remote https://github.com/rust-lang/rust/ HEAD").read()?;
- rust_repo_head
- .split_whitespace()
- .next()
- .map(|front| front.trim().to_owned())
- .ok_or_else(|| anyhow::format_err!("Could not obtain Rust repo HEAD from remote."))
- })?;
- // Make sure the repo is clean.
- if !cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty() {
- bail!("working directory must be clean before running `cargo xtask pull`");
- }
- // This should not add any new root commits. So count those before and after merging.
- let num_roots = || -> anyhow::Result<u32> {
- Ok(cmd!(sh, "git rev-list HEAD --max-parents=0 --count")
- .read()
- .context("failed to determine the number of root commits")?
- .parse::<u32>()?)
- };
- let num_roots_before = num_roots()?;
- // Make sure josh is running.
- let josh = start_josh()?;
-
- // Update rust-version file. As a separate commit, since making it part of
- // the merge has confused the heck out of josh in the past.
- // We pass `--no-verify` to avoid running any git hooks that might exist,
- // in case they dirty the repository.
- sh.write_file("rust-version", format!("{commit}\n"))?;
- const PREPARING_COMMIT_MESSAGE: &str = "Preparing for merge from rust-lang/rust";
- cmd!(sh, "git commit rust-version --no-verify -m {PREPARING_COMMIT_MESSAGE}")
- .run()
- .context("FAILED to commit rust-version file, something went wrong")?;
-
- // Fetch given rustc commit.
- cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git")
- .run()
- .inspect_err(|_| {
- // Try to un-do the previous `git commit`, to leave the repo in the state we found it it.
- cmd!(sh, "git reset --hard HEAD^")
- .run()
- .expect("FAILED to clean up again after failed `git fetch`, sorry for that");
- })
- .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?;
-
- // Merge the fetched commit.
- const MERGE_COMMIT_MESSAGE: &str = "Merge from rust-lang/rust";
- cmd!(sh, "git merge FETCH_HEAD --no-verify --no-ff -m {MERGE_COMMIT_MESSAGE}")
- .run()
- .context("FAILED to merge new commits, something went wrong")?;
-
- // Check that the number of roots did not increase.
- if num_roots()? != num_roots_before {
- bail!("Josh created a new root commit. This is probably not the history you want.");
- }
-
- drop(josh);
- Ok(())
- }
-}
-
-impl flags::RustcPush {
- pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
- let branch = self.branch.as_deref().unwrap_or("sync-from-ra");
- let rust_path = self.rust_path;
- let rust_fork = self.rust_fork;
-
- sh.change_dir(project_root());
- let base = sh.read_file("rust-version")?.trim().to_owned();
- // Make sure the repo is clean.
- if !cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty() {
- bail!("working directory must be clean before running `cargo xtask push`");
- }
- // Make sure josh is running.
- let josh = start_josh()?;
-
- // Find a repo we can do our preparation in.
- sh.change_dir(rust_path);
-
- // Prepare the branch. Pushing works much better if we use as base exactly
- // the commit that we pulled from last time, so we use the `rust-version`
- // file to find out which commit that would be.
- println!("Preparing {rust_fork} (base: {base})...");
- if cmd!(sh, "git fetch https://github.com/{rust_fork} {branch}")
- .ignore_stderr()
- .read()
- .is_ok()
- {
- bail!(
- "The branch `{branch}` seems to already exist in `https://github.com/{rust_fork}`. Please delete it and try again."
- );
- }
- cmd!(sh, "git fetch https://github.com/rust-lang/rust {base}").run()?;
- cmd!(sh, "git push https://github.com/{rust_fork} {base}:refs/heads/{branch}")
- .ignore_stdout()
- .ignore_stderr() // silence the "create GitHub PR" message
- .run()?;
- println!();
-
- // Do the actual push.
- sh.change_dir(project_root());
- println!("Pushing rust-analyzer changes...");
- cmd!(
- sh,
- "git push http://localhost:{JOSH_PORT}/{rust_fork}.git{JOSH_FILTER}.git HEAD:{branch}"
- )
- .run()?;
- println!();
-
- // Do a round-trip check to make sure the push worked as expected.
- cmd!(
- sh,
- "git fetch http://localhost:{JOSH_PORT}/{rust_fork}.git{JOSH_FILTER}.git {branch}"
- )
- .ignore_stderr()
- .read()?;
- let head = cmd!(sh, "git rev-parse HEAD").read()?;
- let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?;
- if head != fetch_head {
- bail!(
- "Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\
- Expected {head}, got {fetch_head}."
- );
- }
- println!(
- "Confirmed that the push round-trips back to rust-analyzer properly. Please create a rustc PR:"
- );
- // https://github.com/github-linguist/linguist/compare/master...octocat:linguist:master
- let fork_path = rust_fork.replace('/', ":");
- println!(
- " https://github.com/rust-lang/rust/compare/{fork_path}:{branch}?quick_pull=1&title=Subtree+update+of+rust-analyzer&body=r?+@ghost"
- );
-
- drop(josh);
- Ok(())
- }
-}
-
-/// Used for rustc syncs.
-const JOSH_FILTER: &str = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer";
-const JOSH_PORT: &str = "42042";
-
-fn start_josh() -> anyhow::Result<impl Drop> {
- // Determine cache directory.
- let local_dir = {
- let user_dirs = ProjectDirs::from("org", "rust-lang", "rust-analyzer-josh").unwrap();
- user_dirs.cache_dir().to_owned()
- };
-
- // Start josh, silencing its output.
- let mut cmd = Command::new("josh-proxy");
- cmd.arg("--local").arg(local_dir);
- cmd.arg("--remote").arg("https://github.com");
- cmd.arg("--port").arg(JOSH_PORT);
- cmd.arg("--no-background");
- cmd.stdout(Stdio::null());
- cmd.stderr(Stdio::null());
- let josh = cmd.spawn().context("failed to start josh-proxy, make sure it is installed")?;
- // Give it some time so hopefully the port is open. (100ms was not enough.)
- thread::sleep(Duration::from_millis(200));
-
- Ok(JodChild(josh))
-}