Merge from rust-lang/rust
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index c618e4b..00408e9 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -11,12 +11,12 @@
use triomphe::Arc;
use crate::{
- AssocItemId, AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, EnumVariantId,
- EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId,
- FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander,
- MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId,
- StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId,
- TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
+ AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc,
+ EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc,
+ FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc,
+ MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId,
+ ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId,
+ TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
attr::{Attrs, AttrsWithOwner},
expr_store::{
Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes,
@@ -90,7 +90,10 @@
#[salsa::interned]
fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
- // // endregion: items
+ // endregion: items
+
+ #[salsa::interned]
+ fn intern_block(&self, loc: BlockLoc) -> BlockId;
}
#[query_group::query_group]
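
A quick sketch of the round trip the new `intern_block` query provides, assuming the usual salsa convention that `#[salsa::interned]` also generates the matching `lookup_intern_block` query (which the `impl_intern!` call further down relies on); the helper function itself is hypothetical:

    fn block_roundtrip(db: &dyn DefDatabase, loc: BlockLoc) -> BlockLoc {
        // Interning the same `BlockLoc` twice yields the same `BlockId`,
        // and looking the id back up returns the original location.
        let id: BlockId = db.intern_block(loc);
        db.lookup_intern_block(id)
    }
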
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 03683ec..efa1374 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -11,7 +11,7 @@
use cfg::CfgOptions;
use either::Either;
use hir_expand::{
- HirFileId, InFile, Intern, MacroDefId,
+ HirFileId, InFile, MacroDefId,
mod_path::tool_path,
name::{AsName, Name},
span_map::SpanMapRef,
@@ -2148,7 +2148,7 @@
) -> ExprId {
let block_id = self.expander.ast_id_map().ast_id_for_block(&block).map(|file_local_id| {
let ast_id = self.expander.in_file(file_local_id);
- BlockLoc { ast_id, module: self.module }.intern(self.db)
+ self.db.intern_block(BlockLoc { ast_id, module: self.module })
});
let (module, def_map) =
@@ -2815,6 +2815,51 @@
mutability: Mutability::Shared,
})
};
+
+ // Assume that the rustc version is >= 1.89.0 iff the lang item `format_arguments` exists
+ // but `format_unsafe_arg` does not
+ let fmt_args =
+ || crate::lang_item::lang_item(self.db, self.module.krate(), LangItem::FormatArguments);
+ let fmt_unsafe_arg =
+ || crate::lang_item::lang_item(self.db, self.module.krate(), LangItem::FormatUnsafeArg);
+ let use_format_args_since_1_89_0 = fmt_args().is_some() && fmt_unsafe_arg().is_none();
+
+ let idx = if use_format_args_since_1_89_0 {
+ self.collect_format_args_impl(
+ syntax_ptr,
+ fmt,
+ hygiene,
+ argmap,
+ lit_pieces,
+ format_options,
+ )
+ } else {
+ self.collect_format_args_before_1_89_0_impl(
+ syntax_ptr,
+ fmt,
+ argmap,
+ lit_pieces,
+ format_options,
+ )
+ };
+
+ self.source_map
+ .template_map
+ .get_or_insert_with(Default::default)
+ .format_args_to_captures
+ .insert(idx, (hygiene, mappings));
+ idx
+ }
+
+ /// `format_args!` expansion implementation for rustc versions < `1.89.0`
+ fn collect_format_args_before_1_89_0_impl(
+ &mut self,
+ syntax_ptr: AstPtr<ast::Expr>,
+ fmt: FormatArgs,
+ argmap: FxIndexSet<(usize, ArgumentType)>,
+ lit_pieces: ExprId,
+ format_options: ExprId,
+ ) -> ExprId {
let arguments = &*fmt.arguments.arguments;
let args = if arguments.is_empty() {
@@ -2902,19 +2947,181 @@
});
}
- let idx = self.alloc_expr(
+ self.alloc_expr(
Expr::Call {
callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
},
syntax_ptr,
- );
- self.source_map
- .template_map
- .get_or_insert_with(Default::default)
- .format_args_to_captures
- .insert(idx, (hygiene, mappings));
- idx
+ )
+ }
+
+ /// `format_args!` expansion implementation for rustc versions >= `1.89.0`,
+ /// specifically since [this PR](https://github.com/rust-lang/rust/pull/140748)
+ fn collect_format_args_impl(
+ &mut self,
+ syntax_ptr: AstPtr<ast::Expr>,
+ fmt: FormatArgs,
+ hygiene: HygieneId,
+ argmap: FxIndexSet<(usize, ArgumentType)>,
+ lit_pieces: ExprId,
+ format_options: ExprId,
+ ) -> ExprId {
+ let arguments = &*fmt.arguments.arguments;
+
+ let (let_stmts, args) = if arguments.is_empty() {
+ (
+ // Generate:
+ // []
+ vec![],
+ self.alloc_expr_desugared(Expr::Array(Array::ElementList {
+ elements: Box::default(),
+ })),
+ )
+ } else if argmap.len() == 1 && arguments.len() == 1 {
+ // Only one argument, so we don't need to make the `args` tuple.
+ //
+ // Generate:
+ // super let args = [<core::fmt::Argument>::new_display(&arg)];
+ let args = argmap
+ .iter()
+ .map(|&(arg_index, ty)| {
+ let ref_arg = self.alloc_expr_desugared(Expr::Ref {
+ expr: arguments[arg_index].expr,
+ rawness: Rawness::Ref,
+ mutability: Mutability::Shared,
+ });
+ self.make_argument(ref_arg, ty)
+ })
+ .collect();
+ let args =
+ self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: args }));
+ let args_name = Name::new_symbol_root(sym::args);
+ let args_binding =
+ self.alloc_binding(args_name.clone(), BindingAnnotation::Unannotated, hygiene);
+ let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
+ self.add_definition_to_binding(args_binding, args_pat);
+ // TODO: We don't have `super let` yet.
+ let let_stmt = Statement::Let {
+ pat: args_pat,
+ type_ref: None,
+ initializer: Some(args),
+ else_branch: None,
+ };
+ (vec![let_stmt], self.alloc_expr_desugared(Expr::Path(Path::from(args_name))))
+ } else {
+ // Generate:
+ // super let args = (&arg0, &arg1, &...);
+ let args_name = Name::new_symbol_root(sym::args);
+ let args_binding =
+ self.alloc_binding(args_name.clone(), BindingAnnotation::Unannotated, hygiene);
+ let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
+ self.add_definition_to_binding(args_binding, args_pat);
+ let elements = arguments
+ .iter()
+ .map(|arg| {
+ self.alloc_expr_desugared(Expr::Ref {
+ expr: arg.expr,
+ rawness: Rawness::Ref,
+ mutability: Mutability::Shared,
+ })
+ })
+ .collect();
+ let args_tuple = self.alloc_expr_desugared(Expr::Tuple { exprs: elements });
+ // TODO: We don't have `super let` yet.
+ let let_stmt1 = Statement::Let {
+ pat: args_pat,
+ type_ref: None,
+ initializer: Some(args_tuple),
+ else_branch: None,
+ };
+
+ // Generate:
+ // super let args = [
+ // <core::fmt::Argument>::new_display(args.0),
+ // <core::fmt::Argument>::new_lower_hex(args.1),
+ // <core::fmt::Argument>::new_debug(args.0),
+ // …
+ // ];
+ let args = argmap
+ .iter()
+ .map(|&(arg_index, ty)| {
+ let args_ident_expr =
+ self.alloc_expr_desugared(Expr::Path(args_name.clone().into()));
+ let arg = self.alloc_expr_desugared(Expr::Field {
+ expr: args_ident_expr,
+ name: Name::new_tuple_field(arg_index),
+ });
+ self.make_argument(arg, ty)
+ })
+ .collect();
+ let array =
+ self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: args }));
+ let args_binding =
+ self.alloc_binding(args_name.clone(), BindingAnnotation::Unannotated, hygiene);
+ let args_pat = self.alloc_pat_desugared(Pat::Bind { id: args_binding, subpat: None });
+ self.add_definition_to_binding(args_binding, args_pat);
+ let let_stmt2 = Statement::Let {
+ pat: args_pat,
+ type_ref: None,
+ initializer: Some(array),
+ else_branch: None,
+ };
+ (vec![let_stmt1, let_stmt2], self.alloc_expr_desugared(Expr::Path(args_name.into())))
+ };
+
+ // Generate:
+ // &args
+ let args = self.alloc_expr_desugared(Expr::Ref {
+ expr: args,
+ rawness: Rawness::Ref,
+ mutability: Mutability::Shared,
+ });
+
+ let call_block = {
+ // Generate:
+ // unsafe {
+ // <core::fmt::Arguments>::new_v1_formatted(
+ // lit_pieces,
+ // args,
+ // format_options,
+ // )
+ // }
+
+ let new_v1_formatted = LangItem::FormatArguments.ty_rel_path(
+ self.db,
+ self.module.krate(),
+ Name::new_symbol_root(sym::new_v1_formatted),
+ );
+ let new_v1_formatted =
+ self.alloc_expr_desugared(new_v1_formatted.map_or(Expr::Missing, Expr::Path));
+ let args = [lit_pieces, args, format_options];
+ let call = self
+ .alloc_expr_desugared(Expr::Call { callee: new_v1_formatted, args: args.into() });
+
+ Expr::Unsafe { id: None, statements: Box::default(), tail: Some(call) }
+ };
+
+ if !let_stmts.is_empty() {
+ // Generate:
+ // {
+ // super let …
+ // super let …
+ // <core::fmt::Arguments>::new_…(…)
+ // }
+ let call = self.alloc_expr_desugared(call_block);
+ self.alloc_expr(
+ Expr::Block {
+ id: None,
+ statements: let_stmts.into(),
+ tail: Some(call),
+ label: None,
+ },
+ syntax_ptr,
+ )
+ } else {
+ self.alloc_expr(call_block, syntax_ptr)
+ }
}
/// Generate a hir expression for a format_args placeholder specification.
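
Note that the choice between the two lowerings is made purely from lang-item presence, with no explicit toolchain version check; the updated `desugar_builtin_format_args` test below shows the resulting shape (a tuple of borrows, an `Argument` array, and a single `new_v1_formatted` call, all in one block). A minimal sketch of that heuristic as a free function, reusing the exact calls from the hunk above (the free function and its signature are illustrative only):

    /// Per the comment in the hunk above: rustc >= 1.89.0 still defines the
    /// `format_arguments` lang item but no longer defines `format_unsafe_arg`,
    /// so that combination selects the new lowering.
    fn uses_new_format_args_lowering(db: &dyn DefDatabase, krate: base_db::Crate) -> bool {
        let fmt_args = crate::lang_item::lang_item(db, krate, LangItem::FormatArguments);
        let fmt_unsafe_arg = crate::lang_item::lang_item(db, krate, LangItem::FormatUnsafeArg);
        fmt_args.is_some() && fmt_unsafe_arg.is_none()
    }
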
diff --git a/crates/hir-def/src/expr_store/tests/body.rs b/crates/hir-def/src/expr_store/tests/body.rs
index 29e249b..927e280 100644
--- a/crates/hir-def/src/expr_store/tests/body.rs
+++ b/crates/hir-def/src/expr_store/tests/body.rs
@@ -178,14 +178,14 @@
}
#[test]
-fn desugar_builtin_format_args() {
+fn desugar_builtin_format_args_before_1_89_0() {
let (db, body, def) = lower(
r#"
-//- minicore: fmt
+//- minicore: fmt_before_1_89_0
fn main() {
let are = "are";
let count = 10;
- builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", last = "!");
+ builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", orphan = (), last = "!");
}
"#,
);
@@ -249,8 +249,11 @@
builtin#lang(Count::Implied),
),
],
- unsafe {
- builtin#lang(UnsafeArg::new)()
+ {
+ ();
+ unsafe {
+ builtin#lang(UnsafeArg::new)()
+ }
},
);
}"#]]
@@ -258,6 +261,89 @@
}
#[test]
+fn desugar_builtin_format_args() {
+ let (db, body, def) = lower(
+ r#"
+//- minicore: fmt
+fn main() {
+ let are = "are";
+ let count = 10;
+ builtin#format_args("\u{1b}hello {count:02} {} friends, we {are:?} {0}{last}", "fancy", orphan = (), last = "!");
+}
+"#,
+ );
+
+ expect![[r#"
+ fn main() {
+ let are = "are";
+ let count = 10;
+ {
+ let args = (&"fancy", &(), &"!", &count, &are, );
+ let args = [
+ builtin#lang(Argument::new_display)(
+ args.3,
+ ), builtin#lang(Argument::new_display)(
+ args.0,
+ ), builtin#lang(Argument::new_debug)(
+ args.4,
+ ), builtin#lang(Argument::new_display)(
+ args.2,
+ ),
+ ];
+ unsafe {
+ builtin#lang(Arguments::new_v1_formatted)(
+ &[
+ "\u{1b}hello ", " ", " friends, we ", " ", "",
+ ],
+ &args,
+ &[
+ builtin#lang(Placeholder::new)(
+ 0usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 8u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Is)(
+ 2,
+ ),
+ ), builtin#lang(Placeholder::new)(
+ 1usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ), builtin#lang(Placeholder::new)(
+ 2usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ), builtin#lang(Placeholder::new)(
+ 1usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ), builtin#lang(Placeholder::new)(
+ 3usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ),
+ ],
+ )
+ }
+ };
+ }"#]]
+ .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
+
+#[test]
fn test_macro_hygiene() {
let (db, body, def) = lower(
r##"
@@ -295,29 +381,31 @@
expect![[r#"
fn main() {
_ = ra_test_fixture::error::SsrError::new(
- builtin#lang(Arguments::new_v1_formatted)(
- &[
- "Failed to resolve path `", "`",
- ],
- &[
+ {
+ let args = [
builtin#lang(Argument::new_display)(
&node.text(),
),
- ],
- &[
- builtin#lang(Placeholder::new)(
- 0usize,
- ' ',
- builtin#lang(Alignment::Unknown),
- 0u32,
- builtin#lang(Count::Implied),
- builtin#lang(Count::Implied),
- ),
- ],
+ ];
unsafe {
- builtin#lang(UnsafeArg::new)()
- },
- ),
+ builtin#lang(Arguments::new_v1_formatted)(
+ &[
+ "Failed to resolve path `", "`",
+ ],
+ &args,
+ &[
+ builtin#lang(Placeholder::new)(
+ 0usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ),
+ ],
+ )
+ }
+ },
);
}"#]]
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
@@ -327,7 +415,7 @@
fn regression_10300() {
let (db, body, def) = lower(
r#"
-//- minicore: concat, panic
+//- minicore: concat, panic, fmt_before_1_89_0
mod private {
pub use core::concat;
}
diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs
index bb0b70b..c770737 100644
--- a/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -189,8 +189,8 @@
}
"#,
expect![[r#"
- BlockIdLt { [salsa id]: Id(3c01) } in BlockRelativeModuleId { block: Some(BlockIdLt { [salsa id]: Id(3c00) }), local_id: Idx::<ModuleData>(1) }
- BlockIdLt { [salsa id]: Id(3c00) } in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+ BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::<ModuleData>(1) }
+ BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
crate scope
"#]],
);
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index a542214..a562f2d 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -384,26 +384,7 @@
/// The containing module.
pub module: ModuleId,
}
-#[salsa_macros::tracked(debug)]
-#[derive(PartialOrd, Ord)]
-pub struct BlockIdLt<'db> {
- pub loc: BlockLoc,
-}
-pub type BlockId = BlockIdLt<'static>;
-impl hir_expand::Intern for BlockLoc {
- type Database = dyn DefDatabase;
- type ID = BlockId;
- fn intern(self, db: &Self::Database) -> Self::ID {
- unsafe { std::mem::transmute::<BlockIdLt<'_>, BlockId>(BlockIdLt::new(db, self)) }
- }
-}
-impl hir_expand::Lookup for BlockId {
- type Database = dyn DefDatabase;
- type Data = BlockLoc;
- fn lookup(&self, db: &Self::Database) -> Self::Data {
- self.loc(db)
- }
-}
+impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block);
/// A `ModuleId` that is always a crate's root module.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
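
Judging by the hand-written impls it replaces, the `impl_intern!` invocation boils down to defining the `BlockId` key type plus roughly the following two impls (a sketch of the expansion, not its literal output):

    impl hir_expand::Intern for BlockLoc {
        type Database = dyn DefDatabase;
        type ID = BlockId;
        fn intern(self, db: &Self::Database) -> BlockId {
            // Delegates to the `intern_block` query added in `db.rs` above.
            db.intern_block(self)
        }
    }

    impl hir_expand::Lookup for BlockId {
        type Database = dyn DefDatabase;
        type Data = BlockLoc;
        fn lookup(&self, db: &Self::Database) -> BlockLoc {
            db.lookup_intern_block(*self)
        }
    }
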
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index d1432ca..b756bb8 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -1230,11 +1230,15 @@
self.select_from_expr(*expr);
}
}
+ Expr::Let { pat: _, expr } => {
+ self.walk_expr(*expr);
+ let place = self.place_of_expr(*expr);
+ self.ref_expr(*expr, place);
+ }
Expr::UnaryOp { expr, op: _ }
| Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
| Expr::Await { expr }
| Expr::Loop { body: expr, label: _ }
- | Expr::Let { pat: _, expr }
| Expr::Box { expr }
| Expr::Cast { expr, type_ref: _ } => {
self.consume_expr(*expr);
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 88d21be..7fb9817 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -444,3 +444,22 @@
expect!["99..165;49..54;120..121,133..134 ByRef(Mut { kind: Default }) a &'? mut A"],
);
}
+
+#[test]
+fn let_binding_is_a_ref_capture() {
+ check_closure_captures(
+ r#"
+//- minicore:copy
+struct S;
+fn main() {
+ let mut s = S;
+ let s_ref = &mut s;
+ let closure = || {
+ if let ref cb = s_ref {
+ }
+ };
+}
+"#,
+ expect!["83..135;49..54;112..117 ByRef(Shared) s_ref &'? &'? mut S"],
+ );
+}
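
For reference, the behavior the new `Expr::Let` arm in `closure.rs` above (and this test) models matches what rustc accepts: a `let`/`if let` whose pattern only binds by reference borrows the scrutinee place instead of consuming it, so the scrutinee stays usable after the closure. A standalone sketch of that user-visible behavior:

    #[allow(irrefutable_let_patterns)]
    fn main() {
        struct S;
        let mut s = S;
        let s_ref = &mut s;
        let closure = || {
            // The `ref` binding only needs a shared borrow of `s_ref`,
            // so the closure captures it ByRef(Shared), not by value.
            if let ref _cb = s_ref {}
        };
        closure();
        // `s_ref` was not moved into the closure and remains usable here.
        let _still_here: &&mut S = &s_ref;
    }
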
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index b1cf30b..0bce69a 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -242,9 +242,9 @@
resolve_field(db, variant_def, name, ns)
}
-fn resolve_assoc_item(
- db: &dyn HirDatabase,
- ty: &Type,
+fn resolve_assoc_item<'db>(
+ db: &'db dyn HirDatabase,
+ ty: &Type<'db>,
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
@@ -256,10 +256,10 @@
})
}
-fn resolve_impl_trait_item(
- db: &dyn HirDatabase,
+fn resolve_impl_trait_item<'db>(
+ db: &'db dyn HirDatabase,
resolver: Resolver<'_>,
- ty: &Type,
+ ty: &Type<'db>,
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index f7b140e..074bde9 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -36,15 +36,15 @@
};
macro_rules! diagnostics {
- ($($diag:ident,)*) => {
+ ($($diag:ident $(<$lt:lifetime>)?,)*) => {
#[derive(Debug)]
- pub enum AnyDiagnostic {$(
- $diag(Box<$diag>),
+ pub enum AnyDiagnostic<'db> {$(
+ $diag(Box<$diag $(<$lt>)?>),
)*}
$(
- impl From<$diag> for AnyDiagnostic {
- fn from(d: $diag) -> AnyDiagnostic {
+ impl<'db> From<$diag $(<$lt>)?> for AnyDiagnostic<'db> {
+ fn from(d: $diag $(<$lt>)?) -> AnyDiagnostic<'db> {
AnyDiagnostic::$diag(Box::new(d))
}
}
@@ -69,12 +69,12 @@
diagnostics![
AwaitOutsideOfAsync,
BreakOutsideOfLoop,
- CastToUnsized,
- ExpectedFunction,
+ CastToUnsized<'db>,
+ ExpectedFunction<'db>,
InactiveCode,
IncoherentImpl,
IncorrectCase,
- InvalidCast,
+ InvalidCast<'db>,
InvalidDeriveTarget,
MacroDefError,
MacroError,
@@ -85,7 +85,7 @@
MissingFields,
MissingMatchArms,
MissingUnsafe,
- MovedOutOfRef,
+ MovedOutOfRef<'db>,
NeedMut,
NonExhaustiveLet,
NoSuchField,
@@ -98,17 +98,17 @@
TraitImplMissingAssocItems,
TraitImplOrphan,
TraitImplRedundantAssocItems,
- TypedHole,
- TypeMismatch,
+ TypedHole<'db>,
+ TypeMismatch<'db>,
UndeclaredLabel,
UnimplementedBuiltinMacro,
UnreachableLabel,
UnresolvedAssocItem,
UnresolvedExternCrate,
- UnresolvedField,
+ UnresolvedField<'db>,
UnresolvedImport,
UnresolvedMacroCall,
- UnresolvedMethodCall,
+ UnresolvedMethodCall<'db>,
UnresolvedModule,
UnresolvedIdent,
UnusedMut,
@@ -130,9 +130,9 @@
}
#[derive(Debug)]
-pub struct TypedHole {
+pub struct TypedHole<'db> {
pub expr: InFile<ExprOrPatPtr>,
- pub expected: Type,
+ pub expected: Type<'db>,
}
#[derive(Debug)]
@@ -242,25 +242,25 @@
}
#[derive(Debug)]
-pub struct ExpectedFunction {
+pub struct ExpectedFunction<'db> {
pub call: InFile<ExprOrPatPtr>,
- pub found: Type,
+ pub found: Type<'db>,
}
#[derive(Debug)]
-pub struct UnresolvedField {
+pub struct UnresolvedField<'db> {
pub expr: InFile<ExprOrPatPtr>,
- pub receiver: Type,
+ pub receiver: Type<'db>,
pub name: Name,
pub method_with_same_name_exists: bool,
}
#[derive(Debug)]
-pub struct UnresolvedMethodCall {
+pub struct UnresolvedMethodCall<'db> {
pub expr: InFile<ExprOrPatPtr>,
- pub receiver: Type,
+ pub receiver: Type<'db>,
pub name: Name,
- pub field_with_same_name: Option<Type>,
+ pub field_with_same_name: Option<Type<'db>>,
pub assoc_func_with_same_name: Option<Function>,
}
@@ -329,10 +329,10 @@
}
#[derive(Debug)]
-pub struct TypeMismatch {
+pub struct TypeMismatch<'db> {
pub expr_or_pat: InFile<ExprOrPatPtr>,
- pub expected: Type,
- pub actual: Type,
+ pub expected: Type<'db>,
+ pub actual: Type<'db>,
}
#[derive(Debug)]
@@ -352,8 +352,8 @@
}
#[derive(Debug)]
-pub struct MovedOutOfRef {
- pub ty: Type,
+pub struct MovedOutOfRef<'db> {
+ pub ty: Type<'db>,
pub span: InFile<SyntaxNodePtr>,
}
@@ -403,17 +403,17 @@
}
#[derive(Debug)]
-pub struct CastToUnsized {
+pub struct CastToUnsized<'db> {
pub expr: InFile<ExprOrPatPtr>,
- pub cast_ty: Type,
+ pub cast_ty: Type<'db>,
}
#[derive(Debug)]
-pub struct InvalidCast {
+pub struct InvalidCast<'db> {
pub expr: InFile<ExprOrPatPtr>,
pub error: CastError,
- pub expr_ty: Type,
- pub cast_ty: Type,
+ pub expr_ty: Type<'db>,
+ pub cast_ty: Type<'db>,
}
#[derive(Debug)]
@@ -482,12 +482,12 @@
pub expected_kind: GenericArgKind,
}
-impl AnyDiagnostic {
+impl<'db> AnyDiagnostic<'db> {
pub(crate) fn body_validation_diagnostic(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
diagnostic: BodyValidationDiagnostic,
source_map: &hir_def::expr_store::BodySourceMap,
- ) -> Option<AnyDiagnostic> {
+ ) -> Option<AnyDiagnostic<'db>> {
match diagnostic {
BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
let variant_data = variant.variant_data(db);
@@ -618,12 +618,12 @@
}
pub(crate) fn inference_diagnostic(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
def: DefWithBodyId,
d: &InferenceDiagnostic,
source_map: &hir_def::expr_store::BodySourceMap,
sig_map: &hir_def::expr_store::ExpressionStoreSourceMap,
- ) -> Option<AnyDiagnostic> {
+ ) -> Option<AnyDiagnostic<'db>> {
let expr_syntax = |expr| {
source_map
.expr_syntax(expr)
@@ -819,7 +819,7 @@
fn path_diagnostic(
diag: &PathLoweringDiagnostic,
path: InFile<ast::Path>,
- ) -> Option<AnyDiagnostic> {
+ ) -> Option<AnyDiagnostic<'db>> {
Some(match *diag {
PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => {
let segment = hir_segment_to_ast_segment(&path.value, segment)?;
@@ -912,8 +912,8 @@
pub(crate) fn ty_diagnostic(
diag: &TyLoweringDiagnostic,
source_map: &ExpressionStoreSourceMap,
- db: &dyn HirDatabase,
- ) -> Option<AnyDiagnostic> {
+ db: &'db dyn HirDatabase,
+ ) -> Option<AnyDiagnostic<'db>> {
let Ok(source) = source_map.type_syntax(diag.source) else {
stdx::never!("error on synthetic type syntax");
return None;
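
The `$(<$lt:lifetime>)?` matcher added to `diagnostics!` at the top of this file's diff is what lets a single invocation mix plain and lifetime-parameterized diagnostics. A stripped-down sketch of the same pattern, with illustrative stand-ins instead of the real diagnostic types:

    macro_rules! diagnostics {
        ($($diag:ident $(<$lt:lifetime>)?,)*) => {
            #[derive(Debug)]
            pub enum AnyDiagnostic<'db> {$(
                $diag(Box<$diag $(<$lt>)?>),
            )*}

            $(
                impl<'db> From<$diag $(<$lt>)?> for AnyDiagnostic<'db> {
                    fn from(d: $diag $(<$lt>)?) -> AnyDiagnostic<'db> {
                        AnyDiagnostic::$diag(Box::new(d))
                    }
                }
            )*
        };
    }

    #[derive(Debug)]
    pub struct UnresolvedModule;
    #[derive(Debug)]
    pub struct TypeMismatch<'db> {
        pub expected: &'db str,
    }

    // Variants without a lifetime and variants with `<'db>` coexist in one list.
    diagnostics![UnresolvedModule, TypeMismatch<'db>,];
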
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index 124ab8e..112558b 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -431,7 +431,7 @@
}
}
-impl HirDisplay for Type {
+impl HirDisplay for Type<'_> {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
self.ty.hir_fmt(f)
}
@@ -743,7 +743,7 @@
}
}
-impl HirDisplay for TraitRef {
+impl HirDisplay for TraitRef<'_> {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
self.trait_ref.hir_fmt(f)
}
diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs
index fe7429c..4767d47 100644
--- a/crates/hir/src/has_source.rs
+++ b/crates/hir/src/has_source.rs
@@ -225,7 +225,7 @@
}
}
-impl HasSource for Param {
+impl HasSource for Param<'_> {
type Ast = Either<ast::SelfParam, ast::Param>;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index adae335..3b39707 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -84,7 +84,7 @@
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use span::{AstIdNode, Edition, FileId};
-use stdx::{format_to, impl_from, never};
+use stdx::{format_to, impl_from, never, variance::PhantomCovariantLifetime};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
ast::{self, HasAttrs as _, HasName, HasVisibility as _},
@@ -400,7 +400,11 @@
Some(name)
}
- pub fn diagnostics(self, db: &dyn HirDatabase, style_lints: bool) -> Vec<AnyDiagnostic> {
+ pub fn diagnostics<'db>(
+ self,
+ db: &'db dyn HirDatabase,
+ style_lints: bool,
+ ) -> Vec<AnyDiagnostic<'db>> {
let id = match self {
ModuleDef::Adt(it) => match it {
Adt::Struct(it) => it.id.into(),
@@ -612,10 +616,10 @@
}
/// Fills `acc` with the module's diagnostics.
- pub fn diagnostics(
+ pub fn diagnostics<'db>(
self,
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
style_lints: bool,
) {
let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered();
@@ -970,10 +974,10 @@
}
}
-fn macro_call_diagnostics(
- db: &dyn HirDatabase,
+fn macro_call_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
macro_call_id: MacroCallId,
- acc: &mut Vec<AnyDiagnostic>,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
) {
let Some(e) = db.parse_macro_expansion_error(macro_call_id) else {
return;
@@ -1010,7 +1014,11 @@
}
}
-fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) {
+fn emit_macro_def_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
+ m: Macro,
+) {
let id = db.macro_def(m.id);
if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) {
if let Some(e) = expander.mac.err() {
@@ -1030,18 +1038,18 @@
}
}
-fn emit_def_diagnostic(
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+fn emit_def_diagnostic<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
diag: &DefDiagnostic,
edition: Edition,
) {
emit_def_diagnostic_(db, acc, &diag.kind, edition)
}
-fn emit_def_diagnostic_(
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+fn emit_def_diagnostic_<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
diag: &DefDiagnosticKind,
edition: Edition,
) {
@@ -1251,14 +1259,18 @@
Name::new_tuple_field(self.index as usize)
}
- pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
let ty = db.infer(self.owner).tuple_field_access_types[&self.tuple]
.as_slice(Interner)
.get(self.index as usize)
.and_then(|arg| arg.ty(Interner))
.cloned()
.unwrap_or_else(|| TyKind::Error.intern(Interner));
- Type { env: db.trait_environment_for_body(self.owner), ty }
+ Type {
+ env: db.trait_environment_for_body(self.owner),
+ ty,
+ _pd: PhantomCovariantLifetime::new(),
+ }
}
}
@@ -1309,7 +1321,7 @@
/// Returns the type as in the signature of the struct (i.e., with
/// placeholder types for type parameters). Only use this in the context of
/// the field definition.
- pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
let var_id = self.parent.into();
let generic_def_id: GenericDefId = match self.parent {
VariantDef::Struct(it) => it.id.into(),
@@ -1322,7 +1334,11 @@
}
// FIXME: Find better API to also handle const generics
- pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
+ pub fn ty_with_args<'db>(
+ &self,
+ db: &'db dyn HirDatabase,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> Type<'db> {
let var_id = self.parent.into();
let def_id: AdtId = match self.parent {
VariantDef::Struct(it) => it.id.into(),
@@ -1394,15 +1410,15 @@
.collect()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def(db, self.id)
}
- pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def_placeholders(db, self.id)
}
- pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
@@ -1449,15 +1465,15 @@
Module { id: self.id.lookup(db).container }
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def(db, self.id)
}
- pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def_placeholders(db, self.id)
}
- pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
@@ -1515,16 +1531,16 @@
db.enum_signature(self.id).repr
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
Type::from_def(db, self.id)
}
- pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty_placeholders<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
Type::from_def_placeholders(db, self.id)
}
/// The type of the enum variant bodies.
- pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn variant_body_ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
Type::new_for_crate(
self.id.lookup(db).container.krate(),
TyBuilder::builtin(match db.enum_signature(self.id).variant_body_type() {
@@ -1599,7 +1615,7 @@
self.id.lookup(db).parent.into()
}
- pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
@@ -1701,14 +1717,18 @@
/// Turns this ADT into a type. Any type parameters of the ADT will be
/// turned into unknown types, which is good for e.g. finding the most
/// general set of completions, but will not look very nice when printed.
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
let id = AdtId::from(self);
Type::from_def(db, id)
}
/// Turns this ADT into a type with the given type parameters. This isn't
/// the greatest API, FIXME find a better one.
- pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator<Item = Type>) -> Type {
+ pub fn ty_with_args<'db>(
+ self,
+ db: &'db dyn HirDatabase,
+ args: impl Iterator<Item = Type<'db>>,
+ ) -> Type<'db> {
let id = AdtId::from(self);
let mut it = args.map(|t| t.ty);
let ty = TyBuilder::def_ty(db, id.into(), None)
@@ -1841,7 +1861,7 @@
}
/// Returns the type this def's body has to evaluate to.
- pub fn body_type(self, db: &dyn HirDatabase) -> Type {
+ pub fn body_type(self, db: &dyn HirDatabase) -> Type<'_> {
match self {
DefWithBody::Function(it) => it.ret_type(db),
DefWithBody::Static(it) => it.ty(db),
@@ -1874,10 +1894,10 @@
}
}
- pub fn diagnostics(
+ pub fn diagnostics<'db>(
self,
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
style_lints: bool,
) {
let krate = self.module(db).id.krate();
@@ -2107,7 +2127,7 @@
fn expr_store_diagnostics(
db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+ acc: &mut Vec<AnyDiagnostic<'_>>,
source_map: &ExpressionStoreSourceMap,
) {
for diag in source_map.diagnostics() {
@@ -2172,11 +2192,11 @@
db.function_signature(self.id).name.clone()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
- pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type {
+ pub fn fn_ptr_type(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
@@ -2185,7 +2205,7 @@
}
/// Get this function's return type
- pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
+ pub fn ret_type(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
@@ -2194,11 +2214,11 @@
}
// FIXME: Find better API to also handle const generics
- pub fn ret_type_with_args(
+ pub fn ret_type_with_args<'db>(
self,
- db: &dyn HirDatabase,
- generics: impl Iterator<Item = Type>,
- ) -> Type {
+ db: &'db dyn HirDatabase,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> Type<'db> {
let resolver = self.id.resolver(db);
let parent_id: Option<GenericDefId> = match self.id.lookup(db).container {
ItemContainerId::ImplId(it) => Some(it.into()),
@@ -2223,7 +2243,7 @@
Type::new_with_resolver_inner(db, &resolver, ty)
}
- pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn async_ret_type<'db>(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
if !self.is_async(db) {
return None;
}
@@ -2247,7 +2267,7 @@
self.has_self_param(db).then_some(SelfParam { func: self.id })
}
- pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
+ pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param<'_>> {
let environment = db.trait_environment(self.id.into());
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
@@ -2256,7 +2276,11 @@
.iter()
.enumerate()
.map(|(idx, ty)| {
- let ty = Type { env: environment.clone(), ty: ty.clone() };
+ let ty = Type {
+ env: environment.clone(),
+ ty: ty.clone(),
+ _pd: PhantomCovariantLifetime::new(),
+ };
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
@@ -2266,12 +2290,12 @@
db.function_signature(self.id).params.len()
}
- pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
+ pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param<'_>>> {
self.self_param(db)?;
Some(self.params_without_self(db))
}
- pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param> {
+ pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param<'_>> {
let environment = db.trait_environment(self.id.into());
let substs = TyBuilder::placeholder_subst(db, self.id);
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
@@ -2282,18 +2306,22 @@
.enumerate()
.skip(skip)
.map(|(idx, ty)| {
- let ty = Type { env: environment.clone(), ty: ty.clone() };
+ let ty = Type {
+ env: environment.clone(),
+ ty: ty.clone(),
+ _pd: PhantomCovariantLifetime::new(),
+ };
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
}
// FIXME: Find better API to also handle const generics
- pub fn params_without_self_with_args(
+ pub fn params_without_self_with_args<'db>(
self,
- db: &dyn HirDatabase,
- generics: impl Iterator<Item = Type>,
- ) -> Vec<Param> {
+ db: &'db dyn HirDatabase,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> Vec<Param<'db>> {
let environment = db.trait_environment(self.id.into());
let parent_id: Option<GenericDefId> = match self.id.lookup(db).container {
ItemContainerId::ImplId(it) => Some(it.into()),
@@ -2328,7 +2356,11 @@
.enumerate()
.skip(skip)
.map(|(idx, ty)| {
- let ty = Type { env: environment.clone(), ty: ty.clone() };
+ let ty = Type {
+ env: environment.clone(),
+ ty: ty.clone(),
+ _pd: PhantomCovariantLifetime::new(),
+ };
Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx }
})
.collect()
@@ -2358,7 +2390,8 @@
return true;
}
- let Some(impl_traits) = self.ret_type(db).as_impl_traits(db) else { return false };
+ let ret_type = self.ret_type(db);
+ let Some(impl_traits) = ret_type.as_impl_traits(db) else { return false };
let Some(future_trait_id) = LangItem::Future.resolve_trait(db, self.ty(db).env.krate)
else {
return false;
@@ -2501,14 +2534,14 @@
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
-pub struct Param {
+pub struct Param<'db> {
func: Callee,
/// The index in parameter list, including self parameter.
idx: usize,
- ty: Type,
+ ty: Type<'db>,
}
-impl Param {
+impl<'db> Param<'db> {
pub fn parent_fn(&self) -> Option<Function> {
match self.func {
Callee::Def(CallableDefId::FunctionId(f)) => Some(f.into()),
@@ -2524,7 +2557,7 @@
self.idx
}
- pub fn ty(&self) -> &Type {
+ pub fn ty(&self) -> &Type<'db> {
&self.ty
}
@@ -2591,17 +2624,21 @@
Function::from(self.func)
}
- pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
let substs = TyBuilder::placeholder_subst(db, self.func);
let callable_sig =
db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
let environment = db.trait_environment(self.func.into());
let ty = callable_sig.params()[0].clone();
- Type { env: environment, ty }
+ Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
// FIXME: Find better API to also handle const generics
- pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
+ pub fn ty_with_args<'db>(
+ &self,
+ db: &'db dyn HirDatabase,
+ generics: impl Iterator<Item = Type<'db>>,
+ ) -> Type<'db> {
let parent_id: GenericDefId = match self.func.lookup(db).container {
ItemContainerId::ImplId(it) => it.into(),
ItemContainerId::TraitId(it) => it.into(),
@@ -2626,7 +2663,7 @@
db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
let environment = db.trait_environment(self.func.into());
let ty = callable_sig.params()[0].clone();
- Type { env: environment, ty }
+ Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
}
@@ -2714,7 +2751,7 @@
self.source(db)?.value.body()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
@@ -2791,7 +2828,7 @@
self.source(db)?.value.body()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_value_def(db, self.id)
}
@@ -2961,11 +2998,11 @@
Module { id: self.id.module(db) }
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def(db, self.id)
}
- pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type<'_> {
Type::from_def_placeholders(db, self.id)
}
@@ -3010,7 +3047,7 @@
BuiltinType { inner: hir_def::builtin_type::BuiltinType::Str }
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
let core = Crate::core(db).map(|core| core.id).unwrap_or_else(|| db.all_crates()[0]);
Type::new_for_crate(core, TyBuilder::builtin(self.inner))
}
@@ -3472,7 +3509,7 @@
}
}
- pub fn implementing_ty(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn implementing_ty(self, db: &dyn HirDatabase) -> Option<Type<'_>> {
match self.container(db) {
AssocItemContainer::Impl(i) => Some(i.self_ty(db)),
_ => None,
@@ -3500,10 +3537,10 @@
}
}
- pub fn diagnostics(
+ pub fn diagnostics<'db>(
self,
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
style_lints: bool,
) {
match self {
@@ -3625,7 +3662,7 @@
}
}
- pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ pub fn diagnostics<'db>(self, db: &'db dyn HirDatabase, acc: &mut Vec<AnyDiagnostic<'db>>) {
let def = self.id();
let generics = db.generic_params(def);
@@ -3690,18 +3727,19 @@
// We cannot call this `Substitution` unfortunately...
#[derive(Debug)]
-pub struct GenericSubstitution {
+pub struct GenericSubstitution<'db> {
def: GenericDefId,
subst: Substitution,
env: Arc<TraitEnvironment>,
+ _pd: PhantomCovariantLifetime<'db>,
}
-impl GenericSubstitution {
+impl<'db> GenericSubstitution<'db> {
fn new(def: GenericDefId, subst: Substitution, env: Arc<TraitEnvironment>) -> Self {
- Self { def, subst, env }
+ Self { def, subst, env, _pd: PhantomCovariantLifetime::new() }
}
- pub fn types(&self, db: &dyn HirDatabase) -> Vec<(Symbol, Type)> {
+ pub fn types(&self, db: &'db dyn HirDatabase) -> Vec<(Symbol, Type<'db>)> {
let container = match self.def {
GenericDefId::ConstId(id) => Some(id.lookup(db).container),
GenericDefId::FunctionId(id) => Some(id.lookup(db).container),
@@ -3744,7 +3782,10 @@
container_params
.chain(self_params)
.filter_map(|(ty, name)| {
- Some((name?.symbol().clone(), Type { ty, env: self.env.clone() }))
+ Some((
+ name?.symbol().clone(),
+ Type { ty, env: self.env.clone(), _pd: PhantomCovariantLifetime::new() },
+ ))
})
.collect()
}
@@ -3847,7 +3888,7 @@
self.parent(db).module(db)
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
let def = self.parent;
let infer = db.infer(def);
let ty = infer[self.binding_id].clone();
@@ -4109,6 +4150,10 @@
self.merge().name(db)
}
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent().into()
+ }
+
pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.parent().module(db).into()
}
@@ -4124,7 +4169,7 @@
}
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.parent().resolver(db);
let ty =
TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner);
@@ -4146,7 +4191,7 @@
.collect()
}
- pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn default(self, db: &dyn HirDatabase) -> Option<Type<'_>> {
let ty = generic_arg_from_param(db, self.id.into())?;
let resolver = self.id.parent().resolver(db);
match ty.data(Interner) {
@@ -4211,7 +4256,7 @@
self.id.parent().into()
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
Type::new(db, self.id.parent(), db.const_param_ty(self.id))
}
@@ -4268,7 +4313,7 @@
}
}
- pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
match self.split(db) {
Either::Left(it) => it.ty(db),
Either::Right(it) => it.ty(db),
@@ -4313,7 +4358,10 @@
module.id.def_map(db)[module.id.local_id].scope.impls().map(Into::into).collect()
}
- pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
+ pub fn all_for_type<'db>(
+ db: &'db dyn HirDatabase,
+ Type { ty, env, _pd: _ }: Type<'db>,
+ ) -> Vec<Impl> {
let def_crates = match method_resolution::def_crates(db, &ty, env.krate) {
Some(def_crates) => def_crates,
None => return Vec::new(),
@@ -4398,14 +4446,14 @@
Some(Trait { id })
}
- pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef> {
+ pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef<'_>> {
let substs = TyBuilder::placeholder_subst(db, self.id);
let trait_ref = db.impl_trait(self.id)?.substitute(Interner, &substs);
let resolver = self.id.resolver(db);
Some(TraitRef::new_with_resolver(db, &resolver, trait_ref))
}
- pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
+ pub fn self_ty(self, db: &dyn HirDatabase) -> Type<'_> {
let resolver = self.id.resolver(db);
let substs = TyBuilder::placeholder_subst(db, self.id);
let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
@@ -4467,21 +4515,22 @@
}
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct TraitRef {
+pub struct TraitRef<'db> {
env: Arc<TraitEnvironment>,
trait_ref: hir_ty::TraitRef,
+ _pd: PhantomCovariantLifetime<'db>,
}
-impl TraitRef {
+impl<'db> TraitRef<'db> {
pub(crate) fn new_with_resolver(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
resolver: &Resolver<'_>,
trait_ref: hir_ty::TraitRef,
- ) -> TraitRef {
+ ) -> Self {
let env = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
- TraitRef { env, trait_ref }
+ TraitRef { env, trait_ref, _pd: PhantomCovariantLifetime::new() }
}
pub fn trait_(&self) -> Trait {
@@ -4489,21 +4538,21 @@
Trait { id }
}
- pub fn self_ty(&self) -> Type {
+ pub fn self_ty(&self) -> Type<'_> {
let ty = self.trait_ref.self_type_parameter(Interner);
- Type { env: self.env.clone(), ty }
+ Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() }
}
/// Returns `idx`-th argument of this trait reference if it is a type argument. Note that the
/// first argument is the `Self` type.
- pub fn get_type_argument(&self, idx: usize) -> Option<Type> {
+ pub fn get_type_argument(&self, idx: usize) -> Option<Type<'db>> {
self.trait_ref
.substitution
.as_slice(Interner)
.get(idx)
.and_then(|arg| arg.ty(Interner))
.cloned()
- .map(|ty| Type { env: self.env.clone(), ty })
+ .map(|ty| Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() })
}
}
@@ -4551,7 +4600,7 @@
.collect()
}
- pub fn capture_types(&self, db: &dyn HirDatabase) -> Vec<Type> {
+ pub fn capture_types<'db>(&self, db: &'db dyn HirDatabase) -> Vec<Type<'db>> {
let owner = db.lookup_intern_closure((self.id).into()).0;
let infer = &db.infer(owner);
let (captures, _) = infer.closure_info(&self.id);
@@ -4560,6 +4609,7 @@
.map(|capture| Type {
env: db.trait_environment_for_body(owner),
ty: capture.ty(&self.subst),
+ _pd: PhantomCovariantLifetime::new(),
})
.collect()
}
@@ -4691,40 +4741,45 @@
}
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct Type {
+pub struct Type<'db> {
env: Arc<TraitEnvironment>,
ty: Ty,
+ _pd: PhantomCovariantLifetime<'db>,
}
-impl Type {
- pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver<'_>, ty: Ty) -> Type {
+impl<'db> Type<'db> {
+ pub(crate) fn new_with_resolver(
+ db: &'db dyn HirDatabase,
+ resolver: &Resolver<'_>,
+ ty: Ty,
+ ) -> Self {
Type::new_with_resolver_inner(db, resolver, ty)
}
pub(crate) fn new_with_resolver_inner(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
resolver: &Resolver<'_>,
ty: Ty,
- ) -> Type {
+ ) -> Self {
let environment = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
- Type { env: environment, ty }
+ Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
- pub(crate) fn new_for_crate(krate: base_db::Crate, ty: Ty) -> Type {
- Type { env: TraitEnvironment::empty(krate), ty }
+ pub(crate) fn new_for_crate(krate: base_db::Crate, ty: Ty) -> Self {
+ Type { env: TraitEnvironment::empty(krate), ty, _pd: PhantomCovariantLifetime::new() }
}
- fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
+ fn new(db: &'db dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Self {
let resolver = lexical_env.resolver(db);
let environment = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
- Type { env: environment, ty }
+ Type { env: environment, ty, _pd: PhantomCovariantLifetime::new() }
}
- fn from_def(db: &dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Type {
+ fn from_def(db: &'db dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Self {
let ty = db.ty(def.into());
let substs = TyBuilder::unknown_subst(
db,
@@ -4737,7 +4792,10 @@
Type::new(db, def, ty.substitute(Interner, &substs))
}
- fn from_def_placeholders(db: &dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Type {
+ fn from_def_placeholders(
+ db: &'db dyn HirDatabase,
+ def: impl Into<TyDefId> + HasResolver,
+ ) -> Self {
let ty = db.ty(def.into());
let substs = TyBuilder::placeholder_subst(
db,
@@ -4750,7 +4808,10 @@
Type::new(db, def, ty.substitute(Interner, &substs))
}
- fn from_value_def(db: &dyn HirDatabase, def: impl Into<ValueTyDefId> + HasResolver) -> Type {
+ fn from_value_def(
+ db: &'db dyn HirDatabase,
+ def: impl Into<ValueTyDefId> + HasResolver,
+ ) -> Self {
let Some(ty) = db.value_ty(def.into()) else {
return Type::new(db, def, TyKind::Error.intern(Interner));
};
@@ -4770,13 +4831,17 @@
Type::new(db, def, ty.substitute(Interner, &substs))
}
- pub fn new_slice(ty: Type) -> Type {
- Type { env: ty.env, ty: TyBuilder::slice(ty.ty) }
+ pub fn new_slice(ty: Self) -> Self {
+ Type { env: ty.env, ty: TyBuilder::slice(ty.ty), _pd: PhantomCovariantLifetime::new() }
}
- pub fn new_tuple(krate: base_db::Crate, tys: &[Type]) -> Type {
+ pub fn new_tuple(krate: base_db::Crate, tys: &[Self]) -> Self {
let tys = tys.iter().map(|it| it.ty.clone());
- Type { env: TraitEnvironment::empty(krate), ty: TyBuilder::tuple_with(tys) }
+ Type {
+ env: TraitEnvironment::empty(krate),
+ ty: TyBuilder::tuple_with(tys),
+ _pd: PhantomCovariantLifetime::new(),
+ }
}
pub fn is_unit(&self) -> bool {
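
`Type` (like `TraitRef` and `GenericSubstitution` earlier in this file) now carries its `'db` lifetime solely through `stdx::variance::PhantomCovariantLifetime`, a zero-sized marker that keeps the struct covariant in `'db` without storing an actual reference. A minimal stand-in for illustration (the real marker lives in `stdx`; everything here is only a sketch):

    use std::marker::PhantomData;

    /// Zero-sized and covariant in `'a` (stand-in for the `stdx` type).
    struct PhantomCovariantLifetime<'a>(PhantomData<&'a ()>);

    struct Type<'db> {
        // ...real fields elided...
        _pd: PhantomCovariantLifetime<'db>,
    }

    // Covariance lets a `Type<'long>` be used where a `Type<'short>` is
    // expected, so threading `'db` through existing APIs stays painless.
    fn shorten<'long: 'short, 'short>(t: Type<'long>) -> Type<'short> {
        t
    }

    fn main() {}
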
@@ -4803,7 +4868,7 @@
matches!(self.ty.kind(Interner), TyKind::Ref(..))
}
- pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool {
+ pub fn contains_reference(&self, db: &'db dyn HirDatabase) -> bool {
return go(db, self.env.krate, &self.ty);
fn go(db: &dyn HirDatabase, krate: base_db::Crate, ty: &Ty) -> bool {
@@ -4847,13 +4912,13 @@
}
}
- pub fn as_reference(&self) -> Option<(Type, Mutability)> {
+ pub fn as_reference(&self) -> Option<(Type<'db>, Mutability)> {
let (ty, _lt, m) = self.ty.as_reference()?;
let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
Some((self.derived(ty.clone()), m))
}
- pub fn add_reference(&self, mutability: Mutability) -> Type {
+ pub fn add_reference(&self, mutability: Mutability) -> Self {
let ty_mutability = match mutability {
Mutability::Shared => hir_ty::Mutability::Not,
Mutability::Mut => hir_ty::Mutability::Mut,
@@ -4889,25 +4954,25 @@
matches!(self.ty.kind(Interner), TyKind::Tuple(..))
}
- pub fn remove_ref(&self) -> Option<Type> {
+ pub fn remove_ref(&self) -> Option<Type<'db>> {
match &self.ty.kind(Interner) {
TyKind::Ref(.., ty) => Some(self.derived(ty.clone())),
_ => None,
}
}
- pub fn as_slice(&self) -> Option<Type> {
+ pub fn as_slice(&self) -> Option<Type<'db>> {
match &self.ty.kind(Interner) {
TyKind::Slice(ty) => Some(self.derived(ty.clone())),
_ => None,
}
}
- pub fn strip_references(&self) -> Type {
+ pub fn strip_references(&self) -> Self {
self.derived(self.ty.strip_references().clone())
}
- pub fn strip_reference(&self) -> Type {
+ pub fn strip_reference(&self) -> Self {
self.derived(self.ty.strip_reference().clone())
}
@@ -4918,7 +4983,7 @@
/// Checks that particular type `ty` implements `std::future::IntoFuture` or
/// `std::future::Future` and returns the `Output` associated type.
/// This function is used in `.await` syntax completion.
- pub fn into_future_output(&self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn into_future_output(&self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let trait_ = LangItem::IntoFutureIntoFuture
.resolve_function(db, self.env.krate)
.and_then(|into_future_fn| {
@@ -4940,13 +5005,13 @@
}
/// This does **not** resolve `IntoFuture`, only `Future`.
- pub fn future_output(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn future_output(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let future_output = LangItem::FutureOutput.resolve_type_alias(db, self.env.krate)?;
self.normalize_trait_assoc_type(db, &[], future_output.into())
}
/// This does **not** resolve `IntoIterator`, only `Iterator`.
- pub fn iterator_item(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn iterator_item(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let iterator_trait = LangItem::Iterator.resolve_trait(db, self.env.krate)?;
let iterator_item = db
.trait_items(iterator_trait)
@@ -4954,7 +5019,7 @@
self.normalize_trait_assoc_type(db, &[], iterator_item.into())
}
- pub fn impls_iterator(self, db: &dyn HirDatabase) -> bool {
+ pub fn impls_iterator(self, db: &'db dyn HirDatabase) -> bool {
let Some(iterator_trait) = LangItem::Iterator.resolve_trait(db, self.env.krate) else {
return false;
};
@@ -4964,7 +5029,7 @@
}
/// Resolves the projection `<Self as IntoIterator>::IntoIter` and returns the resulting type
- pub fn into_iterator_iter(self, db: &dyn HirDatabase) -> Option<Type> {
+ pub fn into_iterator_iter(self, db: &'db dyn HirDatabase) -> Option<Type<'db>> {
let trait_ = LangItem::IntoIterIntoIter.resolve_function(db, self.env.krate).and_then(
|into_iter_fn| {
let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?;
@@ -4989,7 +5054,7 @@
///
/// This function can be used to check if a particular type is callable, since FnOnce is a
/// supertrait of Fn and FnMut, so all callable types implements at least FnOnce.
- pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool {
+ pub fn impls_fnonce(&self, db: &'db dyn HirDatabase) -> bool {
let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.env.krate) {
Some(it) => it,
None => return false,
@@ -5001,7 +5066,7 @@
}
// FIXME: Find better API that also handles const generics
- pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
+ pub fn impls_trait(&self, db: &'db dyn HirDatabase, trait_: Trait, args: &[Type<'db>]) -> bool {
let mut it = args.iter().map(|t| t.ty.clone());
let trait_ref = TyBuilder::trait_ref(db, trait_.id)
.push(self.ty.clone())
@@ -5029,10 +5094,10 @@
pub fn normalize_trait_assoc_type(
&self,
- db: &dyn HirDatabase,
- args: &[Type],
+ db: &'db dyn HirDatabase,
+ args: &[Type<'db>],
alias: TypeAlias,
- ) -> Option<Type> {
+ ) -> Option<Type<'db>> {
let mut args = args.iter();
let trait_id = match alias.id.lookup(db).container {
ItemContainerId::TraitId(id) => id,
@@ -5056,14 +5121,14 @@
if ty.is_unknown() { None } else { Some(self.derived(ty)) }
}
- pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
+ pub fn is_copy(&self, db: &'db dyn HirDatabase) -> bool {
let Some(copy_trait) = LangItem::Copy.resolve_trait(db, self.env.krate) else {
return false;
};
self.impls_trait(db, copy_trait.into(), &[])
}
- pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
+ pub fn as_callable(&self, db: &'db dyn HirDatabase) -> Option<Callable<'db>> {
let callee = match self.ty.kind(Interner) {
TyKind::Closure(id, subst) => Callee::Closure(*id, subst.clone()),
TyKind::Function(_) => Callee::FnPtr,
@@ -5117,7 +5182,7 @@
matches!(self.ty.kind(Interner), TyKind::Array(..))
}
- pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
+ pub fn is_packed(&self, db: &'db dyn HirDatabase) -> bool {
let adt_id = match *self.ty.kind(Interner) {
TyKind::Adt(hir_ty::AdtId(adt_id), ..) => adt_id,
_ => return false,
@@ -5134,7 +5199,7 @@
matches!(self.ty.kind(Interner), TyKind::Raw(..))
}
- pub fn remove_raw_ptr(&self) -> Option<Type> {
+ pub fn remove_raw_ptr(&self) -> Option<Type<'db>> {
if let TyKind::Raw(_, ty) = self.ty.kind(Interner) {
Some(self.derived(ty.clone()))
} else {
@@ -5182,7 +5247,7 @@
}
}
- pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
+ pub fn fields(&self, db: &'db dyn HirDatabase) -> Vec<(Field, Self)> {
let (variant_id, substs) = match self.ty.kind(Interner) {
TyKind::Adt(hir_ty::AdtId(AdtId::StructId(s)), substs) => ((*s).into(), substs),
TyKind::Adt(hir_ty::AdtId(AdtId::UnionId(u)), substs) => ((*u).into(), substs),
@@ -5199,7 +5264,7 @@
.collect()
}
- pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
+ pub fn tuple_fields(&self, _db: &'db dyn HirDatabase) -> Vec<Self> {
if let TyKind::Tuple(_, substs) = &self.ty.kind(Interner) {
substs
.iter(Interner)
@@ -5210,7 +5275,7 @@
}
}
- pub fn as_array(&self, db: &dyn HirDatabase) -> Option<(Type, usize)> {
+ pub fn as_array(&self, db: &'db dyn HirDatabase) -> Option<(Self, usize)> {
if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
try_const_usize(db, len).map(|it| (self.derived(ty.clone()), it as usize))
} else {
@@ -5228,14 +5293,14 @@
/// Returns types that this type dereferences to (including this type itself). The returned
/// iterator won't yield the same type more than once even if the deref chain contains a cycle.
- pub fn autoderef<'db>(
+ pub fn autoderef(
&self,
db: &'db dyn HirDatabase,
- ) -> impl Iterator<Item = Type> + use<'_, 'db> {
+ ) -> impl Iterator<Item = Type<'db>> + use<'_, 'db> {
self.autoderef_(db).map(move |ty| self.derived(ty))
}
- fn autoderef_(&self, db: &dyn HirDatabase) -> impl Iterator<Item = Ty> {
+ fn autoderef_(&self, db: &'db dyn HirDatabase) -> impl Iterator<Item = Ty> {
// There should be no inference vars in types passed here
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
autoderef(db, self.env.clone(), canonical)
@@ -5245,7 +5310,7 @@
// lifetime problems, because we need to borrow temp `CrateImplDefs`.
pub fn iterate_assoc_items<T>(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
krate: Crate,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
@@ -5259,7 +5324,7 @@
fn iterate_assoc_items_dyn(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
krate: Crate,
callback: &mut dyn FnMut(AssocItemId) -> bool,
) {
@@ -5298,7 +5363,7 @@
/// - "String"
/// - "U"
/// ```
- pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
+ pub fn type_arguments(&self) -> impl Iterator<Item = Type<'db>> + '_ {
self.ty
.strip_references()
.as_adt()
@@ -5368,7 +5433,7 @@
pub fn iterate_method_candidates_with_traits<T>(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
@@ -5396,7 +5461,7 @@
pub fn iterate_method_candidates<T>(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
with_local_impls: Option<Module>,
name: Option<&Name>,
@@ -5418,7 +5483,7 @@
/// are considered inherent methods.
pub fn iterate_method_candidates_split_inherent(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
@@ -5486,7 +5551,7 @@
#[tracing::instrument(skip_all, fields(name = ?name))]
pub fn iterate_path_candidates<T>(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
@@ -5521,7 +5586,7 @@
#[tracing::instrument(skip_all, fields(name = ?name))]
pub fn iterate_path_candidates_split_inherent(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
@@ -5584,10 +5649,10 @@
/// If a type can be represented as `dyn Trait`, returns all traits accessible via this type,
/// or an empty iterator otherwise.
- pub fn applicable_inherent_traits<'a>(
- &'a self,
- db: &'a dyn HirDatabase,
- ) -> impl Iterator<Item = Trait> + 'a {
+ pub fn applicable_inherent_traits(
+ &self,
+ db: &'db dyn HirDatabase,
+ ) -> impl Iterator<Item = Trait> {
let _p = tracing::info_span!("applicable_inherent_traits").entered();
self.autoderef_(db)
.filter_map(|ty| ty.dyn_trait())
@@ -5595,7 +5660,7 @@
.map(Trait::from)
}
- pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
+ pub fn env_traits(&self, db: &'db dyn HirDatabase) -> impl Iterator<Item = Trait> {
let _p = tracing::info_span!("env_traits").entered();
self.autoderef_(db)
.filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
@@ -5607,10 +5672,7 @@
.map(Trait::from)
}
- pub fn as_impl_traits(
- &self,
- db: &dyn HirDatabase,
- ) -> Option<impl Iterator<Item = Trait> + use<>> {
+ pub fn as_impl_traits(&self, db: &'db dyn HirDatabase) -> Option<impl Iterator<Item = Trait>> {
self.ty.impl_trait_bounds(db).map(|it| {
it.into_iter().filter_map(|pred| match pred.skip_binders() {
hir_ty::WhereClause::Implemented(trait_ref) => {
@@ -5621,33 +5683,33 @@
})
}
- pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<Trait> {
+ pub fn as_associated_type_parent_trait(&self, db: &'db dyn HirDatabase) -> Option<Trait> {
self.ty.associated_type_parent_trait(db).map(Into::into)
}
- fn derived(&self, ty: Ty) -> Type {
- Type { env: self.env.clone(), ty }
+ fn derived(&self, ty: Ty) -> Self {
+ Type { env: self.env.clone(), ty, _pd: PhantomCovariantLifetime::new() }
}
/// Visits every type, including generic arguments, in this type. `cb` is called with the type
/// itself first, and then with its generic arguments.
- pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) {
- fn walk_substs(
- db: &dyn HirDatabase,
- type_: &Type,
+ pub fn walk(&self, db: &'db dyn HirDatabase, mut cb: impl FnMut(Type<'db>)) {
+ fn walk_substs<'db>(
+ db: &'db dyn HirDatabase,
+ type_: &Type<'db>,
substs: &Substitution,
- cb: &mut impl FnMut(Type),
+ cb: &mut impl FnMut(Type<'db>),
) {
for ty in substs.iter(Interner).filter_map(|a| a.ty(Interner)) {
walk_type(db, &type_.derived(ty.clone()), cb);
}
}
- fn walk_bounds(
- db: &dyn HirDatabase,
- type_: &Type,
+ fn walk_bounds<'db>(
+ db: &'db dyn HirDatabase,
+ type_: &Type<'db>,
bounds: &[QuantifiedWhereClause],
- cb: &mut impl FnMut(Type),
+ cb: &mut impl FnMut(Type<'db>),
) {
for pred in bounds {
if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
@@ -5664,7 +5726,11 @@
}
}
- fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
+ fn walk_type<'db>(
+ db: &'db dyn HirDatabase,
+ type_: &Type<'db>,
+ cb: &mut impl FnMut(Type<'db>),
+ ) {
let ty = type_.ty.strip_references();
match ty.kind(Interner) {
TyKind::Adt(_, substs) => {
@@ -5732,7 +5798,7 @@
///
/// Note that we consider placeholder types to unify with everything.
/// For example `Option<T>` and `Option<U>` unify although there is an unresolved goal `T = U`.
- pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ pub fn could_unify_with(&self, db: &'db dyn HirDatabase, other: &Type<'db>) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
hir_ty::could_unify(db, self.env.clone(), &tys)
}
@@ -5741,17 +5807,17 @@
///
/// This means that placeholder types are not considered to unify if there are any bounds set on
/// them. For example `Option<T>` and `Option<U>` do not unify, as we cannot show that `T = U`.
- pub fn could_unify_with_deeply(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ pub fn could_unify_with_deeply(&self, db: &'db dyn HirDatabase, other: &Type<'db>) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
hir_ty::could_unify_deeply(db, self.env.clone(), &tys)
}
- pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
+ pub fn could_coerce_to(&self, db: &'db dyn HirDatabase, to: &Type<'db>) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
hir_ty::could_coerce(db, self.env.clone(), &tys)
}
- pub fn as_type_param(&self, db: &dyn HirDatabase) -> Option<TypeParam> {
+ pub fn as_type_param(&self, db: &'db dyn HirDatabase) -> Option<TypeParam> {
match self.ty.kind(Interner) {
TyKind::Placeholder(p) => Some(TypeParam {
id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p)),
@@ -5761,19 +5827,19 @@
}
/// Returns unique `GenericParam`s contained in this type.
- pub fn generic_params(&self, db: &dyn HirDatabase) -> FxHashSet<GenericParam> {
+ pub fn generic_params(&self, db: &'db dyn HirDatabase) -> FxHashSet<GenericParam> {
hir_ty::collect_placeholders(&self.ty, db)
.into_iter()
.map(|id| TypeOrConstParam { id }.split(db).either_into())
.collect()
}
- pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
+ pub fn layout(&self, db: &'db dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty(self.ty.clone(), self.env.clone())
.map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap()))
}
- pub fn drop_glue(&self, db: &dyn HirDatabase) -> DropGlue {
+ pub fn drop_glue(&self, db: &'db dyn HirDatabase) -> DropGlue {
db.has_drop_glue(self.ty.clone(), self.env.clone())
}
}
@@ -5800,8 +5866,8 @@
// FIXME: Document this
#[derive(Debug)]
-pub struct Callable {
- ty: Type,
+pub struct Callable<'db> {
+ ty: Type<'db>,
sig: CallableSig,
callee: Callee,
/// Whether this is a method that was called with method call syntax.
@@ -5825,7 +5891,7 @@
FnImpl(FnTrait),
}
-impl Callable {
+impl<'db> Callable<'db> {
pub fn kind(&self) -> CallableKind {
match self.callee {
Callee::Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
@@ -5840,7 +5906,7 @@
Callee::FnImpl(fn_) => CallableKind::FnImpl(fn_),
}
}
- pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(SelfParam, Type)> {
+ pub fn receiver_param(&self, db: &'db dyn HirDatabase) -> Option<(SelfParam, Type<'db>)> {
let func = match self.callee {
Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
_ => return None,
@@ -5851,7 +5917,7 @@
pub fn n_params(&self) -> usize {
self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
}
- pub fn params(&self) -> Vec<Param> {
+ pub fn params(&self) -> Vec<Param<'db>> {
self.sig
.params()
.iter()
@@ -5861,14 +5927,14 @@
.map(|(idx, ty)| Param { func: self.callee.clone(), idx, ty })
.collect()
}
- pub fn return_type(&self) -> Type {
+ pub fn return_type(&self) -> Type<'db> {
self.ty.derived(self.sig.ret().clone())
}
pub fn sig(&self) -> &CallableSig {
&self.sig
}
- pub fn ty(&self) -> &Type {
+ pub fn ty(&self) -> &Type<'db> {
&self.ty
}
}
@@ -6070,9 +6136,9 @@
}
#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct Adjustment {
- pub source: Type,
- pub target: Type,
+pub struct Adjustment<'db> {
+ pub source: Type<'db>,
+ pub target: Type<'db>,
pub kind: Adjust,
}
@@ -6171,7 +6237,7 @@
}
}
-impl HasCrate for Type {
+impl HasCrate for Type<'_> {
fn krate(&self, _db: &dyn HirDatabase) -> Crate {
self.env.krate.into()
}
@@ -6325,9 +6391,9 @@
SelfType(Trait),
}
-fn push_ty_diagnostics(
- db: &dyn HirDatabase,
- acc: &mut Vec<AnyDiagnostic>,
+fn push_ty_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
+ acc: &mut Vec<AnyDiagnostic<'db>>,
diagnostics: Option<ThinArc<(), TyLoweringDiagnostic>>,
source_map: &ExpressionStoreSourceMap,
) {
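
The hunks above thread a `'db` lifetime through `Type`, `Callable`, `Adjustment`, and the surrounding helpers, and `derived` now fills in a `_pd: PhantomCovariantLifetime::new()` marker field. A minimal sketch of that idiom, assuming `PhantomCovariantLifetime<'db>` behaves like a covariant `PhantomData<&'db ()>`; the `TiedToDb` name and its constructor are illustrative only, not part of the crate:

    use std::marker::PhantomData;

    /// Zero-sized marker: the wrapper owns no borrowed data itself, but the
    /// `PhantomData<&'db ()>` field ties it (covariantly) to the borrow it was
    /// derived from, so it cannot outlive that borrow.
    struct TiedToDb<'db, T> {
        value: T,
        _pd: PhantomData<&'db ()>,
    }

    impl<'db, T> TiedToDb<'db, T> {
        fn derived_from<Db: ?Sized>(_db: &'db Db, value: T) -> Self {
            TiedToDb { value, _pd: PhantomData }
        }

        fn get(&self) -> &T {
            &self.value
        }
    }

    fn main() {
        let db = String::from("salsa database stand-in");
        let wrapped = TiedToDb::derived_from(&db, 42);
        assert_eq!(*wrapped.get(), 42);
        // drop(db); // would not compile: `wrapped` is tied to the borrow of `db`
    }
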
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 1049895..d969758 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -123,15 +123,15 @@
}
#[derive(Debug)]
-pub struct TypeInfo {
+pub struct TypeInfo<'db> {
/// The original type of the expression or pattern.
- pub original: Type,
+ pub original: Type<'db>,
/// The adjusted type, if an adjustment happened.
- pub adjusted: Option<Type>,
+ pub adjusted: Option<Type<'db>>,
}
-impl TypeInfo {
- pub fn original(self) -> Type {
+impl<'db> TypeInfo<'db> {
+ pub fn original(self) -> Type<'db> {
self.original
}
@@ -140,7 +140,7 @@
}
/// The adjusted type, or the original in case no adjustments occurred.
- pub fn adjusted(self) -> Type {
+ pub fn adjusted(self) -> Type<'db> {
self.adjusted.unwrap_or(self.original)
}
}
@@ -1534,7 +1534,7 @@
Some(Label { parent, label_id })
}
- pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
let analyze = self.analyze(ty.syntax())?;
analyze.type_of_type(self.db, ty)
}
@@ -1553,7 +1553,7 @@
}
}
- pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
+ pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
let mutability = |m| match m {
hir_ty::Mutability::Not => Mutability::Shared,
hir_ty::Mutability::Mut => Mutability::Mut,
@@ -1596,13 +1596,13 @@
})
}
- pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
self.analyze(expr.syntax())?
.type_of_expr(self.db, expr)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
}
- pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
self.analyze(pat.syntax())?
.type_of_pat(self.db, pat)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
@@ -1611,15 +1611,15 @@
/// It also includes the changes that binding mode makes in the type. For example in
/// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
/// of this function is `&mut Option<T>`
- pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
+ pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
}
- pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
self.analyze(param.syntax())?.type_of_self(self.db, param)
}
- pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
self.analyze(pat.syntax())
.and_then(|it| it.pattern_adjustments(self.db, pat))
.unwrap_or_default()
@@ -1629,7 +1629,7 @@
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
}
- pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable> {
+ pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
}
@@ -1641,7 +1641,7 @@
pub fn resolve_method_call_fallback(
&self,
call: &ast::MethodCallExpr,
- ) -> Option<(Either<Function, Field>, Option<GenericSubstitution>)> {
+ ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
}
@@ -1649,10 +1649,10 @@
// FIXME: better api for the trait environment
pub fn resolve_trait_impl_method(
&self,
- env: Type,
+ env: Type<'db>,
trait_: Trait,
func: Function,
- subst: impl IntoIterator<Item = Type>,
+ subst: impl IntoIterator<Item = Type<'db>>,
) -> Option<Function> {
let mut substs = hir_ty::TyBuilder::subst_for_def(self.db, TraitId::from(trait_), None);
for s in subst {
@@ -1691,7 +1691,10 @@
// This does not resolve the method call to the correct trait impl!
// We should probably fix that.
- pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ pub fn resolve_method_call_as_callable(
+ &self,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Callable<'db>> {
self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
}
@@ -1702,14 +1705,15 @@
pub fn resolve_field_fallback(
&self,
field: &ast::FieldExpr,
- ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution>)> {
+ ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
+ {
self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
}
pub fn resolve_record_field(
&self,
field: &ast::RecordExprField,
- ) -> Option<(Field, Option<Local>, Type)> {
+ ) -> Option<(Field, Option<Local>, Type<'db>)> {
self.resolve_record_field_with_substitution(field)
.map(|(field, local, ty, _)| (field, local, ty))
}
@@ -1717,18 +1721,21 @@
pub fn resolve_record_field_with_substitution(
&self,
field: &ast::RecordExprField,
- ) -> Option<(Field, Option<Local>, Type, GenericSubstitution)> {
+ ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
self.analyze(field.syntax())?.resolve_record_field(self.db, field)
}
- pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
+ pub fn resolve_record_pat_field(
+ &self,
+ field: &ast::RecordPatField,
+ ) -> Option<(Field, Type<'db>)> {
self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
}
pub fn resolve_record_pat_field_with_subst(
&self,
field: &ast::RecordPatField,
- ) -> Option<(Field, Type, GenericSubstitution)> {
+ ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
}
@@ -1801,7 +1808,7 @@
pub fn resolve_path_with_subst(
&self,
path: &ast::Path,
- ) -> Option<(PathResolution, Option<GenericSubstitution>)> {
+ ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
self.analyze(path.syntax())?.resolve_path(self.db, path)
}
@@ -1812,7 +1819,7 @@
pub fn resolve_offset_of_field(
&self,
name_ref: &ast::NameRef,
- ) -> Option<(Either<Variant, Field>, GenericSubstitution)> {
+ ) -> Option<(Either<Variant, Field>, GenericSubstitution<'db>)> {
self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
}
@@ -1834,13 +1841,19 @@
self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
}
- pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ pub fn record_literal_missing_fields(
+ &self,
+ literal: &ast::RecordExpr,
+ ) -> Vec<(Field, Type<'db>)> {
self.analyze(literal.syntax())
.and_then(|it| it.record_literal_missing_fields(self.db, literal))
.unwrap_or_default()
}
- pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ pub fn record_pattern_missing_fields(
+ &self,
+ pattern: &ast::RecordPat,
+ ) -> Vec<(Field, Type<'db>)> {
self.analyze(pattern.syntax())
.and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
.unwrap_or_default()
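
With these signatures, the values `Semantics` hands out (`TypeInfo<'db>`, `Type<'db>`, `Callable<'db>`, `GenericSubstitution<'db>`) borrow from the underlying database rather than being fully owned. A sketch of a caller under the new signatures; `describe_call` is a hypothetical helper, not part of the API:

    use hir::{Callable, Semantics};
    use ide_db::RootDatabase;
    use syntax::ast;

    /// Hypothetical helper: resolve a call expression and report its arity and
    /// return type. The returned `Callable<'db>` borrows from the database, so it
    /// cannot outlive the `Semantics` it came from.
    fn describe_call<'db>(
        sema: &Semantics<'db, RootDatabase>,
        call: &ast::Expr,
    ) -> Option<String> {
        let callable: Callable<'db> = sema.resolve_expr_as_callable(call)?;
        Some(format!("{} parameter(s), returns {:?}", callable.n_params(), callable.return_type()))
    }
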
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 3273358..48543ca 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -257,7 +257,11 @@
infer.expr_adjustments.get(&expr_id).map(|v| &**v)
}
- pub(crate) fn type_of_type(&self, db: &'db dyn HirDatabase, ty: &ast::Type) -> Option<Type> {
+ pub(crate) fn type_of_type(
+ &self,
+ db: &'db dyn HirDatabase,
+ ty: &ast::Type,
+ ) -> Option<Type<'db>> {
let type_ref = self.type_id(ty)?;
let ty = TyLoweringContext::new(
db,
@@ -277,7 +281,7 @@
&self,
db: &'db dyn HirDatabase,
expr: &ast::Expr,
- ) -> Option<(Type, Option<Type>)> {
+ ) -> Option<(Type<'db>, Option<Type<'db>>)> {
let expr_id = self.expr_id(expr.clone())?;
let infer = self.infer()?;
let coerced = expr_id
@@ -293,7 +297,7 @@
&self,
db: &'db dyn HirDatabase,
pat: &ast::Pat,
- ) -> Option<(Type, Option<Type>)> {
+ ) -> Option<(Type<'db>, Option<Type<'db>>)> {
let expr_or_pat_id = self.pat_id(pat)?;
let infer = self.infer()?;
let coerced = match expr_or_pat_id {
@@ -316,7 +320,7 @@
&self,
db: &'db dyn HirDatabase,
pat: &ast::IdentPat,
- ) -> Option<Type> {
+ ) -> Option<Type<'db>> {
let binding_id = self.binding_id_of_pat(pat)?;
let infer = self.infer()?;
let ty = infer[binding_id].clone();
@@ -328,7 +332,7 @@
&self,
db: &'db dyn HirDatabase,
_param: &ast::SelfParam,
- ) -> Option<Type> {
+ ) -> Option<Type<'db>> {
let binding = self.body()?.self_param?;
let ty = self.infer()?[binding].clone();
Some(Type::new_with_resolver(db, &self.resolver, ty))
@@ -353,7 +357,7 @@
&self,
db: &'db dyn HirDatabase,
pat: &ast::Pat,
- ) -> Option<SmallVec<[Type; 1]>> {
+ ) -> Option<SmallVec<[Type<'db>; 1]>> {
let pat_id = self.pat_id(pat)?;
let infer = self.infer()?;
Some(
@@ -370,7 +374,7 @@
&self,
db: &'db dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<Callable> {
+ ) -> Option<Callable<'db>> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
let (func, substs) = self.infer()?.method_resolution(expr_id)?;
let ty = db.value_ty(func.into())?.substitute(Interner, &substs);
@@ -395,7 +399,7 @@
&self,
db: &'db dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<(Either<Function, Field>, Option<GenericSubstitution>)> {
+ ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
let inference_result = self.infer()?;
match inference_result.method_resolution(expr_id) {
@@ -419,7 +423,7 @@
&self,
db: &'db dyn HirDatabase,
call: &ast::Expr,
- ) -> Option<Callable> {
+ ) -> Option<Callable<'db>> {
let (orig, adjusted) = self.type_of_expr(db, &call.clone())?;
adjusted.unwrap_or(orig).as_callable(db)
}
@@ -440,7 +444,7 @@
field_expr: ExprId,
infer: &InferenceResult,
db: &'db dyn HirDatabase,
- ) -> Option<GenericSubstitution> {
+ ) -> Option<GenericSubstitution<'db>> {
let body = self.store()?;
if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] {
let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?;
@@ -457,7 +461,8 @@
&self,
db: &'db dyn HirDatabase,
field: &ast::FieldExpr,
- ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution>)> {
+ ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
+ {
let (def, ..) = self.body_()?;
let expr_id = self.expr_id(field.clone().into())?.as_expr()?;
let inference_result = self.infer()?;
@@ -680,7 +685,7 @@
&self,
db: &'db dyn HirDatabase,
field: &ast::RecordExprField,
- ) -> Option<(Field, Option<Local>, Type, GenericSubstitution)> {
+ ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let expr = ast::Expr::from(record_expr);
let expr_id = self.store_sm()?.node_expr(InFile::new(self.file_id, &expr))?;
@@ -724,7 +729,7 @@
&self,
db: &'db dyn HirDatabase,
field: &ast::RecordPatField,
- ) -> Option<(Field, Type, GenericSubstitution)> {
+ ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
let field_name = field.field_name()?.as_name();
let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let pat_id = self.pat_id(&record_pat.into())?;
@@ -779,7 +784,7 @@
&self,
db: &'db dyn HirDatabase,
name_ref: &ast::NameRef,
- ) -> Option<(Either<crate::Variant, crate::Field>, GenericSubstitution)> {
+ ) -> Option<(Either<crate::Variant, crate::Field>, GenericSubstitution<'db>)> {
let offset_of_expr = ast::OffsetOfExpr::cast(name_ref.syntax().parent()?)?;
let container = offset_of_expr.ty()?;
let container = self.type_of_type(db, &container)?;
@@ -851,7 +856,7 @@
&self,
db: &'db dyn HirDatabase,
path: &ast::Path,
- ) -> Option<(PathResolution, Option<GenericSubstitution>)> {
+ ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
let parent = path.syntax().parent();
let parent = || parent.clone();
@@ -1216,7 +1221,7 @@
&self,
db: &'db dyn HirDatabase,
literal: &ast::RecordExpr,
- ) -> Option<Vec<(Field, Type)>> {
+ ) -> Option<Vec<(Field, Type<'db>)>> {
let body = self.store()?;
let infer = self.infer()?;
@@ -1239,7 +1244,7 @@
&self,
db: &'db dyn HirDatabase,
pattern: &ast::RecordPat,
- ) -> Option<Vec<(Field, Type)>> {
+ ) -> Option<Vec<(Field, Type<'db>)>> {
let body = self.store()?;
let infer = self.infer()?;
@@ -1258,7 +1263,7 @@
substs: &Substitution,
variant: VariantId,
missing_fields: Vec<LocalFieldId>,
- ) -> Vec<(Field, Type)> {
+ ) -> Vec<(Field, Type<'db>)> {
let field_types = db.field_types(variant);
missing_fields
diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs
index af72179..4b354e6 100644
--- a/crates/hir/src/term_search.rs
+++ b/crates/hir/src/term_search.rs
@@ -22,20 +22,20 @@
/// Helper enum to squash a big number of alternative trees into the `Many` variant, as there are
/// too many to take into account.
#[derive(Debug)]
-enum AlternativeExprs {
+enum AlternativeExprs<'db> {
/// There are few trees, so we keep track of them all
- Few(FxHashSet<Expr>),
+ Few(FxHashSet<Expr<'db>>),
/// There are too many trees to keep track of
Many,
}
-impl AlternativeExprs {
+impl<'db> AlternativeExprs<'db> {
/// Construct alternative trees
///
/// # Arguments
/// `threshold` - threshold value for many trees (more than that is many)
/// `exprs` - expressions iterator
- fn new(threshold: usize, exprs: impl Iterator<Item = Expr>) -> AlternativeExprs {
+ fn new(threshold: usize, exprs: impl Iterator<Item = Expr<'db>>) -> AlternativeExprs<'db> {
let mut it = AlternativeExprs::Few(Default::default());
it.extend_with_threshold(threshold, exprs);
it
@@ -45,7 +45,7 @@
///
/// # Arguments
/// `ty` - Type of expressions queried (this is used to give type to `Expr::Many`)
- fn exprs(&self, ty: &Type) -> Vec<Expr> {
+ fn exprs(&self, ty: &Type<'db>) -> Vec<Expr<'db>> {
match self {
AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(),
AlternativeExprs::Many => vec![Expr::Many(ty.clone())],
@@ -57,7 +57,7 @@
/// # Arguments
/// `threshold` - threshold value for many trees (more than that is many)
/// `exprs` - expressions iterator
- fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr>) {
+ fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr<'db>>) {
match self {
AlternativeExprs::Few(tts) => {
for it in exprs {
@@ -88,20 +88,20 @@
/// Both of them are to speed up the term search by leaving out types / ScopeDefs that likely do
/// not produce any new results.
#[derive(Default, Debug)]
-struct LookupTable {
+struct LookupTable<'db> {
/// All the `Expr`s in "value" produce the type of "key"
- data: FxHashMap<Type, AlternativeExprs>,
+ data: FxHashMap<Type<'db>, AlternativeExprs<'db>>,
/// New types reached since last query by the `NewTypesKey`
- new_types: FxHashMap<NewTypesKey, Vec<Type>>,
+ new_types: FxHashMap<NewTypesKey, Vec<Type<'db>>>,
/// Types queried but not present
- types_wishlist: FxHashSet<Type>,
+ types_wishlist: FxHashSet<Type<'db>>,
/// Threshold to squash trees to `Many`
many_threshold: usize,
}
-impl LookupTable {
+impl<'db> LookupTable<'db> {
/// Initialize lookup table
- fn new(many_threshold: usize, goal: Type) -> Self {
+ fn new(many_threshold: usize, goal: Type<'db>) -> Self {
let mut res = Self { many_threshold, ..Default::default() };
res.new_types.insert(NewTypesKey::ImplMethod, Vec::new());
res.new_types.insert(NewTypesKey::StructProjection, Vec::new());
@@ -110,7 +110,7 @@
}
/// Find all `Expr`s that unify with the `ty`
- fn find(&mut self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+ fn find(&mut self, db: &'db dyn HirDatabase, ty: &Type<'db>) -> Option<Vec<Expr<'db>>> {
let res = self
.data
.iter()
@@ -135,7 +135,7 @@
///
/// For example if we have type `i32` in data and we query for `&i32`, it maps all the type
/// trees we have for `i32` with `Expr::Reference` and returns them.
- fn find_autoref(&mut self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+ fn find_autoref(&mut self, db: &'db dyn HirDatabase, ty: &Type<'db>) -> Option<Vec<Expr<'db>>> {
let res = self
.data
.iter()
@@ -174,7 +174,7 @@
/// Note that the types have to be the same; unification is not enough, as unification is not
/// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
/// but they clearly do not unify with each other.
- fn insert(&mut self, ty: Type, exprs: impl Iterator<Item = Expr>) {
+ fn insert(&mut self, ty: Type<'db>, exprs: impl Iterator<Item = Expr<'db>>) {
match self.data.get_mut(&ty) {
Some(it) => {
it.extend_with_threshold(self.many_threshold, exprs);
@@ -192,14 +192,14 @@
}
/// Iterate all the reachable types
- fn iter_types(&self) -> impl Iterator<Item = Type> + '_ {
+ fn iter_types(&self) -> impl Iterator<Item = Type<'db>> + '_ {
self.data.keys().cloned()
}
/// Query new types reached since last query by key
///
/// Create new key if you wish to query it to avoid conflicting with existing queries.
- fn new_types(&mut self, key: NewTypesKey) -> Vec<Type> {
+ fn new_types(&mut self, key: NewTypesKey) -> Vec<Type<'db>> {
match self.new_types.get_mut(&key) {
Some(it) => std::mem::take(it),
None => Vec::new(),
@@ -207,20 +207,20 @@
}
/// Types queried but not found
- fn types_wishlist(&mut self) -> &FxHashSet<Type> {
+ fn types_wishlist(&mut self) -> &FxHashSet<Type<'db>> {
&self.types_wishlist
}
}
/// Context for the `term_search` function
#[derive(Debug)]
-pub struct TermSearchCtx<'a, DB: HirDatabase> {
+pub struct TermSearchCtx<'db, DB: HirDatabase> {
/// Semantics for the program
- pub sema: &'a Semantics<'a, DB>,
+ pub sema: &'db Semantics<'db, DB>,
/// Semantic scope, captures context for the term search
- pub scope: &'a SemanticsScope<'a>,
+ pub scope: &'db SemanticsScope<'db>,
/// Target / expected output type
- pub goal: Type,
+ pub goal: Type<'db>,
/// Configuration for term search
pub config: TermSearchConfig,
}
@@ -263,7 +263,7 @@
/// Note that there are usually more ways we can get to the `goal` type but some are discarded to
/// reduce the memory consumption. It is also unlikely anyone is willing to browse through
/// thousands of possible responses, so we currently take the first 10 from every tactic.
-pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
+pub fn term_search<'db, DB: HirDatabase>(ctx: &'db TermSearchCtx<'db, DB>) -> Vec<Expr<'db>> {
let module = ctx.scope.module();
let mut defs = FxHashSet::default();
defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module)));
@@ -285,7 +285,7 @@
};
// Try trivial tactic first, also populates lookup table
- let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
+ let mut solutions: Vec<Expr<'db>> = tactics::trivial(ctx, &defs, &mut lookup).collect();
// Use well known types tactic before iterations as it does not depend on other tactics
solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup));
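
The `AlternativeExprs` comments above describe the squashing strategy: keep concrete alternatives while they are few, and collapse to a single `Many` marker once a threshold is crossed so the search space stays bounded. A self-contained sketch of that strategy with a plain generic enum; the name, the `HashSet` storage, and the exact threshold comparison are illustrative, while the real type stores `Expr<'db>` values inside `LookupTable<'db>`:

    use std::collections::HashSet;
    use std::hash::Hash;

    /// Either a small, explicit set of alternatives or a marker that there are too many.
    enum Alternatives<T> {
        Few(HashSet<T>),
        Many,
    }

    impl<T: Eq + Hash> Alternatives<T> {
        fn new(threshold: usize, items: impl Iterator<Item = T>) -> Self {
            let mut alts = Alternatives::Few(HashSet::new());
            alts.extend_with_threshold(threshold, items);
            alts
        }

        /// Insert items, collapsing to `Many` as soon as the threshold is reached.
        fn extend_with_threshold(&mut self, threshold: usize, items: impl Iterator<Item = T>) {
            match self {
                Alternatives::Few(set) => {
                    for item in items {
                        if set.len() >= threshold {
                            *self = Alternatives::Many;
                            return;
                        }
                        set.insert(item);
                    }
                }
                Alternatives::Many => {}
            }
        }
    }

    fn main() {
        let alts = Alternatives::new(2, ["a", "b", "c", "d"].into_iter());
        assert!(matches!(alts, Alternatives::Many));
    }
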
diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs
index 78ee3b5..8438319 100644
--- a/crates/hir/src/term_search/expr.rs
+++ b/crates/hir/src/term_search/expr.rs
@@ -59,7 +59,7 @@
/// So in short it pretty much gives us a way to get type `Option<i32>` using the items we have in
/// scope.
#[derive(Debug, Clone, Eq, Hash, PartialEq)]
-pub enum Expr {
+pub enum Expr<'db> {
/// Constant
Const(Const),
/// Static variable
@@ -69,26 +69,31 @@
/// Constant generic parameter
ConstParam(ConstParam),
/// Well known type (such as `true` for bool)
- FamousType { ty: Type, value: &'static str },
+ FamousType { ty: Type<'db>, value: &'static str },
/// Function call (does not take self param)
- Function { func: Function, generics: Vec<Type>, params: Vec<Expr> },
+ Function { func: Function, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
/// Method call (has self param)
- Method { func: Function, generics: Vec<Type>, target: Box<Expr>, params: Vec<Expr> },
+ Method {
+ func: Function,
+ generics: Vec<Type<'db>>,
+ target: Box<Expr<'db>>,
+ params: Vec<Expr<'db>>,
+ },
/// Enum variant construction
- Variant { variant: Variant, generics: Vec<Type>, params: Vec<Expr> },
+ Variant { variant: Variant, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
/// Struct construction
- Struct { strukt: Struct, generics: Vec<Type>, params: Vec<Expr> },
+ Struct { strukt: Struct, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
/// Tuple construction
- Tuple { ty: Type, params: Vec<Expr> },
+ Tuple { ty: Type<'db>, params: Vec<Expr<'db>> },
/// Struct field access
- Field { expr: Box<Expr>, field: Field },
+ Field { expr: Box<Expr<'db>>, field: Field },
/// Passing type as reference (with `&`)
- Reference(Box<Expr>),
+ Reference(Box<Expr<'db>>),
/// Indicates possibility of many different options that all evaluate to `ty`
- Many(Type),
+ Many(Type<'db>),
}
-impl Expr {
+impl<'db> Expr<'db> {
/// Generate source code for type tree.
///
/// Note that trait imports are not added to generated code.
@@ -96,8 +101,8 @@
/// by `traits_used` method are also imported.
pub fn gen_source_code(
&self,
- sema_scope: &SemanticsScope<'_>,
- many_formatter: &mut dyn FnMut(&Type) -> String,
+ sema_scope: &SemanticsScope<'db>,
+ many_formatter: &mut dyn FnMut(&Type<'db>) -> String,
cfg: ImportPathConfig,
display_target: DisplayTarget,
) -> Result<String, DisplaySourceCodeError> {
@@ -298,7 +303,7 @@
/// Get type of the type tree.
///
/// Same as getting the type of root node
- pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ pub fn ty(&self, db: &'db dyn HirDatabase) -> Type<'db> {
match self {
Expr::Const(it) => it.ty(db),
Expr::Static(it) => it.ty(db),
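
`Expr::gen_source_code`, whose signature gains the `'db` lifetime above, renders a term-search tree to source text and asks the caller how to print `Expr::Many` placeholders. A usage sketch under assumed import paths; `render_term` is an invented helper, and the `ImportPathConfig`/`DisplayTarget` values are taken as inputs rather than constructed here:

    use hir::term_search::Expr;
    use hir::{DisplayTarget, ImportPathConfig, SemanticsScope};

    /// Hypothetical helper: render one term-search result, emitting `todo!()` for
    /// any `Expr::Many` node the search could not narrow down further.
    fn render_term<'db>(
        expr: &Expr<'db>,
        scope: &SemanticsScope<'db>,
        cfg: ImportPathConfig,
        display_target: DisplayTarget,
    ) -> Option<String> {
        expr.gen_source_code(scope, &mut |_| String::from("todo!()"), cfg, display_target).ok()
    }
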
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
index bcff44f..9df131f 100644
--- a/crates/hir/src/term_search/tactics.rs
+++ b/crates/hir/src/term_search/tactics.rs
@@ -40,11 +40,11 @@
///
/// _Note that there is no point in calling this tactic in every iteration, as the output does not
/// depend on the current state of `lookup`_
-pub(super) fn trivial<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn trivial<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+ lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
defs.iter().filter_map(|def| {
let expr = match def {
@@ -104,11 +104,11 @@
///
/// _Note that there is no point in calling this tactic in every iteration, as the output does not
/// depend on the current state of `lookup`_
-pub(super) fn assoc_const<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn assoc_const<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+ lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
@@ -152,12 +152,12 @@
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn data_constructor<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn data_constructor<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
lookup
@@ -199,14 +199,14 @@
let generics: Vec<_> = ty.type_arguments().collect();
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = fields
+ let param_exprs: Vec<Vec<Expr<'_>>> = fields
.into_iter()
.map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
.collect::<Option<_>>()?;
// Note that we need a special case for 0-param constructors because of the multi cartesian
// product
- let exprs: Vec<Expr> = if param_exprs.is_empty() {
+ let exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
vec![Expr::Struct { strukt, generics, params: Vec::new() }]
} else {
param_exprs
@@ -247,7 +247,7 @@
.into_iter()
.filter_map(|variant| {
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = variant
+ let param_exprs: Vec<Vec<Expr<'_>>> = variant
.fields(db)
.into_iter()
.map(|field| {
@@ -257,7 +257,7 @@
// Note that we need a special case for 0-param constructors because of the multi cartesian
// product
- let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ let variant_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
vec![Expr::Variant {
variant,
generics: generics.clone(),
@@ -301,12 +301,12 @@
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn free_function<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn free_function<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
defs.iter()
@@ -375,7 +375,7 @@
}
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = it
+ let param_exprs: Vec<Vec<Expr<'_>>> = it
.params_without_self_with_args(db, generics.iter().cloned())
.into_iter()
.map(|field| {
@@ -389,7 +389,7 @@
// Note that we need a special case for 0-param constructors because of the multi cartesian
// product
- let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ let fn_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
vec![Expr::Function { func: *it, generics, params: Vec::new() }]
} else {
param_exprs
@@ -432,12 +432,12 @@
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn impl_method<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn impl_method<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
lookup
@@ -507,14 +507,14 @@
let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = it
+ let param_exprs: Vec<Vec<Expr<'_>>> = it
.params_without_self_with_args(db, ty.type_arguments())
.into_iter()
.map(|field| lookup.find_autoref(db, field.ty()))
.collect::<Option<_>>()?;
let generics: Vec<_> = ty.type_arguments().collect();
- let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
+ let fn_exprs: Vec<Expr<'_>> = std::iter::once(target_type_exprs)
.chain(param_exprs)
.multi_cartesian_product()
.map(|params| {
@@ -547,12 +547,12 @@
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn struct_projection<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn struct_projection<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
lookup
@@ -589,11 +589,11 @@
/// * `ctx` - Context for the term search
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
-pub(super) fn famous_types<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn famous_types<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+ lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
[
@@ -620,12 +620,12 @@
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn impl_static_method<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn impl_static_method<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
lookup
@@ -683,7 +683,7 @@
}
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> = it
+ let param_exprs: Vec<Vec<Expr<'_>>> = it
.params_without_self_with_args(db, ty.type_arguments())
.into_iter()
.map(|field| lookup.find_autoref(db, field.ty()))
@@ -692,7 +692,7 @@
// Note that we need a special case for 0-param constructors because of the multi cartesian
// product
let generics = ty.type_arguments().collect();
- let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ let fn_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
vec![Expr::Function { func: it, generics, params: Vec::new() }]
} else {
param_exprs
@@ -722,12 +722,12 @@
/// * `defs` - Set of items in scope at term search target location
/// * `lookup` - Lookup table for types
/// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn make_tuple<'a, DB: HirDatabase>(
- ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn make_tuple<'a, 'lt, 'db, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'db, DB>,
_defs: &'a FxHashSet<ScopeDef>,
- lookup: &'a mut LookupTable,
+ lookup: &'lt mut LookupTable<'db>,
should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
let db = ctx.sema.db;
let module = ctx.scope.module();
@@ -749,15 +749,15 @@
}
// Early exit if some param cannot be filled from lookup
- let param_exprs: Vec<Vec<Expr>> =
+ let param_exprs: Vec<Vec<Expr<'db>>> =
ty.type_arguments().map(|field| lookup.find(db, &field)).collect::<Option<_>>()?;
- let exprs: Vec<Expr> = param_exprs
+ let exprs: Vec<Expr<'db>> = param_exprs
.into_iter()
.multi_cartesian_product()
.filter(|_| should_continue())
.map(|params| {
- let tys: Vec<Type> = params.iter().map(|it| it.ty(db)).collect();
+ let tys: Vec<Type<'_>> = params.iter().map(|it| it.ty(db)).collect();
let tuple_ty = Type::new_tuple(module.krate().into(), &tys);
let expr = Expr::Tuple { ty: tuple_ty.clone(), params };
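
The tactic signatures above split the old single `'a` lifetime into `'a`, `'lt`, and `'db`, and return `impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB>`, the precise-capturing syntax available since Rust 1.82. A standalone sketch of the feature, unrelated to rust-analyzer's own types, showing a borrow that is deliberately left out of the capture list:

    /// Precise capturing: the returned iterator may capture `'data` and `T`, but
    /// explicitly not `'cfg`, so the `cfg` borrow can end while the iterator lives on.
    fn filtered<'data, 'cfg, T: PartialOrd + Copy>(
        data: &'data [T],
        cfg: &'cfg T,
    ) -> impl Iterator<Item = T> + use<'data, T> {
        let min = *cfg; // copy out of the `'cfg` borrow so it is not captured
        data.iter().copied().filter(move |item| *item >= min)
    }

    fn main() {
        let data = [1, 5, 3, 7];
        let iter = {
            let threshold = 4;
            filtered(&data, &threshold) // the borrow of `threshold` ends here
        };
        assert_eq!(iter.collect::<Vec<_>>(), vec![5, 7]);
    }
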
diff --git a/crates/ide-assists/src/assist_config.rs b/crates/ide-assists/src/assist_config.rs
index fb569f8..57ced8d 100644
--- a/crates/ide-assists/src/assist_config.rs
+++ b/crates/ide-assists/src/assist_config.rs
@@ -22,6 +22,7 @@
pub term_search_borrowck: bool,
pub code_action_grouping: bool,
pub expr_fill_default: ExprFillDefaultMode,
+ pub prefer_self_ty: bool,
}
impl AssistConfig {
diff --git a/crates/ide-assists/src/assist_context.rs b/crates/ide-assists/src/assist_context.rs
index 9eb9452..207a754 100644
--- a/crates/ide-assists/src/assist_context.rs
+++ b/crates/ide-assists/src/assist_context.rs
@@ -95,7 +95,7 @@
}
}
- pub(crate) fn db(&self) -> &RootDatabase {
+ pub(crate) fn db(&self) -> &'a RootDatabase {
self.sema.db
}
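
`AssistContext::db` now returns `&'a RootDatabase` rather than a reborrow tied to `&self`, so callers can hold on to the database reference after the temporary borrow of the context ends. A standalone sketch of the difference; the `&'a str` field stands in for `&'a RootDatabase`:

    struct Ctx<'a> {
        db: &'a str, // stand-in for `&'a RootDatabase`
    }

    impl<'a> Ctx<'a> {
        // Returning `&'a str` (not the elided `&str`, which would borrow from `self`)
        // lets the result outlive the `Ctx` value itself.
        fn db(&self) -> &'a str {
            self.db
        }
    }

    fn main() {
        let data = String::from("database");
        let dbref = {
            let ctx = Ctx { db: &data };
            ctx.db() // `ctx` is dropped here, but the returned borrow lives as long as `data`
        };
        assert_eq!(dbref, "database");
    }
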
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 6a55f39..9f9d219 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -192,7 +192,7 @@
fn try_gen_trait_body(
ctx: &AssistContext<'_>,
func: &ast::Fn,
- trait_ref: hir::TraitRef,
+ trait_ref: hir::TraitRef<'_>,
impl_def: &ast::Impl,
edition: Edition,
) -> Option<()> {
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 858d436..1ece7dd 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -1,12 +1,13 @@
use std::iter::{self, Peekable};
use either::Either;
-use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
+use hir::{Adt, AsAssocItem, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
use ide_db::RootDatabase;
use ide_db::assists::ExprFillDefaultMode;
use ide_db::syntax_helpers::suggest_name;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
+use syntax::ToSmolStr;
use syntax::ast::edit::IndentLevel;
use syntax::ast::edit_in_place::Indent;
use syntax::ast::syntax_factory::SyntaxFactory;
@@ -79,12 +80,20 @@
let make = SyntaxFactory::with_mappings();
- let module = ctx.sema.scope(expr.syntax())?.module();
+ let scope = ctx.sema.scope(expr.syntax())?;
+ let module = scope.module();
+ let self_ty = if ctx.config.prefer_self_ty {
+ scope
+ .containing_function()
+ .and_then(|function| function.as_assoc_item(ctx.db())?.implementing_ty(ctx.db()))
+ } else {
+ None
+ };
let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
bool,
bool,
- ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) {
+ ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr, self_ty.as_ref()) {
let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
let variants = enum_def.variants(ctx.db());
@@ -102,8 +111,9 @@
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
- let option_enum = FamousDefs(&ctx.sema, module.krate()).core_option_Option().map(lift_enum);
- let missing_pats: Box<dyn Iterator<Item = _>> = if Some(enum_def) == option_enum {
+ let option_enum = FamousDefs(&ctx.sema, module.krate()).core_option_Option();
+ let missing_pats: Box<dyn Iterator<Item = _>> = if matches!(enum_def, ExtendedEnum::Enum { enum_: e, .. } if Some(e) == option_enum)
+ {
// Match `Some` variant first.
cov_mark::hit!(option_order);
Box::new(missing_pats.rev())
@@ -111,7 +121,7 @@
Box::new(missing_pats)
};
(missing_pats.peekable(), is_non_exhaustive, has_hidden_variants)
- } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) {
+ } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr, self_ty.as_ref()) {
let is_non_exhaustive =
enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db(), module.krate()));
@@ -159,7 +169,9 @@
is_non_exhaustive,
has_hidden_variants,
)
- } else if let Some((enum_def, len)) = resolve_array_of_enum_def(&ctx.sema, &expr) {
+ } else if let Some((enum_def, len)) =
+ resolve_array_of_enum_def(&ctx.sema, &expr, self_ty.as_ref())
+ {
let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
let variants = enum_def.variants(ctx.db());
@@ -373,23 +385,23 @@
}
}
-#[derive(Eq, PartialEq, Clone, Copy)]
+#[derive(Eq, PartialEq, Clone)]
enum ExtendedEnum {
Bool,
- Enum(hir::Enum),
+ Enum { enum_: hir::Enum, use_self: bool },
}
#[derive(Eq, PartialEq, Clone, Copy, Debug)]
enum ExtendedVariant {
True,
False,
- Variant(hir::Variant),
+ Variant { variant: hir::Variant, use_self: bool },
}
impl ExtendedVariant {
fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool {
match self {
- ExtendedVariant::Variant(var) => {
+ ExtendedVariant::Variant { variant: var, .. } => {
var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate
}
_ => false,
@@ -397,25 +409,35 @@
}
}
-fn lift_enum(e: hir::Enum) -> ExtendedEnum {
- ExtendedEnum::Enum(e)
-}
-
impl ExtendedEnum {
- fn is_non_exhaustive(self, db: &RootDatabase, krate: Crate) -> bool {
+ fn enum_(
+ db: &RootDatabase,
+ enum_: hir::Enum,
+ enum_ty: &hir::Type<'_>,
+ self_ty: Option<&hir::Type<'_>>,
+ ) -> Self {
+ ExtendedEnum::Enum {
+ enum_,
+ use_self: self_ty.is_some_and(|self_ty| self_ty.could_unify_with_deeply(db, enum_ty)),
+ }
+ }
+
+ fn is_non_exhaustive(&self, db: &RootDatabase, krate: Crate) -> bool {
match self {
- ExtendedEnum::Enum(e) => {
+ ExtendedEnum::Enum { enum_: e, .. } => {
e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate
}
_ => false,
}
}
- fn variants(self, db: &RootDatabase) -> Vec<ExtendedVariant> {
- match self {
- ExtendedEnum::Enum(e) => {
- e.variants(db).into_iter().map(ExtendedVariant::Variant).collect::<Vec<_>>()
- }
+ fn variants(&self, db: &RootDatabase) -> Vec<ExtendedVariant> {
+ match *self {
+ ExtendedEnum::Enum { enum_: e, use_self } => e
+ .variants(db)
+ .into_iter()
+ .map(|variant| ExtendedVariant::Variant { variant, use_self })
+ .collect::<Vec<_>>(),
ExtendedEnum::Bool => {
Vec::<ExtendedVariant>::from([ExtendedVariant::True, ExtendedVariant::False])
}
@@ -423,9 +445,13 @@
}
}
-fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
+fn resolve_enum_def(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &ast::Expr,
+ self_ty: Option<&hir::Type<'_>>,
+) -> Option<ExtendedEnum> {
sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
- Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
+ Some(Adt::Enum(e)) => Some(ExtendedEnum::enum_(sema.db, e, &ty, self_ty)),
_ => ty.is_bool().then_some(ExtendedEnum::Bool),
})
}
@@ -433,6 +459,7 @@
fn resolve_tuple_of_enum_def(
sema: &Semantics<'_, RootDatabase>,
expr: &ast::Expr,
+ self_ty: Option<&hir::Type<'_>>,
) -> Option<Vec<ExtendedEnum>> {
sema.type_of_expr(expr)?
.adjusted()
@@ -441,7 +468,7 @@
.map(|ty| {
ty.autoderef(sema.db).find_map(|ty| {
match ty.as_adt() {
- Some(Adt::Enum(e)) => Some(lift_enum(e)),
+ Some(Adt::Enum(e)) => Some(ExtendedEnum::enum_(sema.db, e, &ty, self_ty)),
// For now we only handle expansion for a tuple of enums. Here
// we map non-enum items to None and rely on `collect` to
// convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
@@ -456,10 +483,11 @@
fn resolve_array_of_enum_def(
sema: &Semantics<'_, RootDatabase>,
expr: &ast::Expr,
+ self_ty: Option<&hir::Type<'_>>,
) -> Option<(ExtendedEnum, usize)> {
sema.type_of_expr(expr)?.adjusted().as_array(sema.db).and_then(|(ty, len)| {
ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
- Some(Adt::Enum(e)) => Some((lift_enum(e), len)),
+ Some(Adt::Enum(e)) => Some((ExtendedEnum::enum_(sema.db, e, &ty, self_ty), len)),
_ => ty.is_bool().then_some((ExtendedEnum::Bool, len)),
})
})
@@ -474,9 +502,21 @@
) -> Option<ast::Pat> {
let db = ctx.db();
match var {
- ExtendedVariant::Variant(var) => {
+ ExtendedVariant::Variant { variant: var, use_self } => {
let edition = module.krate().edition(db);
- let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition);
+ let path = if use_self {
+ make::path_from_segments(
+ [
+ make::path_segment(make::name_ref_self_ty()),
+ make::path_segment(make::name_ref(
+ &var.name(db).display(db, edition).to_smolstr(),
+ )),
+ ],
+ false,
+ )
+ } else {
+ mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition)
+ };
let fields = var.fields(db);
let pat: ast::Pat = match var.kind(db) {
hir::StructKind::Tuple => {
@@ -509,8 +549,10 @@
#[cfg(test)]
mod tests {
+ use crate::AssistConfig;
use crate::tests::{
- check_assist, check_assist_not_applicable, check_assist_target, check_assist_unresolved,
+ TEST_CONFIG, check_assist, check_assist_not_applicable, check_assist_target,
+ check_assist_unresolved, check_assist_with_config,
};
use super::add_missing_match_arms;
@@ -2095,4 +2137,111 @@
"#,
);
}
+
+ #[test]
+ fn prefer_self() {
+ check_assist_with_config(
+ add_missing_match_arms,
+ AssistConfig { prefer_self_ty: true, ..TEST_CONFIG },
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Foo {
+ fn qux(&self) {
+ match self {
+ $0_ => {}
+ }
+ }
+}
+ "#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Foo {
+ fn qux(&self) {
+ match self {
+ Self::Bar => ${1:todo!()},
+ Self::Baz => ${2:todo!()},$0
+ }
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn prefer_self_with_generics() {
+ check_assist_with_config(
+ add_missing_match_arms,
+ AssistConfig { prefer_self_ty: true, ..TEST_CONFIG },
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+impl<T> Foo<T> {
+ fn qux(&self) {
+ match self {
+ $0_ => {}
+ }
+ }
+}
+ "#,
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+impl<T> Foo<T> {
+ fn qux(&self) {
+ match self {
+ Self::Bar(${1:_}) => ${2:todo!()},
+ Self::Baz => ${3:todo!()},$0
+ }
+ }
+}
+ "#,
+ );
+ check_assist_with_config(
+ add_missing_match_arms,
+ AssistConfig { prefer_self_ty: true, ..TEST_CONFIG },
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+impl<T> Foo<T> {
+ fn qux(v: Foo<i32>) {
+ match v {
+ $0_ => {}
+ }
+ }
+}
+ "#,
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+impl<T> Foo<T> {
+ fn qux(v: Foo<i32>) {
+ match v {
+ Foo::Bar(${1:_}) => ${2:todo!()},
+ Foo::Baz => ${3:todo!()},$0
+ }
+ }
+}
+ "#,
+ );
+ }
}
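
The `use_self` flag threaded through `ExtendedEnum` and `ExtendedVariant` above is decided once per enum: arm paths use `Self::Variant` only when the enclosing impl's self type deep-unifies with the scrutinee's enum type, and otherwise the assist falls back to `mod_path_to_ast` as before. A sketch of that check in isolation; `prefer_self_prefix` is an invented name, while the assist itself performs the same `could_unify_with_deeply` call inside `ExtendedEnum::enum_`:

    use hir::Type;
    use ide_db::RootDatabase;

    /// Hypothetical helper mirroring the `use_self` decision: prefer `Self::Variant`
    /// paths when the impl's self type deep-unifies with the matched enum's type.
    fn prefer_self_prefix<'db>(
        db: &'db RootDatabase,
        enum_ty: &Type<'db>,
        self_ty: Option<&Type<'db>>,
    ) -> bool {
        self_ty.is_some_and(|self_ty| self_ty.could_unify_with_deeply(db, enum_ty))
    }
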
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index f3243d3..bb6a10d 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -164,9 +164,9 @@
Some(())
}
-pub(super) fn find_importable_node(
- ctx: &AssistContext<'_>,
-) -> Option<(ImportAssets, SyntaxNode, Option<Type>)> {
+pub(super) fn find_importable_node<'a: 'db, 'db>(
+ ctx: &'a AssistContext<'db>,
+) -> Option<(ImportAssets<'db>, SyntaxNode, Option<Type<'db>>)> {
// Deduplicate this with the `expected_type_and_name` logic for completions
let expected = |expr_or_pat: Either<ast::Expr, ast::Pat>| match expr_or_pat {
Either::Left(expr) => {
@@ -226,7 +226,7 @@
}
}
-fn group_label(import_candidate: &ImportCandidate) -> GroupLabel {
+fn group_label(import_candidate: &ImportCandidate<'_>) -> GroupLabel {
let name = match import_candidate {
ImportCandidate::Path(candidate) => format!("Import {}", candidate.name.text()),
ImportCandidate::TraitAssocItem(candidate) => {
@@ -244,7 +244,7 @@
pub(crate) fn relevance_score(
ctx: &AssistContext<'_>,
import: &LocatedImport,
- expected: Option<&Type>,
+ expected: Option<&Type<'_>>,
current_module: Option<&Module>,
) -> i32 {
let mut score = 0;
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index cf45ea0..00cbef1 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -309,23 +309,23 @@
}
#[derive(Debug)]
-struct Function {
+struct Function<'db> {
name: ast::NameRef,
self_param: Option<ast::SelfParam>,
- params: Vec<Param>,
- control_flow: ControlFlow,
- ret_ty: RetType,
+ params: Vec<Param<'db>>,
+ control_flow: ControlFlow<'db>,
+ ret_ty: RetType<'db>,
body: FunctionBody,
outliving_locals: Vec<OutlivedLocal>,
/// Whether at least one of the container's tail exprs is contained in the range we're extracting.
contains_tail_expr: bool,
- mods: ContainerInfo,
+ mods: ContainerInfo<'db>,
}
#[derive(Debug)]
-struct Param {
+struct Param<'db> {
var: Local,
- ty: hir::Type,
+ ty: hir::Type<'db>,
move_local: bool,
requires_mut: bool,
is_copy: bool,
@@ -340,10 +340,10 @@
}
#[derive(Debug)]
-enum FunType {
+enum FunType<'db> {
Unit,
- Single(hir::Type),
- Tuple(Vec<hir::Type>),
+ Single(hir::Type<'db>),
+ Tuple(Vec<hir::Type<'db>>),
}
/// Where to put extracted function definition
@@ -358,19 +358,19 @@
// FIXME: ControlFlow and ContainerInfo both track some function modifiers, feels like these two should
// probably be merged somehow.
#[derive(Debug)]
-struct ControlFlow {
- kind: Option<FlowKind>,
+struct ControlFlow<'db> {
+ kind: Option<FlowKind<'db>>,
is_async: bool,
is_unsafe: bool,
}
/// The thing whose expression we are extracting from. Can be a function, const, static, const arg, ...
#[derive(Clone, Debug)]
-struct ContainerInfo {
+struct ContainerInfo<'db> {
is_const: bool,
parent_loop: Option<SyntaxNode>,
/// The function's return type, const's type etc.
- ret_type: Option<hir::Type>,
+ ret_type: Option<hir::Type<'db>>,
generic_param_lists: Vec<ast::GenericParamList>,
where_clauses: Vec<ast::WhereClause>,
edition: Edition,
@@ -389,11 +389,11 @@
/// }
/// ```
#[derive(Debug, Clone)]
-enum FlowKind {
+enum FlowKind<'db> {
/// Return with value (`return $expr;`)
Return(Option<ast::Expr>),
Try {
- kind: TryKind,
+ kind: TryKind<'db>,
},
/// Break with label and value (`break 'label $expr;`)
Break(Option<ast::Lifetime>, Option<ast::Expr>),
@@ -402,18 +402,18 @@
}
#[derive(Debug, Clone)]
-enum TryKind {
+enum TryKind<'db> {
Option,
- Result { ty: hir::Type },
+ Result { ty: hir::Type<'db> },
}
#[derive(Debug)]
-enum RetType {
- Expr(hir::Type),
+enum RetType<'db> {
+ Expr(hir::Type<'db>),
Stmt,
}
-impl RetType {
+impl RetType<'_> {
fn is_unit(&self) -> bool {
match self {
RetType::Expr(ty) => ty.is_unit(),
@@ -456,8 +456,8 @@
}
}
-impl Function {
- fn return_type(&self, ctx: &AssistContext<'_>) -> FunType {
+impl<'db> Function<'db> {
+ fn return_type(&self, ctx: &AssistContext<'db>) -> FunType<'db> {
match &self.ret_ty {
RetType::Expr(ty) if ty.is_unit() => FunType::Unit,
RetType::Expr(ty) => FunType::Single(ty.clone()),
@@ -487,7 +487,7 @@
}
}
-impl Param {
+impl<'db> Param<'db> {
fn kind(&self) -> ParamKind {
match (self.move_local, self.requires_mut, self.is_copy) {
(false, true, _) => ParamKind::MutRef,
@@ -497,7 +497,7 @@
}
}
- fn to_arg(&self, ctx: &AssistContext<'_>, edition: Edition) -> ast::Expr {
+ fn to_arg(&self, ctx: &AssistContext<'db>, edition: Edition) -> ast::Expr {
let var = path_expr_from_local(ctx, self.var, edition);
match self.kind() {
ParamKind::Value | ParamKind::MutValue => var,
@@ -532,8 +532,12 @@
}
}
-impl TryKind {
- fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>, edition: Edition) -> Option<TryKind> {
+impl<'db> TryKind<'db> {
+ fn of_ty(
+ ty: hir::Type<'db>,
+ ctx: &AssistContext<'db>,
+ edition: Edition,
+ ) -> Option<TryKind<'db>> {
if ty.is_unknown() {
// We favour Result for `expr?`
return Some(TryKind::Result { ty });
@@ -551,7 +555,7 @@
}
}
-impl FlowKind {
+impl<'db> FlowKind<'db> {
fn make_result_handler(&self, expr: Option<ast::Expr>) -> ast::Expr {
match self {
FlowKind::Return(_) => make::expr_return(expr),
@@ -567,7 +571,7 @@
}
}
- fn expr_ty(&self, ctx: &AssistContext<'_>) -> Option<hir::Type> {
+ fn expr_ty(&self, ctx: &AssistContext<'db>) -> Option<hir::Type<'db>> {
match self {
FlowKind::Return(Some(expr)) | FlowKind::Break(_, Some(expr)) => {
ctx.sema.type_of_expr(expr).map(TypeInfo::adjusted)
@@ -876,11 +880,11 @@
(res, self_param)
}
- fn analyze_container(
+ fn analyze_container<'db>(
&self,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
edition: Edition,
- ) -> Option<(ContainerInfo, bool)> {
+ ) -> Option<(ContainerInfo<'db>, bool)> {
let mut ancestors = self.parent()?.ancestors();
let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
let mut parent_loop = None;
@@ -985,7 +989,7 @@
))
}
- fn return_ty(&self, ctx: &AssistContext<'_>) -> Option<RetType> {
+ fn return_ty<'db>(&self, ctx: &AssistContext<'db>) -> Option<RetType<'db>> {
match self.tail_expr() {
Some(expr) => ctx.sema.type_of_expr(&expr).map(TypeInfo::original).map(RetType::Expr),
None => Some(RetType::Stmt),
@@ -1006,11 +1010,11 @@
}
/// Analyses the function body for external control flow.
- fn external_control_flow(
+ fn external_control_flow<'db>(
&self,
- ctx: &AssistContext<'_>,
- container_info: &ContainerInfo,
- ) -> Option<ControlFlow> {
+ ctx: &AssistContext<'db>,
+ container_info: &ContainerInfo<'db>,
+ ) -> Option<ControlFlow<'db>> {
let mut ret_expr = None;
let mut try_expr = None;
let mut break_expr = None;
@@ -1096,12 +1100,12 @@
/// find variables that should be extracted as params
///
/// Computes additional info that affects param type and mutability
- fn extracted_function_params(
+ fn extracted_function_params<'db>(
&self,
- ctx: &AssistContext<'_>,
- container_info: &ContainerInfo,
+ ctx: &AssistContext<'db>,
+ container_info: &ContainerInfo<'db>,
locals: FxIndexSet<Local>,
- ) -> Vec<Param> {
+ ) -> Vec<Param<'db>> {
locals
.into_iter()
.sorted()
@@ -1449,7 +1453,7 @@
}
}
-fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> SyntaxNode {
+fn make_call(ctx: &AssistContext<'_>, fun: &Function<'_>, indent: IndentLevel) -> SyntaxNode {
let ret_ty = fun.return_type(ctx);
let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx, fun.mods.edition)));
@@ -1508,17 +1512,17 @@
}
}
-enum FlowHandler {
+enum FlowHandler<'db> {
None,
- Try { kind: TryKind },
- If { action: FlowKind },
- IfOption { action: FlowKind },
- MatchOption { none: FlowKind },
- MatchResult { err: FlowKind },
+ Try { kind: TryKind<'db> },
+ If { action: FlowKind<'db> },
+ IfOption { action: FlowKind<'db> },
+ MatchOption { none: FlowKind<'db> },
+ MatchResult { err: FlowKind<'db> },
}
-impl FlowHandler {
- fn from_ret_ty(fun: &Function, ret_ty: &FunType) -> FlowHandler {
+impl<'db> FlowHandler<'db> {
+ fn from_ret_ty(fun: &Function<'db>, ret_ty: &FunType<'db>) -> FlowHandler<'db> {
if fun.contains_tail_expr {
return FlowHandler::None;
}
@@ -1628,7 +1632,7 @@
fn format_function(
ctx: &AssistContext<'_>,
module: hir::Module,
- fun: &Function,
+ fun: &Function<'_>,
old_indent: IndentLevel,
) -> ast::Fn {
let fun_name = make::name(&fun.name.text());
@@ -1654,7 +1658,7 @@
fn make_generic_params_and_where_clause(
ctx: &AssistContext<'_>,
- fun: &Function,
+ fun: &Function<'_>,
) -> (Option<ast::GenericParamList>, Option<ast::WhereClause>) {
let used_type_params = fun.type_params(ctx);
@@ -1666,7 +1670,7 @@
fn make_generic_param_list(
ctx: &AssistContext<'_>,
- fun: &Function,
+ fun: &Function<'_>,
used_type_params: &[TypeParam],
) -> Option<ast::GenericParamList> {
let mut generic_params = fun
@@ -1703,7 +1707,7 @@
fn make_where_clause(
ctx: &AssistContext<'_>,
- fun: &Function,
+ fun: &Function<'_>,
used_type_params: &[TypeParam],
) -> Option<ast::WhereClause> {
let mut predicates = fun
@@ -1743,9 +1747,9 @@
}
}
-impl Function {
+impl<'db> Function<'db> {
/// Collect all the `TypeParam`s used in the `body` and `params`.
- fn type_params(&self, ctx: &AssistContext<'_>) -> Vec<TypeParam> {
+ fn type_params(&self, ctx: &AssistContext<'db>) -> Vec<TypeParam> {
let type_params_in_descendant_paths =
self.body.descendant_paths().filter_map(|it| match ctx.sema.resolve_path(&it) {
Some(PathResolution::TypeParam(type_param)) => Some(type_param),
@@ -1808,8 +1812,8 @@
}
}
-impl FunType {
- fn make_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+impl<'db> FunType<'db> {
+ fn make_ty(&self, ctx: &AssistContext<'db>, module: hir::Module) -> ast::Type {
match self {
FunType::Unit => make::ty_unit(),
FunType::Single(ty) => make_ty(ty, ctx, module),
@@ -1831,7 +1835,11 @@
}
}
-fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -> ast::BlockExpr {
+fn make_body(
+ ctx: &AssistContext<'_>,
+ old_indent: IndentLevel,
+ fun: &Function<'_>,
+) -> ast::BlockExpr {
let ret_ty = fun.return_type(ctx);
let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
@@ -2009,19 +2017,19 @@
make::hacky_block_expr(elements, Some(tail_expr))
}
-fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String {
+fn format_type(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> String {
ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_owned())
}
-fn make_ty(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+fn make_ty(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
let ty_str = format_type(ty, ctx, module);
make::ty(&ty_str)
}
fn rewrite_body_segment(
ctx: &AssistContext<'_>,
- params: &[Param],
- handler: &FlowHandler,
+ params: &[Param<'_>],
+ handler: &FlowHandler<'_>,
syntax: &SyntaxNode,
) -> SyntaxNode {
let syntax = fix_param_usages(ctx, params, syntax);
@@ -2030,8 +2038,12 @@
}
/// change all usages to account for added `&`/`&mut` for some params
-fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode {
- let mut usages_for_param: Vec<(&Param, Vec<ast::Expr>)> = Vec::new();
+fn fix_param_usages(
+ ctx: &AssistContext<'_>,
+ params: &[Param<'_>],
+ syntax: &SyntaxNode,
+) -> SyntaxNode {
+ let mut usages_for_param: Vec<(&Param<'_>, Vec<ast::Expr>)> = Vec::new();
let tm = TreeMutator::new(syntax);
@@ -2085,7 +2097,7 @@
res
}
-fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) {
+fn update_external_control_flow(handler: &FlowHandler<'_>, syntax: &SyntaxNode) {
let mut nested_loop = None;
let mut nested_scope = None;
for event in syntax.preorder() {
@@ -2146,7 +2158,10 @@
}
}
-fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Option<ast::Expr> {
+fn make_rewritten_flow(
+ handler: &FlowHandler<'_>,
+ arg_expr: Option<ast::Expr>,
+) -> Option<ast::Expr> {
let value = match handler {
FlowHandler::None | FlowHandler::Try { .. } => return None,
FlowHandler::If { .. } => make::expr_call(
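The extract_function changes above thread a `'db` lifetime through `Param`, `TryKind`, `FlowKind`, `FlowHandler`, and `Function` so the `hir::Type` values they hold keep borrowing from the database. A minimal standalone sketch of that pattern (hypothetical `Db`/`Ty`/`FunRet` names, not rust-analyzer APIs):

```rust
// Sketch only: threading a `'db` lifetime through helper types that hold
// database-borrowed data, mirroring the `Type<'db>` changes above.
struct Db {
    ty_name: String,
}

struct Ty<'db> {
    source: &'db str,
}

enum FunRet<'db> {
    Unit,
    Single(Ty<'db>),
}

impl<'db> FunRet<'db> {
    fn describe(&self) -> &str {
        match self {
            FunRet::Unit => "()",
            FunRet::Single(ty) => ty.source,
        }
    }
}

fn return_type<'db>(db: &'db Db) -> FunRet<'db> {
    // The returned value may not outlive `db`, which the `'db` parameter encodes.
    FunRet::Single(Ty { source: db.ty_name.as_str() })
}

fn main() {
    let db = Db { ty_name: "u32".to_owned() };
    assert_eq!(return_type(&db).describe(), "u32");
    assert_eq!(FunRet::Unit.describe(), "()");
}
```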
diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs
index 30084d2..78ae815 100644
--- a/crates/ide-assists/src/handlers/generate_function.rs
+++ b/crates/ide-assists/src/handlers/generate_function.rs
@@ -307,7 +307,7 @@
ctx: &AssistContext<'_>,
call: &ast::MethodCallExpr,
name: &ast::NameRef,
- receiver_ty: Type,
+ receiver_ty: Type<'_>,
target_module: Module,
target: GeneratedFunctionTarget,
) -> Option<Self> {
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index 6f028e5..b7b8bc6 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -283,11 +283,11 @@
}
}
-fn get_fn_params(
- db: &dyn HirDatabase,
+fn get_fn_params<'db>(
+ db: &'db dyn HirDatabase,
function: hir::Function,
param_list: &ast::ParamList,
-) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param)>> {
+) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param<'db>)>> {
let mut assoc_fn_params = function.assoc_fn_params(db).into_iter();
let mut params = Vec::new();
@@ -316,7 +316,7 @@
function_def_file_id: EditionedFileId,
function: hir::Function,
fn_body: &ast::BlockExpr,
- params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
+ params: &[(ast::Pat, Option<ast::Type>, hir::Param<'_>)],
CallInfo { node, arguments, generic_arg_list, krate }: &CallInfo,
) -> ast::Expr {
let file_id = sema.hir_file_for(fn_body.syntax());
diff --git a/crates/ide-assists/src/handlers/inline_const_as_literal.rs b/crates/ide-assists/src/handlers/inline_const_as_literal.rs
index e5ed04f..b11d379 100644
--- a/crates/ide-assists/src/handlers/inline_const_as_literal.rs
+++ b/crates/ide-assists/src/handlers/inline_const_as_literal.rs
@@ -58,7 +58,7 @@
fn validate_type_recursively(
ctx: &AssistContext<'_>,
- ty_hir: Option<&hir::Type>,
+ ty_hir: Option<&hir::Type<'_>>,
refed: bool,
fuel: i32,
) -> Option<()> {
diff --git a/crates/ide-assists/src/handlers/merge_match_arms.rs b/crates/ide-assists/src/handlers/merge_match_arms.rs
index 42f3521..08170f8 100644
--- a/crates/ide-assists/src/handlers/merge_match_arms.rs
+++ b/crates/ide-assists/src/handlers/merge_match_arms.rs
@@ -105,7 +105,7 @@
}
fn are_same_types(
- current_arm_types: &FxHashMap<String, Option<Type>>,
+ current_arm_types: &FxHashMap<String, Option<Type<'_>>>,
arm: &ast::MatchArm,
ctx: &AssistContext<'_>,
) -> bool {
@@ -121,15 +121,15 @@
true
}
-fn get_arm_types(
- context: &AssistContext<'_>,
+fn get_arm_types<'db>(
+ context: &AssistContext<'db>,
arm: &ast::MatchArm,
-) -> FxHashMap<String, Option<Type>> {
- let mut mapping: FxHashMap<String, Option<Type>> = FxHashMap::default();
+) -> FxHashMap<String, Option<Type<'db>>> {
+ let mut mapping: FxHashMap<String, Option<Type<'db>>> = FxHashMap::default();
- fn recurse(
- map: &mut FxHashMap<String, Option<Type>>,
- ctx: &AssistContext<'_>,
+ fn recurse<'db>(
+ map: &mut FxHashMap<String, Option<Type<'db>>>,
+ ctx: &AssistContext<'db>,
pat: &Option<ast::Pat>,
) {
if let Some(local_pat) = pat {
diff --git a/crates/ide-assists/src/handlers/qualify_path.rs b/crates/ide-assists/src/handlers/qualify_path.rs
index 07d2f52..8834ad9 100644
--- a/crates/ide-assists/src/handlers/qualify_path.rs
+++ b/crates/ide-assists/src/handlers/qualify_path.rs
@@ -217,7 +217,7 @@
}
}
-fn group_label(candidate: &ImportCandidate) -> GroupLabel {
+fn group_label(candidate: &ImportCandidate<'_>) -> GroupLabel {
let name = match candidate {
ImportCandidate::Path(it) => &it.name,
ImportCandidate::TraitAssocItem(it) | ImportCandidate::TraitMethod(it) => {
@@ -230,7 +230,7 @@
fn label(
db: &RootDatabase,
- candidate: &ImportCandidate,
+ candidate: &ImportCandidate<'_>,
import: &LocatedImport,
edition: Edition,
) -> String {
diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs
index 16debc4..c38bdfd 100644
--- a/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -117,7 +117,7 @@
if unused.peek().is_some() {
acc.add(
AssistId::quick_fix("remove_unused_imports"),
- "Remove all the unused imports",
+ "Remove all unused imports",
selected_el.text_range(),
|builder| {
let unused: Vec<ast::UseTree> = unused.map(|x| builder.make_mut(x)).collect();
diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs
index 6af8e14..019ddaf 100644
--- a/crates/ide-assists/src/handlers/term_search.rs
+++ b/crates/ide-assists/src/handlers/term_search.rs
@@ -46,7 +46,7 @@
return None;
}
- let mut formatter = |_: &hir::Type| String::from("todo!()");
+ let mut formatter = |_: &hir::Type<'_>| String::from("todo!()");
let edition = scope.krate().edition(ctx.db());
let paths = paths
@@ -100,7 +100,9 @@
fn test_complete_todo_with_msg() {
check_assist(
term_search,
- r#"//- minicore: todo, unimplemented
+ // FIXME: Since we lack `super let`, term search fails due to a borrowck failure.
+ // Once `super let` is implemented, remove `fmt_before_1_89_0`.
+ r#"//- minicore: todo, unimplemented, fmt_before_1_89_0
fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
)
@@ -110,7 +112,9 @@
fn test_complete_unimplemented_with_msg() {
check_assist(
term_search,
- r#"//- minicore: todo, unimplemented
+ // FIXME: Since we lack `super let`, term search fails due to a borrowck failure.
+ // Once `super let` is implemented, remove `fmt_before_1_89_0`.
+ r#"//- minicore: todo, unimplemented, fmt_before_1_89_0
fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
)
@@ -120,7 +124,9 @@
fn test_complete_unimplemented() {
check_assist(
term_search,
- r#"//- minicore: todo, unimplemented
+ // FIXME: Since we lack `super let`, term search fails due to a borrowck failure.
+ // Once `super let` is implemented, remove `fmt_before_1_89_0`.
+ r#"//- minicore: todo, unimplemented, fmt_before_1_89_0
fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
)
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index 5e68897..cda2ad4 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -37,6 +37,7 @@
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
+ prefer_self_ty: false,
};
pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
@@ -57,6 +58,7 @@
term_search_borrowck: true,
code_action_grouping: false,
expr_fill_default: ExprFillDefaultMode::Todo,
+ prefer_self_ty: false,
};
pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
@@ -77,6 +79,7 @@
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
+ prefer_self_ty: false,
};
pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
@@ -97,6 +100,7 @@
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
+ prefer_self_ty: false,
};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {
@@ -114,6 +118,23 @@
}
#[track_caller]
+pub(crate) fn check_assist_with_config(
+ assist: Handler,
+ config: AssistConfig,
+ #[rust_analyzer::rust_fixture] ra_fixture_before: &str,
+ #[rust_analyzer::rust_fixture] ra_fixture_after: &str,
+) {
+ let ra_fixture_after = trim_indent(ra_fixture_after);
+ check_with_config(
+ config,
+ assist,
+ ra_fixture_before,
+ ExpectedResult::After(&ra_fixture_after),
+ None,
+ );
+}
+
+#[track_caller]
pub(crate) fn check_assist_no_snippet_cap(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index ef6914f..1a91053 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -405,7 +405,7 @@
}
fn check_pat_variant_from_enum(ctx: &AssistContext<'_>, pat: &ast::Pat) -> bool {
- ctx.sema.type_of_pat(pat).is_none_or(|ty: hir::TypeInfo| {
+ ctx.sema.type_of_pat(pat).is_none_or(|ty: hir::TypeInfo<'_>| {
ty.adjusted().as_adt().is_some_and(|adt| matches!(adt, hir::Adt::Enum(_)))
})
}
@@ -780,9 +780,9 @@
}
#[derive(Debug)]
-pub(crate) struct ReferenceConversion {
+pub(crate) struct ReferenceConversion<'db> {
conversion: ReferenceConversionType,
- ty: hir::Type,
+ ty: hir::Type<'db>,
impls_deref: bool,
}
@@ -802,10 +802,10 @@
Result,
}
-impl ReferenceConversion {
+impl<'db> ReferenceConversion<'db> {
pub(crate) fn convert_type(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
display_target: DisplayTarget,
) -> ast::Type {
let ty = match self.conversion {
@@ -878,11 +878,11 @@
// FIXME: It should return a new hir::Type, but currently constructing new types is too cumbersome
// and all users of this function operate on string type names, so they can do the conversion
// themselves.
-pub(crate) fn convert_reference_type(
- ty: hir::Type,
- db: &RootDatabase,
- famous_defs: &FamousDefs<'_, '_>,
-) -> Option<ReferenceConversion> {
+pub(crate) fn convert_reference_type<'db>(
+ ty: hir::Type<'db>,
+ db: &'db RootDatabase,
+ famous_defs: &FamousDefs<'_, 'db>,
+) -> Option<ReferenceConversion<'db>> {
handle_copy(&ty, db)
.or_else(|| handle_as_ref_str(&ty, db, famous_defs))
.or_else(|| handle_as_ref_slice(&ty, db, famous_defs))
@@ -892,18 +892,21 @@
.map(|(conversion, impls_deref)| ReferenceConversion { ty, conversion, impls_deref })
}
-fn could_deref_to_target(ty: &hir::Type, target: &hir::Type, db: &dyn HirDatabase) -> bool {
+fn could_deref_to_target(ty: &hir::Type<'_>, target: &hir::Type<'_>, db: &dyn HirDatabase) -> bool {
let ty_ref = ty.add_reference(hir::Mutability::Shared);
let target_ref = target.add_reference(hir::Mutability::Shared);
ty_ref.could_coerce_to(db, &target_ref)
}
-fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<(ReferenceConversionType, bool)> {
+fn handle_copy(
+ ty: &hir::Type<'_>,
+ db: &dyn HirDatabase,
+) -> Option<(ReferenceConversionType, bool)> {
ty.is_copy(db).then_some((ReferenceConversionType::Copy, true))
}
fn handle_as_ref_str(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
@@ -914,7 +917,7 @@
}
fn handle_as_ref_slice(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
@@ -928,7 +931,7 @@
}
fn handle_dereferenced(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
@@ -941,7 +944,7 @@
}
fn handle_option_as_ref(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
@@ -953,7 +956,7 @@
}
fn handle_result_as_ref(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
famous_defs: &FamousDefs<'_, '_>,
) -> Option<(ReferenceConversionType, bool)> {
diff --git a/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/crates/ide-assists/src/utils/gen_trait_fn_body.rs
index 4ea56dc..c58bdd9 100644
--- a/crates/ide-assists/src/utils/gen_trait_fn_body.rs
+++ b/crates/ide-assists/src/utils/gen_trait_fn_body.rs
@@ -17,7 +17,7 @@
func: &ast::Fn,
trait_path: &ast::Path,
adt: &ast::Adt,
- trait_ref: Option<TraitRef>,
+ trait_ref: Option<TraitRef<'_>>,
) -> Option<()> {
match trait_path.segment()?.name_ref()?.text().as_str() {
"Clone" => gen_clone_impl(adt, func),
@@ -405,7 +405,7 @@
}
/// Generate a `PartialEq` impl based on the fields and members of the target type.
-fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -> Option<()> {
+fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
stdx::always!(func.name().is_some_and(|name| name.text() == "eq"));
fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
match expr {
@@ -599,7 +599,7 @@
Some(())
}
-fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -> Option<()> {
+fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp"));
fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
let mut arms = vec![];
diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs
index 5d68aca..65072d9 100644
--- a/crates/ide-completion/src/completions.rs
+++ b/crates/ide-completion/src/completions.rs
@@ -161,7 +161,11 @@
item.add_to(self, ctx.db);
}
- pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) {
+ pub(crate) fn add_expr(
+ &mut self,
+ ctx: &CompletionContext<'_>,
+ expr: &hir::term_search::Expr<'_>,
+ ) {
if let Some(item) = render_expr(ctx, expr) {
item.add_to(self, ctx.db)
}
@@ -170,7 +174,7 @@
pub(crate) fn add_crate_roots(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
) {
ctx.process_all_names(&mut |name, res, doc_aliases| match res {
ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) if m.is_crate_root() => {
@@ -183,7 +187,7 @@
pub(crate) fn add_path_resolution(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: hir::Name,
resolution: hir::ScopeDef,
doc_aliases: Vec<syntax::SmolStr>,
@@ -232,7 +236,7 @@
pub(crate) fn add_enum_variants(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
e: hir::Enum,
) {
if !ctx.check_stability_and_hidden(e) {
@@ -246,7 +250,7 @@
pub(crate) fn add_module(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
module: hir::Module,
local_name: hir::Name,
doc_aliases: Vec<syntax::SmolStr>,
@@ -263,7 +267,7 @@
pub(crate) fn add_macro(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
mac: hir::Macro,
local_name: hir::Name,
) {
@@ -286,7 +290,7 @@
pub(crate) fn add_function(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
func: hir::Function,
local_name: Option<hir::Name>,
) {
@@ -312,7 +316,7 @@
pub(crate) fn add_method(
&mut self,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
func: hir::Function,
receiver: Option<SmolStr>,
local_name: Option<hir::Name>,
@@ -340,7 +344,7 @@
pub(crate) fn add_method_with_import(
&mut self,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
func: hir::Function,
import: LocatedImport,
) {
@@ -407,7 +411,7 @@
pub(crate) fn add_qualified_enum_variant(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
variant: hir::Variant,
path: hir::ModPath,
) {
@@ -424,7 +428,7 @@
pub(crate) fn add_enum_variant(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
variant: hir::Variant,
local_name: Option<hir::Name>,
) {
@@ -447,10 +451,10 @@
pub(crate) fn add_field(
&mut self,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
receiver: Option<SmolStr>,
field: hir::Field,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
) {
let is_private_editable = match ctx.is_visible(&field) {
Visible::Yes => false,
@@ -471,7 +475,7 @@
pub(crate) fn add_struct_literal(
&mut self,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
strukt: hir::Struct,
path: Option<hir::ModPath>,
local_name: Option<hir::Name>,
@@ -518,7 +522,7 @@
ctx: &CompletionContext<'_>,
receiver: Option<SmolStr>,
field: usize,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
) {
// Only used for (unnamed) tuples, all of whose fields *are* stable. No need to check
// stability here.
@@ -550,7 +554,7 @@
&mut self,
ctx: &CompletionContext<'_>,
pattern_ctx: &PatternContext,
- path_ctx: Option<&PathCompletionCtx>,
+ path_ctx: Option<&PathCompletionCtx<'_>>,
variant: hir::Variant,
local_name: Option<hir::Name>,
) {
@@ -704,7 +708,7 @@
pub(super) fn complete_name_ref(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- NameRefContext { nameref, kind }: &NameRefContext,
+ NameRefContext { nameref, kind }: &NameRefContext<'_>,
) {
match kind {
NameRefKind::Path(path_ctx) => {
diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs
index 705402c..c542e14 100644
--- a/crates/ide-completion/src/completions/attribute.rs
+++ b/crates/ide-completion/src/completions/attribute.rs
@@ -86,7 +86,7 @@
pub(crate) fn complete_attribute_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
&AttrCtx { kind, annotated_item_kind, ref derive_helpers }: &AttrCtx,
) {
let is_inner = kind == AttrKind::Inner;
diff --git a/crates/ide-completion/src/completions/attribute/derive.rs b/crates/ide-completion/src/completions/attribute/derive.rs
index 2fc07e0..267d92b 100644
--- a/crates/ide-completion/src/completions/attribute/derive.rs
+++ b/crates/ide-completion/src/completions/attribute/derive.rs
@@ -13,7 +13,7 @@
pub(crate) fn complete_derive_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
existing_derives: &ExistingDerives,
) {
let core = ctx.famous_defs().core();
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index 4f21136..5340d65 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -18,7 +18,7 @@
pub(crate) fn complete_dot(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
) {
let receiver_ty = match dot_access {
DotAccess { receiver_ty: Some(receiver_ty), .. } => &receiver_ty.original,
@@ -130,8 +130,8 @@
pub(crate) fn complete_undotted_self(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- expr_ctx: &PathExprCtx,
+ path_ctx: &PathCompletionCtx<'_>,
+ expr_ctx: &PathExprCtx<'_>,
) {
if !ctx.config.enable_self_on_the_fly {
return;
@@ -198,9 +198,9 @@
fn complete_fields(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- receiver: &hir::Type,
- mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
- mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
+ receiver: &hir::Type<'_>,
+ mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type<'_>),
+ mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type<'_>),
is_field_access: bool,
is_method_access_with_parens: bool,
) {
@@ -230,7 +230,7 @@
fn complete_methods(
ctx: &CompletionContext<'_>,
- receiver: &hir::Type,
+ receiver: &hir::Type<'_>,
traits_in_scope: &FxHashSet<hir::TraitId>,
f: impl FnMut(hir::Function),
) {
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 7fbd1fb..2133291 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -47,8 +47,8 @@
pub(crate) fn complete_expr_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
- expr_ctx: &PathExprCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
+ expr_ctx: &PathExprCtx<'_>,
) {
let _p = tracing::info_span!("complete_expr_path").entered();
if !ctx.qualifier_ctx.none() {
@@ -145,10 +145,16 @@
});
match resolution {
hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
- // Set visible_from to None so private items are returned.
- // They will be possibly filtered out in add_path_resolution()
- // via def_is_visible().
- let module_scope = module.scope(ctx.db, None);
+ let visible_from = if ctx.config.enable_private_editable {
+ // Set visible_from to None so private items are returned.
+ // They will be possibly filtered out in add_path_resolution()
+ // via def_is_visible().
+ None
+ } else {
+ Some(ctx.module)
+ };
+
+ let module_scope = module.scope(ctx.db, visible_from);
for (name, def) in module_scope {
if scope_def_applicable(def) {
acc.add_path_resolution(
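The expr.rs change above passes `None` as the visibility filter only when private editable completions are enabled; otherwise it passes the current module so `module.scope` already excludes private items. A small sketch of that gating (hypothetical `Module`/`Config` types, not the completion API):

```rust
// Sketch of the visibility gating above (hypothetical types; the real code
// feeds an `Option<Module>` into `module.scope`).
#[derive(Clone, Copy, Debug, PartialEq)]
struct Module(u32);

struct Config {
    enable_private_editable: bool,
}

fn visible_from(config: &Config, current_module: Module) -> Option<Module> {
    if config.enable_private_editable {
        // None means "return private items too"; they can be filtered out later.
        None
    } else {
        Some(current_module)
    }
}

fn main() {
    let cfg = Config { enable_private_editable: false };
    assert_eq!(visible_from(&cfg, Module(0)), Some(Module(0)));
}
```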
diff --git a/crates/ide-completion/src/completions/field.rs b/crates/ide-completion/src/completions/field.rs
index 1441b0e..26afa9c 100644
--- a/crates/ide-completion/src/completions/field.rs
+++ b/crates/ide-completion/src/completions/field.rs
@@ -8,7 +8,7 @@
pub(crate) fn complete_field_list_tuple_variant(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
) {
if ctx.qualifier_ctx.vis_node.is_some() {
} else if let PathCompletionCtx {
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index a747561..dad8a76 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -111,7 +111,7 @@
pub(crate) fn import_on_the_fly_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
) -> Option<()> {
if !ctx.config.enable_imports_on_the_fly {
return None;
@@ -175,7 +175,7 @@
pub(crate) fn import_on_the_fly_dot(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
) -> Option<()> {
if !ctx.config.enable_imports_on_the_fly {
return None;
@@ -203,8 +203,8 @@
fn import_on_the_fly(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { kind, .. }: &PathCompletionCtx,
- import_assets: ImportAssets,
+ path_ctx @ PathCompletionCtx { kind, .. }: &PathCompletionCtx<'_>,
+ import_assets: ImportAssets<'_>,
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
@@ -290,7 +290,7 @@
acc: &mut Completions,
ctx: &CompletionContext<'_>,
pattern_ctx: &PatternContext,
- import_assets: ImportAssets,
+ import_assets: ImportAssets<'_>,
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
@@ -335,8 +335,8 @@
fn import_on_the_fly_method(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
- import_assets: ImportAssets,
+ dot_access: &DotAccess<'_>,
+ import_assets: ImportAssets<'_>,
position: SyntaxNode,
potential_import_name: String,
) -> Option<()> {
@@ -400,11 +400,11 @@
if token_kind.is_any_identifier() { ctx.token.to_string() } else { String::new() }
}
-fn import_assets_for_path(
- ctx: &CompletionContext<'_>,
+fn import_assets_for_path<'db>(
+ ctx: &CompletionContext<'db>,
potential_import_name: &str,
qualifier: Option<ast::Path>,
-) -> Option<ImportAssets> {
+) -> Option<ImportAssets<'db>> {
let _p =
tracing::info_span!("import_assets_for_path", ?potential_import_name, ?qualifier).entered();
diff --git a/crates/ide-completion/src/completions/item_list.rs b/crates/ide-completion/src/completions/item_list.rs
index 893997c..6c001bd 100644
--- a/crates/ide-completion/src/completions/item_list.rs
+++ b/crates/ide-completion/src/completions/item_list.rs
@@ -10,8 +10,8 @@
pub(crate) fn complete_item_list_in_expr(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- expr_ctx: &PathExprCtx,
+ path_ctx: &PathCompletionCtx<'_>,
+ expr_ctx: &PathExprCtx<'_>,
) {
if !expr_ctx.in_block_expr {
return;
@@ -25,7 +25,7 @@
pub(crate) fn complete_item_list(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
kind: &ItemListKind,
) {
let _p = tracing::info_span!("complete_item_list").entered();
diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 58aead7..092219a 100644
--- a/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -122,7 +122,7 @@
pub(crate) fn complete_trait_impl_item_by_name(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
name_ref: &Option<ast::NameRef>,
impl_: &Option<ast::Impl>,
) {
diff --git a/crates/ide-completion/src/completions/pattern.rs b/crates/ide-completion/src/completions/pattern.rs
index ea3511d..62fae1c 100644
--- a/crates/ide-completion/src/completions/pattern.rs
+++ b/crates/ide-completion/src/completions/pattern.rs
@@ -124,7 +124,7 @@
pub(crate) fn complete_pattern_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
) {
match qualified {
Qualified::With { resolution: Some(resolution), super_chain_len, .. } => {
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index 3cdf211..d002385 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -11,6 +11,7 @@
text_edit::TextEdit,
ty_filter::TryEnum,
};
+use itertools::Either;
use stdx::never;
use syntax::{
SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},
@@ -28,7 +29,7 @@
pub(crate) fn complete_postfix(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
) {
if !ctx.config.enable_postfix_completions {
return;
@@ -86,98 +87,10 @@
}
}
- let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
- if let Some(try_enum) = &try_enum {
- match try_enum {
- TryEnum::Result => {
- postfix_snippet(
- "ifl",
- "if let Ok {}",
- &format!("if let Ok($1) = {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
-
- postfix_snippet(
- "lete",
- "let Ok else {}",
- &format!("let Ok($1) = {receiver_text} else {{\n $2\n}};\n$0"),
- )
- .add_to(acc, ctx.db);
-
- postfix_snippet(
- "while",
- "while let Ok {}",
- &format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
- }
- TryEnum::Option => {
- postfix_snippet(
- "ifl",
- "if let Some {}",
- &format!("if let Some($1) = {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
-
- postfix_snippet(
- "lete",
- "let Some else {}",
- &format!("let Some($1) = {receiver_text} else {{\n $2\n}};\n$0"),
- )
- .add_to(acc, ctx.db);
-
- postfix_snippet(
- "while",
- "while let Some {}",
- &format!("while let Some($1) = {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
- }
- }
- } else if receiver_ty.is_bool() || receiver_ty.is_unknown() {
- postfix_snippet("if", "if expr {}", &format!("if {receiver_text} {{\n $0\n}}"))
- .add_to(acc, ctx.db);
- postfix_snippet("while", "while expr {}", &format!("while {receiver_text} {{\n $0\n}}"))
- .add_to(acc, ctx.db);
- postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db);
- } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() {
- if receiver_ty.impls_trait(ctx.db, trait_, &[]) {
- postfix_snippet(
- "for",
- "for ele in expr {}",
- &format!("for ele in {receiver_text} {{\n $0\n}}"),
- )
- .add_to(acc, ctx.db);
- }
- }
-
postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc, ctx.db);
postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc, ctx.db);
postfix_snippet("deref", "*expr", &format!("*{receiver_text}")).add_to(acc, ctx.db);
- let mut block_should_be_wrapped = true;
- if dot_receiver.syntax().kind() == BLOCK_EXPR {
- block_should_be_wrapped = false;
- if let Some(parent) = dot_receiver.syntax().parent() {
- if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) {
- block_should_be_wrapped = true;
- }
- }
- };
- let unsafe_completion_string = if block_should_be_wrapped {
- format!("unsafe {{ {receiver_text} }}")
- } else {
- format!("unsafe {receiver_text}")
- };
- postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc, ctx.db);
-
- let const_completion_string = if block_should_be_wrapped {
- format!("const {{ {receiver_text} }}")
- } else {
- format!("const {receiver_text}")
- };
- postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db);
-
// The rest of the postfix completions create an expression that moves an argument,
// so it's better to consider references now to avoid breaking the compilation
@@ -195,37 +108,6 @@
add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
}
- match try_enum {
- Some(try_enum) => match try_enum {
- TryEnum::Result => {
- postfix_snippet(
- "match",
- "match expr {}",
- &format!("match {receiver_text} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}"),
- )
- .add_to(acc, ctx.db);
- }
- TryEnum::Option => {
- postfix_snippet(
- "match",
- "match expr {}",
- &format!(
- "match {receiver_text} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}"
- ),
- )
- .add_to(acc, ctx.db);
- }
- },
- None => {
- postfix_snippet(
- "match",
- "match expr {}",
- &format!("match {receiver_text} {{\n ${{1:_}} => {{$0}},\n}}"),
- )
- .add_to(acc, ctx.db);
- }
- }
-
postfix_snippet("box", "Box::new(expr)", &format!("Box::new({receiver_text})"))
.add_to(acc, ctx.db);
postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({receiver_text})")).add_to(acc, ctx.db); // fixme
@@ -233,15 +115,183 @@
postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})"))
.add_to(acc, ctx.db);
- if let Some(parent) = dot_receiver_including_refs.syntax().parent().and_then(|p| p.parent()) {
- if matches!(parent.kind(), STMT_LIST | EXPR_STMT) {
- postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
- .add_to(acc, ctx.db);
- postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};"))
- .add_to(acc, ctx.db);
+ let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
+ let mut is_in_cond = false;
+ if let Some(parent) = dot_receiver_including_refs.syntax().parent() {
+ if let Some(second_ancestor) = parent.parent() {
+ let sec_ancestor_kind = second_ancestor.kind();
+ if let Some(expr) = <Either<ast::IfExpr, ast::WhileExpr>>::cast(second_ancestor) {
+ is_in_cond = match expr {
+ Either::Left(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent),
+ Either::Right(it) => {
+ it.condition().is_some_and(|cond| *cond.syntax() == parent)
+ }
+ }
+ }
+ match &try_enum {
+ Some(try_enum) if is_in_cond => match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "let",
+ "let Ok(_)",
+ &format!("let Ok($0) = {receiver_text}"),
+ )
+ .add_to(acc, ctx.db);
+ postfix_snippet(
+ "letm",
+ "let Ok(mut _)",
+ &format!("let Ok(mut $0) = {receiver_text}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "let",
+ "let Some(_)",
+ &format!("let Some($0) = {receiver_text}"),
+ )
+ .add_to(acc, ctx.db);
+ postfix_snippet(
+ "letm",
+ "let Some(mut _)",
+ &format!("let Some(mut $0) = {receiver_text}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ },
+ _ if matches!(sec_ancestor_kind, STMT_LIST | EXPR_STMT) => {
+ postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
+ .add_to(acc, ctx.db);
+ postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};"))
+ .add_to(acc, ctx.db);
+ }
+ _ => (),
+ }
}
}
+ if !is_in_cond {
+ match try_enum {
+ Some(try_enum) => match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!("match {receiver_text} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!(
+ "match {receiver_text} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}"
+ ),
+ )
+ .add_to(acc, ctx.db);
+ }
+ },
+ None => {
+ postfix_snippet(
+ "match",
+ "match expr {}",
+ &format!("match {receiver_text} {{\n ${{1:_}} => {{$0}},\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ }
+ if let Some(try_enum) = &try_enum {
+ match try_enum {
+ TryEnum::Result => {
+ postfix_snippet(
+ "ifl",
+ "if let Ok {}",
+ &format!("if let Ok($1) = {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
+ "lete",
+ "let Ok else {}",
+ &format!("let Ok($1) = {receiver_text} else {{\n $2\n}};\n$0"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
+ "while",
+ "while let Ok {}",
+ &format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ TryEnum::Option => {
+ postfix_snippet(
+ "ifl",
+ "if let Some {}",
+ &format!("if let Some($1) = {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
+ "lete",
+ "let Some else {}",
+ &format!("let Some($1) = {receiver_text} else {{\n $2\n}};\n$0"),
+ )
+ .add_to(acc, ctx.db);
+
+ postfix_snippet(
+ "while",
+ "while let Some {}",
+ &format!("while let Some($1) = {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ }
+ } else if receiver_ty.is_bool() || receiver_ty.is_unknown() {
+ postfix_snippet("if", "if expr {}", &format!("if {receiver_text} {{\n $0\n}}"))
+ .add_to(acc, ctx.db);
+ postfix_snippet(
+ "while",
+ "while expr {}",
+ &format!("while {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db);
+ } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() {
+ if receiver_ty.impls_trait(ctx.db, trait_, &[]) {
+ postfix_snippet(
+ "for",
+ "for ele in expr {}",
+ &format!("for ele in {receiver_text} {{\n $0\n}}"),
+ )
+ .add_to(acc, ctx.db);
+ }
+ }
+ }
+
+ let mut block_should_be_wrapped = true;
+ if dot_receiver.syntax().kind() == BLOCK_EXPR {
+ block_should_be_wrapped = false;
+ if let Some(parent) = dot_receiver.syntax().parent() {
+ if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) {
+ block_should_be_wrapped = true;
+ }
+ }
+ };
+ {
+ let (open_brace, close_brace) =
+ if block_should_be_wrapped { ("{ ", " }") } else { ("", "") };
+ let (open_paren, close_paren) = if is_in_cond { ("(", ")") } else { ("", "") };
+ let unsafe_completion_string =
+ format!("{open_paren}unsafe {open_brace}{receiver_text}{close_brace}{close_paren}");
+ postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc, ctx.db);
+
+ let const_completion_string =
+ format!("{open_paren}const {open_brace}{receiver_text}{close_brace}{close_paren}");
+ postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db);
+ }
+
if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() {
if let Some(literal_text) = ast::String::cast(literal.token()) {
add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text);
@@ -568,6 +618,54 @@
}
#[test]
+ fn option_iflet_cond() {
+ check(
+ r#"
+//- minicore: option
+fn main() {
+ let bar = Some(true);
+ if bar.$0
+}
+"#,
+ expect![[r#"
+ me and(…) fn(self, Option<U>) -> Option<U>
+ me as_ref() const fn(&self) -> Option<&T>
+ me ok_or(…) const fn(self, E) -> Result<T, E>
+ me unwrap() const fn(self) -> T
+ me unwrap_or(…) fn(self, T) -> T
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn const const {}
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let Some(_)
+ sn letm let Some(mut _)
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_edit(
+ "let",
+ r#"
+//- minicore: option
+fn main() {
+ let bar = Some(true);
+ if bar.$0
+}
+"#,
+ r#"
+fn main() {
+ let bar = Some(true);
+ if let Some($0) = bar
+}
+"#,
+ );
+ }
+
+ #[test]
fn option_letelse() {
check_edit(
"lete",
diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs
index c18aab0..36f38a7 100644
--- a/crates/ide-completion/src/completions/record.rs
+++ b/crates/ide-completion/src/completions/record.rs
@@ -88,7 +88,7 @@
pub(crate) fn add_default_update(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- ty: Option<hir::TypeInfo>,
+ ty: Option<hir::TypeInfo<'_>>,
) {
let default_trait = ctx.famous_defs().core_default_Default();
let impls_default_trait = default_trait
@@ -117,7 +117,7 @@
fn complete_fields(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- missing_fields: Vec<(hir::Field, hir::Type)>,
+ missing_fields: Vec<(hir::Field, hir::Type<'_>)>,
) {
for (field, ty) in missing_fields {
// This should call something else, we shouldn't be synthesizing a DotAccess here
diff --git a/crates/ide-completion/src/completions/snippet.rs b/crates/ide-completion/src/completions/snippet.rs
index 31aae11..ead9852 100644
--- a/crates/ide-completion/src/completions/snippet.rs
+++ b/crates/ide-completion/src/completions/snippet.rs
@@ -11,8 +11,8 @@
pub(crate) fn complete_expr_snippet(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- &PathExprCtx { in_block_expr, .. }: &PathExprCtx,
+ path_ctx: &PathCompletionCtx<'_>,
+ &PathExprCtx { in_block_expr, .. }: &PathExprCtx<'_>,
) {
if !matches!(path_ctx.qualified, Qualified::No) {
return;
@@ -51,7 +51,7 @@
pub(crate) fn complete_item_snippet(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
kind: &ItemListKind,
) {
if !matches!(path_ctx.qualified, Qualified::No) {
diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs
index 79db705..7c38c7d 100644
--- a/crates/ide-completion/src/completions/type.rs
+++ b/crates/ide-completion/src/completions/type.rs
@@ -12,7 +12,7 @@
pub(crate) fn complete_type_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
location: &TypeLocation,
) {
let _p = tracing::info_span!("complete_type_path").entered();
@@ -220,7 +220,7 @@
pub(crate) fn complete_ascribed_type(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
ascription: &TypeAscriptionTarget,
) -> Option<()> {
if !path_ctx.is_trivial_path() {
diff --git a/crates/ide-completion/src/completions/use_.rs b/crates/ide-completion/src/completions/use_.rs
index 4d6d0b7..d2ab193 100644
--- a/crates/ide-completion/src/completions/use_.rs
+++ b/crates/ide-completion/src/completions/use_.rs
@@ -13,7 +13,7 @@
pub(crate) fn complete_use_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, use_tree_parent, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, use_tree_parent, .. }: &PathCompletionCtx<'_>,
name_ref: &Option<ast::NameRef>,
) {
match qualified {
diff --git a/crates/ide-completion/src/completions/vis.rs b/crates/ide-completion/src/completions/vis.rs
index d15c35a..38761f7 100644
--- a/crates/ide-completion/src/completions/vis.rs
+++ b/crates/ide-completion/src/completions/vis.rs
@@ -8,7 +8,7 @@
pub(crate) fn complete_vis_path(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
- path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
+ path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
&has_in_token: &bool,
) {
match qualified {
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 5287627..cfd7f80 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -65,13 +65,13 @@
/// The state of the path we are currently completing.
#[derive(Debug)]
-pub(crate) struct PathCompletionCtx {
+pub(crate) struct PathCompletionCtx<'db> {
/// If this is a call with () already there (or {} in case of record patterns)
pub(crate) has_call_parens: bool,
/// If this has a macro call bang !
pub(crate) has_macro_bang: bool,
/// The qualifier of the current path.
- pub(crate) qualified: Qualified,
+ pub(crate) qualified: Qualified<'db>,
/// The parent of the path we are completing.
pub(crate) parent: Option<ast::Path>,
#[allow(dead_code)]
@@ -79,14 +79,14 @@
pub(crate) path: ast::Path,
/// The path of which we are completing the segment in the original file
pub(crate) original_path: Option<ast::Path>,
- pub(crate) kind: PathKind,
+ pub(crate) kind: PathKind<'db>,
/// Whether the path segment has type args or not.
pub(crate) has_type_args: bool,
/// Whether the qualifier comes from a use tree parent or not
pub(crate) use_tree_parent: bool,
}
-impl PathCompletionCtx {
+impl PathCompletionCtx<'_> {
pub(crate) fn is_trivial_path(&self) -> bool {
matches!(
self,
@@ -104,9 +104,9 @@
/// The kind of path we are completing right now.
#[derive(Debug, PartialEq, Eq)]
-pub(crate) enum PathKind {
+pub(crate) enum PathKind<'db> {
Expr {
- expr_ctx: PathExprCtx,
+ expr_ctx: PathExprCtx<'db>,
},
Type {
location: TypeLocation,
@@ -140,7 +140,7 @@
}
#[derive(Debug, PartialEq, Eq)]
-pub(crate) struct PathExprCtx {
+pub(crate) struct PathExprCtx<'db> {
pub(crate) in_block_expr: bool,
pub(crate) in_breakable: BreakableKind,
pub(crate) after_if_expr: bool,
@@ -152,7 +152,7 @@
/// The surrounding RecordExpression we are completing a functional update
pub(crate) is_func_update: Option<ast::RecordExpr>,
pub(crate) self_param: Option<hir::SelfParam>,
- pub(crate) innermost_ret_ty: Option<hir::Type>,
+ pub(crate) innermost_ret_ty: Option<hir::Type<'db>>,
pub(crate) impl_: Option<ast::Impl>,
/// Whether this expression occurs in match arm guard position: before the
/// fat arrow token
@@ -241,7 +241,7 @@
}
#[derive(Debug)]
-pub(crate) enum Qualified {
+pub(crate) enum Qualified<'db> {
No,
With {
path: ast::Path,
@@ -260,7 +260,7 @@
},
/// <_>::
TypeAnchor {
- ty: Option<hir::Type>,
+ ty: Option<hir::Type<'db>>,
trait_: Option<hir::Trait>,
},
/// Whether the path is an absolute path
@@ -341,17 +341,17 @@
/// The state of the NameRef we are completing.
#[derive(Debug)]
-pub(crate) struct NameRefContext {
+pub(crate) struct NameRefContext<'db> {
/// NameRef syntax in the original file
pub(crate) nameref: Option<ast::NameRef>,
- pub(crate) kind: NameRefKind,
+ pub(crate) kind: NameRefKind<'db>,
}
/// The kind of the NameRef we are completing.
#[derive(Debug)]
-pub(crate) enum NameRefKind {
- Path(PathCompletionCtx),
- DotAccess(DotAccess),
+pub(crate) enum NameRefKind<'db> {
+ Path(PathCompletionCtx<'db>),
+ DotAccess(DotAccess<'db>),
/// Position where we are only interested in keyword completions
Keyword(ast::Item),
/// The record expression this nameref is a field of and whether a dot precedes the completion identifier.
@@ -365,9 +365,9 @@
/// The identifier we are currently completing.
#[derive(Debug)]
-pub(crate) enum CompletionAnalysis {
+pub(crate) enum CompletionAnalysis<'db> {
Name(NameContext),
- NameRef(NameRefContext),
+ NameRef(NameRefContext<'db>),
Lifetime(LifetimeContext),
/// The string the cursor is currently inside
String {
@@ -386,9 +386,9 @@
/// Information about the field or method access we are completing.
#[derive(Debug)]
-pub(crate) struct DotAccess {
+pub(crate) struct DotAccess<'db> {
pub(crate) receiver: Option<ast::Expr>,
- pub(crate) receiver_ty: Option<TypeInfo>,
+ pub(crate) receiver_ty: Option<TypeInfo<'db>>,
pub(crate) kind: DotAccessKind,
pub(crate) ctx: DotAccessExprCtx,
}
@@ -457,7 +457,7 @@
/// This is usually the parameter name of the function argument we are completing.
pub(crate) expected_name: Option<NameOrNameRef>,
/// The expected type of what we are completing.
- pub(crate) expected_type: Option<Type>,
+ pub(crate) expected_type: Option<Type<'a>>,
pub(crate) qualifier_ctx: QualifierCtx,
@@ -608,7 +608,7 @@
pub(crate) fn iterate_path_candidates(
&self,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
mut cb: impl FnMut(hir::AssocItem),
) {
let mut seen = FxHashSet::default();
@@ -695,12 +695,12 @@
}
// CompletionContext construction
-impl<'a> CompletionContext<'a> {
+impl<'db> CompletionContext<'db> {
pub(crate) fn new(
- db: &'a RootDatabase,
+ db: &'db RootDatabase,
position @ FilePosition { file_id, offset }: FilePosition,
- config: &'a CompletionConfig<'a>,
- ) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
+ config: &'db CompletionConfig<'db>,
+ ) -> Option<(CompletionContext<'db>, CompletionAnalysis<'db>)> {
let _p = tracing::info_span!("CompletionContext::new").entered();
let sema = Semantics::new(db);
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 7a2230b..6e3a76f 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -38,9 +38,9 @@
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
-pub(super) struct AnalysisResult {
- pub(super) analysis: CompletionAnalysis,
- pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
+pub(super) struct AnalysisResult<'db> {
+ pub(super) analysis: CompletionAnalysis<'db>,
+ pub(super) expected: (Option<Type<'db>>, Option<ast::NameOrNameRef>),
pub(super) qualifier_ctx: QualifierCtx,
/// the original token of the expanded file
pub(super) token: SyntaxToken,
@@ -48,13 +48,13 @@
pub(super) original_offset: TextSize,
}
-pub(super) fn expand_and_analyze(
- sema: &Semantics<'_, RootDatabase>,
+pub(super) fn expand_and_analyze<'db>(
+ sema: &Semantics<'db, RootDatabase>,
original_file: InFile<SyntaxNode>,
speculative_file: SyntaxNode,
offset: TextSize,
original_token: &SyntaxToken,
-) -> Option<AnalysisResult> {
+) -> Option<AnalysisResult<'db>> {
// as we insert after the offset, right biased will *always* pick the identifier no matter
// if there is an ident already typed or not
let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
@@ -432,12 +432,13 @@
/// Fill the completion context; this is what does semantic reasoning about the surrounding context
/// of the completion location.
-fn analyze(
- sema: &Semantics<'_, RootDatabase>,
+fn analyze<'db>(
+ sema: &Semantics<'db, RootDatabase>,
expansion_result: ExpansionResult,
original_token: &SyntaxToken,
self_token: &SyntaxToken,
-) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
+) -> Option<(CompletionAnalysis<'db>, (Option<Type<'db>>, Option<ast::NameOrNameRef>), QualifierCtx)>
+{
let _p = tracing::info_span!("CompletionContext::analyze").entered();
let ExpansionResult {
original_file,
@@ -555,17 +556,17 @@
}
/// Calculate the expected type and name of the cursor position.
-fn expected_type_and_name(
- sema: &Semantics<'_, RootDatabase>,
+fn expected_type_and_name<'db>(
+ sema: &Semantics<'db, RootDatabase>,
token: &SyntaxToken,
name_like: &ast::NameLike,
-) -> (Option<Type>, Option<NameOrNameRef>) {
+) -> (Option<Type<'db>>, Option<NameOrNameRef>) {
let mut node = match token.parent() {
Some(it) => it,
None => return (None, None),
};
- let strip_refs = |mut ty: Type| match name_like {
+ let strip_refs = |mut ty: Type<'db>| match name_like {
ast::NameLike::NameRef(n) => {
let p = match n.syntax().parent() {
Some(it) => it,
@@ -805,13 +806,13 @@
Some(NameContext { name, kind })
}
-fn classify_name_ref(
- sema: &Semantics<'_, RootDatabase>,
+fn classify_name_ref<'db>(
+ sema: &Semantics<'db, RootDatabase>,
original_file: &SyntaxNode,
name_ref: ast::NameRef,
original_offset: TextSize,
parent: SyntaxNode,
-) -> Option<(NameRefContext, QualifierCtx)> {
+) -> Option<(NameRefContext<'db>, QualifierCtx)> {
let nameref = find_node_at_offset(original_file, original_offset);
let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 19cdef3..dcaac39 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -502,7 +502,7 @@
impl Builder {
pub(crate) fn from_resolution(
ctx: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: hir::Name,
resolution: hir::ScopeDef,
) -> Self {
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 00c0b47..c6b8af3 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -122,10 +122,10 @@
pub(crate) fn render_field(
ctx: RenderContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
receiver: Option<SmolStr>,
field: hir::Field,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
) -> CompletionItem {
let db = ctx.db();
let is_deprecated = ctx.is_deprecated(field);
@@ -204,7 +204,7 @@
ctx: RenderContext<'_>,
receiver: Option<SmolStr>,
field: usize,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
) -> CompletionItem {
let mut item = CompletionItem::new(
SymbolKind::Field,
@@ -241,7 +241,7 @@
pub(crate) fn render_path_resolution(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: hir::Name,
resolution: ScopeDef,
) -> Builder {
@@ -259,7 +259,7 @@
pub(crate) fn render_resolution_with_import(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
import_edit: LocatedImport,
) -> Option<Builder> {
let resolution = ScopeDef::from(import_edit.original_item);
@@ -282,10 +282,10 @@
pub(crate) fn render_expr(
ctx: &CompletionContext<'_>,
- expr: &hir::term_search::Expr,
+ expr: &hir::term_search::Expr<'_>,
) -> Option<Builder> {
let mut i = 1;
- let mut snippet_formatter = |ty: &hir::Type| {
+ let mut snippet_formatter = |ty: &hir::Type<'_>| {
let arg_name = ty
.as_adt()
.map(|adt| stdx::to_lower_snake_case(adt.name(ctx.db).as_str()))
@@ -295,7 +295,7 @@
res
};
- let mut label_formatter = |ty: &hir::Type| {
+ let mut label_formatter = |ty: &hir::Type<'_>| {
ty.as_adt()
.map(|adt| stdx::to_lower_snake_case(adt.name(ctx.db).as_str()))
.unwrap_or_else(|| String::from("..."))
@@ -391,7 +391,7 @@
fn render_resolution_path(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: hir::Name,
import_to_add: Option<LocatedImport>,
resolution: ScopeDef,
@@ -460,7 +460,7 @@
}
}
- let mut set_item_relevance = |ty: Type| {
+ let mut set_item_relevance = |ty: Type<'_>| {
if !ty.is_unknown() {
item.detail(ty.display(db, krate).to_string());
}
@@ -593,8 +593,8 @@
// FIXME: This checks types without possible coercions which some completions might want to do
fn match_types(
ctx: &CompletionContext<'_>,
- ty1: &hir::Type,
- ty2: &hir::Type,
+ ty1: &hir::Type<'_>,
+ ty2: &hir::Type<'_>,
) -> Option<CompletionRelevanceTypeMatch> {
if ty1 == ty2 {
Some(CompletionRelevanceTypeMatch::Exact)
@@ -607,7 +607,7 @@
fn compute_type_match(
ctx: &CompletionContext<'_>,
- completion_ty: &hir::Type,
+ completion_ty: &hir::Type<'_>,
) -> Option<CompletionRelevanceTypeMatch> {
let expected_type = ctx.expected_type.as_ref()?;
@@ -626,7 +626,7 @@
fn compute_ref_match(
ctx: &CompletionContext<'_>,
- completion_ty: &hir::Type,
+ completion_ty: &hir::Type<'_>,
) -> Option<CompletionItemRefMode> {
let expected_type = ctx.expected_type.as_ref()?;
let expected_without_ref = expected_type.remove_ref();
@@ -658,8 +658,8 @@
fn path_ref_match(
completion: &CompletionContext<'_>,
- path_ctx: &PathCompletionCtx,
- ty: &hir::Type,
+ path_ctx: &PathCompletionCtx<'_>,
+ ty: &hir::Type<'_>,
item: &mut Builder,
) {
if let Some(original_path) = &path_ctx.original_path {
@@ -733,7 +733,7 @@
) {
let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
actual.retain(|it| kinds.contains(&it.kind));
- actual.sort_by_key(|it| cmp::Reverse(it.relevance.score()));
+ actual.sort_by_key(|it| (cmp::Reverse(it.relevance.score()), it.label.primary.clone()));
check_relevance_(actual, expect);
}
@@ -743,7 +743,7 @@
actual.retain(|it| it.kind != CompletionItemKind::Snippet);
actual.retain(|it| it.kind != CompletionItemKind::Keyword);
actual.retain(|it| it.kind != CompletionItemKind::BuiltinType);
- actual.sort_by_key(|it| cmp::Reverse(it.relevance.score()));
+ actual.sort_by_key(|it| (cmp::Reverse(it.relevance.score()), it.label.primary.clone()));
check_relevance_(actual, expect);
}
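
Aside (illustrative, not part of the patch): the two test helpers above now sort completion items by the tuple (Reverse(relevance score), primary label) instead of the score alone, so items with equal relevance are ordered alphabetically by label. That deterministic tie-break is the only reason many expectation blocks below reorder lines such as `md dep` or `fn main()`; no scoring changed. A minimal sketch of the same tuple-key sort with made-up labels:

use std::cmp;

fn main() {
    // (primary label, relevance score) — stand-ins for completion items.
    let mut items = vec![("md dep", 0u32), ("fn main()", 0), ("st Struct", 2)];
    // Highest score first; ties fall back to alphabetical label order.
    items.sort_by_key(|&(label, score)| (cmp::Reverse(score), label));
    assert_eq!(items, [("st Struct", 2), ("fn main()", 0), ("md dep", 0)]);
}
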
@@ -824,9 +824,9 @@
st dep::test_mod_b::Struct {…} dep::test_mod_b::Struct { } [type_could_unify]
ex dep::test_mod_b::Struct { } [type_could_unify]
st Struct Struct [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(Struct) []
- md dep []
st Struct Struct [requires_import]
"#]],
);
@@ -862,9 +862,9 @@
"#,
expect![[r#"
un Union Union [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(Union) []
- md dep []
en Union Union [requires_import]
"#]],
);
@@ -900,9 +900,9 @@
ev dep::test_mod_b::Enum::variant dep::test_mod_b::Enum::variant [type_could_unify]
ex dep::test_mod_b::Enum::variant [type_could_unify]
en Enum Enum [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(Enum) []
- md dep []
en Enum Enum [requires_import]
"#]],
);
@@ -937,9 +937,9 @@
expect![[r#"
ev dep::test_mod_b::Enum::Variant dep::test_mod_b::Enum::Variant [type_could_unify]
ex dep::test_mod_b::Enum::Variant [type_could_unify]
+ md dep []
fn main() fn() []
fn test(…) fn(Enum) []
- md dep []
"#]],
);
}
@@ -967,9 +967,9 @@
}
"#,
expect![[r#"
+ md dep []
fn main() fn() []
fn test(…) fn(fn(usize) -> i32) []
- md dep []
fn function fn(usize) -> i32 [requires_import]
fn function(…) fn(isize) -> i32 [requires_import]
"#]],
@@ -1000,9 +1000,9 @@
"#,
expect![[r#"
ct CONST i32 [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(i32) []
- md dep []
ct CONST i64 [requires_import]
"#]],
);
@@ -1032,9 +1032,9 @@
"#,
expect![[r#"
sc STATIC i32 [type_could_unify+requires_import]
+ md dep []
fn main() fn() []
fn test(…) fn(i32) []
- md dep []
sc STATIC i64 [requires_import]
"#]],
);
@@ -1090,8 +1090,8 @@
"#,
expect![[r#"
- st Struct Struct [type]
st Self Self [type]
+ st Struct Struct [type]
sp Self Struct [type]
st Struct Struct [type]
ex Struct [type]
@@ -1119,9 +1119,9 @@
"#,
expect![[r#"
lc input bool [type+name+local]
+ ex false [type]
ex input [type]
ex true [type]
- ex false [type]
lc inputbad i32 [local]
fn main() fn() []
fn test(…) fn(bool) []
@@ -2088,9 +2088,9 @@
"#,
expect![[r#"
fn bar() fn() -> u8 [type+name]
+ ex bar() [type]
fn baz() fn() -> u8 [type]
ex baz() [type]
- ex bar() [type]
st A A []
fn f() fn() []
"#]],
@@ -2199,8 +2199,8 @@
lc s S [type+name+local]
st S S [type]
st S S [type]
- ex s [type]
ex S [type]
+ ex s [type]
fn foo(…) fn(&mut S) []
fn main() fn() []
"#]],
@@ -2218,8 +2218,8 @@
st S S [type]
lc ssss S [type+local]
st S S [type]
- ex ssss [type]
ex S [type]
+ ex ssss [type]
fn foo(…) fn(&mut S) []
fn main() fn() []
"#]],
@@ -2252,11 +2252,11 @@
ex Foo [type]
lc foo &Foo [local]
lc *foo [type+local]
- fn bar(…) fn(Foo) []
- fn main() fn() []
- md core []
tt Clone []
tt Copy []
+ fn bar(…) fn(Foo) []
+ md core []
+ fn main() fn() []
"#]],
);
}
@@ -2297,9 +2297,9 @@
st &S [type]
st T T []
st &T [type]
+ md core []
fn foo(…) fn(&S) []
fn main() fn() []
- md core []
"#]],
)
}
@@ -2346,9 +2346,9 @@
st &mut S [type]
st T T []
st &mut T [type]
+ md core []
fn foo(…) fn(&mut S) []
fn main() fn() []
- md core []
"#]],
)
}
@@ -2364,8 +2364,8 @@
}
"#,
expect![[r#"
- lc baz i32 [local]
lc bar u32 [local]
+ lc baz i32 [local]
fn foo(…) fn(u32) []
"#]],
);
@@ -2449,9 +2449,9 @@
st &T [type]
fn bar() fn() -> T []
fn &bar() [type]
+ md core []
fn foo(…) fn(&S) []
fn main() fn() []
- md core []
"#]],
)
}
@@ -2702,8 +2702,8 @@
fn fn_builder() fn() -> FooBuilder [type_could_unify]
fn fn_ctr_wrapped() fn() -> Option<Foo<T>> [type_could_unify]
fn fn_ctr_wrapped_2() fn() -> Result<Foo<T>, u32> [type_could_unify]
- me fn_returns_unit(…) fn(&self) [type_could_unify]
fn fn_other() fn() -> Option<u32> [type_could_unify]
+ me fn_returns_unit(…) fn(&self) [type_could_unify]
"#]],
);
}
@@ -2965,12 +2965,12 @@
ev Foo::B Foo::B [type_could_unify]
ev Foo::A(…) Foo::A(T) [type_could_unify]
lc foo Foo<u32> [type+local]
- ex foo [type]
ex Foo::B [type]
+ ex foo [type]
en Foo Foo<{unknown}> [type_could_unify]
- fn foo() fn() []
fn bar() fn() -> Foo<u8> []
fn baz() fn() -> Foo<T> []
+ fn foo() fn() []
"#]],
);
}
@@ -3000,19 +3000,19 @@
expect![[r#"
sn not !expr [snippet]
me not() fn(self) -> <Self as Not>::Output [type_could_unify+requires_import]
- sn if if expr {} []
- sn while while expr {} []
- sn ref &expr []
- sn refm &mut expr []
- sn deref *expr []
- sn unsafe unsafe {} []
- sn const const {} []
- sn match match expr {} []
sn box Box::new(expr) []
+ sn call function(expr) []
+ sn const const {} []
sn dbg dbg!(expr) []
sn dbgr dbg!(&expr) []
- sn call function(expr) []
+ sn deref *expr []
+ sn if if expr {} []
+ sn match match expr {} []
+ sn ref &expr []
+ sn refm &mut expr []
sn return return expr []
+ sn unsafe unsafe {} []
+ sn while while expr {} []
"#]],
);
}
@@ -3033,19 +3033,19 @@
&[CompletionItemKind::Snippet, CompletionItemKind::SymbolKind(SymbolKind::Method)],
expect![[r#"
me f() fn(&self) []
- sn ref &expr []
- sn refm &mut expr []
- sn deref *expr []
- sn unsafe unsafe {} []
- sn const const {} []
- sn match match expr {} []
sn box Box::new(expr) []
+ sn call function(expr) []
+ sn const const {} []
sn dbg dbg!(expr) []
sn dbgr dbg!(&expr) []
- sn call function(expr) []
+ sn deref *expr []
sn let let []
sn letm let mut []
+ sn match match expr {} []
+ sn ref &expr []
+ sn refm &mut expr []
sn return return expr []
+ sn unsafe unsafe {} []
"#]],
);
}
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index 2fe517f..7669aec 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -22,13 +22,13 @@
#[derive(Debug)]
enum FuncKind<'ctx> {
- Function(&'ctx PathCompletionCtx),
- Method(&'ctx DotAccess, Option<SmolStr>),
+ Function(&'ctx PathCompletionCtx<'ctx>),
+ Method(&'ctx DotAccess<'ctx>, Option<SmolStr>),
}
pub(crate) fn render_fn(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: Option<hir::Name>,
func: hir::Function,
) -> Builder {
@@ -38,7 +38,7 @@
pub(crate) fn render_method(
ctx: RenderContext<'_>,
- dot_access: &DotAccess,
+ dot_access: &DotAccess<'_>,
receiver: Option<SmolStr>,
local_name: Option<hir::Name>,
func: hir::Function,
@@ -186,8 +186,8 @@
fn compute_return_type_match(
db: &dyn HirDatabase,
ctx: &RenderContext<'_>,
- self_type: hir::Type,
- ret_type: &hir::Type,
+ self_type: hir::Type<'_>,
+ ret_type: &hir::Type<'_>,
) -> CompletionRelevanceReturnType {
if match_types(ctx.completion, &self_type, ret_type).is_some() {
// fn([..]) -> Self
@@ -217,8 +217,8 @@
name: SmolStr,
escaped_name: SmolStr,
self_param: Option<hir::SelfParam>,
- params: Vec<hir::Param>,
- ret_type: &hir::Type,
+ params: Vec<hir::Param<'_>>,
+ ret_type: &hir::Type<'_>,
) -> &'b mut Builder {
cov_mark::hit!(inserts_parens_for_function_calls);
@@ -288,7 +288,7 @@
builder.label(SmolStr::from_iter([&name, label_suffix])).insert_snippet(cap, snippet)
}
-fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'static str {
+fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type<'_>) -> &'static str {
if let Some(derefed_ty) = ty.remove_ref() {
for (name, local) in ctx.locals.iter().sorted_by_key(|&(k, _)| k.clone()) {
if name.as_str() == arg {
@@ -369,12 +369,12 @@
}
}
-fn params(
- ctx: &CompletionContext<'_>,
+fn params<'db>(
+ ctx: &CompletionContext<'db>,
func: hir::Function,
func_kind: &FuncKind<'_>,
has_dot_receiver: bool,
-) -> Option<(Option<hir::SelfParam>, Vec<hir::Param>)> {
+) -> Option<(Option<hir::SelfParam>, Vec<hir::Param<'db>>)> {
ctx.config.callable.as_ref()?;
// Don't add parentheses if the expected type is a function reference with the same signature.
diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs
index 5a9e35a..6c89e49 100644
--- a/crates/ide-completion/src/render/literal.rs
+++ b/crates/ide-completion/src/render/literal.rs
@@ -21,7 +21,7 @@
pub(crate) fn render_variant_lit(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
local_name: Option<hir::Name>,
variant: hir::Variant,
path: Option<hir::ModPath>,
@@ -35,7 +35,7 @@
pub(crate) fn render_struct_literal(
ctx: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
strukt: hir::Struct,
path: Option<hir::ModPath>,
local_name: Option<hir::Name>,
@@ -49,7 +49,7 @@
fn render(
ctx @ RenderContext { completion, .. }: RenderContext<'_>,
- path_ctx: &PathCompletionCtx,
+ path_ctx: &PathCompletionCtx<'_>,
thing: Variant,
name: hir::Name,
path: Option<hir::ModPath>,
@@ -194,7 +194,7 @@
}
}
- fn ty(self, db: &dyn HirDatabase) -> hir::Type {
+ fn ty(self, db: &dyn HirDatabase) -> hir::Type<'_> {
match self {
Variant::Struct(it) => it.ty(db),
Variant::EnumVariant(it) => it.parent_enum(db).ty(db),
diff --git a/crates/ide-completion/src/render/macro_.rs b/crates/ide-completion/src/render/macro_.rs
index 4674dae..35fe407 100644
--- a/crates/ide-completion/src/render/macro_.rs
+++ b/crates/ide-completion/src/render/macro_.rs
@@ -12,7 +12,7 @@
pub(crate) fn render_macro(
ctx: RenderContext<'_>,
- PathCompletionCtx { kind, has_macro_bang, has_call_parens, .. }: &PathCompletionCtx,
+ PathCompletionCtx { kind, has_macro_bang, has_call_parens, .. }: &PathCompletionCtx<'_>,
name: hir::Name,
macro_: hir::Macro,
diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs
index dcc51a8..60ec112 100644
--- a/crates/ide-completion/src/render/pattern.rs
+++ b/crates/ide-completion/src/render/pattern.rs
@@ -46,7 +46,7 @@
pub(crate) fn render_variant_pat(
ctx: RenderContext<'_>,
pattern_ctx: &PatternContext,
- path_ctx: Option<&PathCompletionCtx>,
+ path_ctx: Option<&PathCompletionCtx<'_>>,
variant: hir::Variant,
local_name: Option<Name>,
path: Option<&hir::ModPath>,
@@ -109,7 +109,7 @@
lookup: SmolStr,
pat: String,
def: impl HasDocs + Copy,
- adt_ty: hir::Type,
+ adt_ty: hir::Type<'_>,
// Missing in context of match statement completions
is_variant_missing: bool,
) -> CompletionItem {
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index b46e4c3..b2d18b7 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -1474,20 +1474,18 @@
}
"#,
expect![[r#"
+ me foo() fn(&self)
sn box Box::new(expr)
sn call function(expr)
sn const const {}
sn dbg dbg!(expr)
sn dbgr dbg!(&expr)
sn deref *expr
- sn if if expr {}
sn match match expr {}
- sn not !expr
sn ref &expr
sn refm &mut expr
sn return return expr
sn unsafe unsafe {}
- sn while while expr {}
"#]],
);
}
diff --git a/crates/ide-completion/src/tests/visibility.rs b/crates/ide-completion/src/tests/visibility.rs
index 4b5a0ac..b404011 100644
--- a/crates/ide-completion/src/tests/visibility.rs
+++ b/crates/ide-completion/src/tests/visibility.rs
@@ -1,7 +1,7 @@
//! Completion tests for visibility modifiers.
use expect_test::expect;
-use crate::tests::{check, check_with_trigger_character};
+use crate::tests::{check, check_with_private_editable, check_with_trigger_character};
#[test]
fn empty_pub() {
@@ -78,3 +78,90 @@
"#]],
);
}
+
+#[test]
+fn use_inner_public_function() {
+ check(
+ r#"
+//- /inner.rs crate:inner
+pub fn inner_public() {}
+fn inner_private() {}
+//- /foo.rs crate:foo deps:inner
+use inner::inner_public;
+pub fn outer_public() {}
+//- /lib.rs crate:lib deps:foo
+fn x() {
+ foo::$0
+}
+ "#,
+ expect![[r#"
+ fn outer_public() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn pub_use_inner_public_function() {
+ check(
+ r#"
+//- /inner.rs crate:inner
+pub fn inner_public() {}
+fn inner_private() {}
+//- /foo.rs crate:foo deps:inner
+pub use inner::inner_public;
+pub fn outer_public() {}
+//- /lib.rs crate:lib deps:foo
+fn x() {
+ foo::$0
+}
+ "#,
+ expect![[r#"
+ fn inner_public() fn()
+ fn outer_public() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn use_inner_public_function_private_editable() {
+ check_with_private_editable(
+ r#"
+//- /inner.rs crate:inner
+pub fn inner_public() {}
+fn inner_private() {}
+//- /foo.rs crate:foo deps:inner
+use inner::inner_public;
+pub fn outer_public() {}
+//- /lib.rs crate:lib deps:foo
+fn x() {
+ foo::$0
+}
+ "#,
+ expect![[r#"
+ fn inner_public() fn()
+ fn outer_public() fn()
+ "#]],
+ );
+}
+
+#[test]
+fn pub_use_inner_public_function_private_editable() {
+ check_with_private_editable(
+ r#"
+//- /inner.rs crate:inner
+pub fn inner_public() {}
+fn inner_private() {}
+//- /foo.rs crate:foo deps:inner
+pub use inner::inner_public;
+pub fn outer_public() {}
+//- /lib.rs crate:lib deps:foo
+fn x() {
+ foo::$0
+}
+ "#,
+ expect![[r#"
+ fn inner_public() fn()
+ fn outer_public() fn()
+ "#]],
+ );
+}
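
Aside (illustrative, not part of the patch): the four new tests encode that a plain `use` re-export stays private to the re-exporting crate — a downstream crate only sees it in completions when private items are treated as editable (`check_with_private_editable`) — while a `pub use` re-export is always part of the public surface. A self-contained sketch of the same visibility distinction inside one crate:

mod inner {
    pub fn inner_public() {}
}

mod foo {
    // A plain `use` is private to `foo`: other modules cannot name it as `foo::private_alias`.
    #[allow(unused_imports)]
    use crate::inner::inner_public as private_alias;
    // A `pub use` re-exports the item as part of `foo`'s public API.
    pub use crate::inner::inner_public;
    pub fn outer_public() {}
}

fn main() {
    foo::outer_public();
    foo::inner_public(); // resolves only through the `pub use` above
}
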
diff --git a/crates/ide-db/src/active_parameter.rs b/crates/ide-db/src/active_parameter.rs
index 7b5723f..9edfc11 100644
--- a/crates/ide-db/src/active_parameter.rs
+++ b/crates/ide-db/src/active_parameter.rs
@@ -13,21 +13,21 @@
use crate::RootDatabase;
#[derive(Debug)]
-pub struct ActiveParameter {
- pub ty: Type,
+pub struct ActiveParameter<'db> {
+ pub ty: Type<'db>,
pub src: Option<InFile<Either<ast::SelfParam, ast::Param>>>,
}
-impl ActiveParameter {
+impl<'db> ActiveParameter<'db> {
/// Returns information about the call argument this token is part of.
- pub fn at_token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Self> {
+ pub fn at_token(sema: &Semantics<'db, RootDatabase>, token: SyntaxToken) -> Option<Self> {
let (signature, active_parameter) = callable_for_token(sema, token)?;
Self::from_signature_and_active_parameter(sema, signature, active_parameter)
}
/// Returns information about the call argument this token is part of.
pub fn at_arg(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &'db Semantics<'db, RootDatabase>,
list: ast::ArgList,
at: TextSize,
) -> Option<Self> {
@@ -36,8 +36,8 @@
}
fn from_signature_and_active_parameter(
- sema: &Semantics<'_, RootDatabase>,
- signature: hir::Callable,
+ sema: &Semantics<'db, RootDatabase>,
+ signature: hir::Callable<'db>,
active_parameter: Option<usize>,
) -> Option<Self> {
let idx = active_parameter?;
@@ -63,10 +63,10 @@
}
/// Returns a [`hir::Callable`] this token is a part of and its argument index of said callable.
-pub fn callable_for_token(
- sema: &Semantics<'_, RootDatabase>,
+pub fn callable_for_token<'db>(
+ sema: &Semantics<'db, RootDatabase>,
token: SyntaxToken,
-) -> Option<(hir::Callable, Option<usize>)> {
+) -> Option<(hir::Callable<'db>, Option<usize>)> {
let offset = token.text_range().start();
// Find the calling expression and its NameRef
let parent = token.parent()?;
@@ -79,21 +79,21 @@
}
/// Returns a [`hir::Callable`] this token is a part of and its argument index of said callable.
-pub fn callable_for_arg_list(
- sema: &Semantics<'_, RootDatabase>,
+pub fn callable_for_arg_list<'db>(
+ sema: &Semantics<'db, RootDatabase>,
arg_list: ast::ArgList,
at: TextSize,
-) -> Option<(hir::Callable, Option<usize>)> {
+) -> Option<(hir::Callable<'db>, Option<usize>)> {
debug_assert!(arg_list.syntax().text_range().contains(at));
let callable = arg_list.syntax().parent().and_then(ast::CallableExpr::cast)?;
callable_for_node(sema, &callable, at)
}
-pub fn callable_for_node(
- sema: &Semantics<'_, RootDatabase>,
+pub fn callable_for_node<'db>(
+ sema: &Semantics<'db, RootDatabase>,
calling_node: &ast::CallableExpr,
offset: TextSize,
-) -> Option<(hir::Callable, Option<usize>)> {
+) -> Option<(hir::Callable<'db>, Option<usize>)> {
let callable = match calling_node {
ast::CallableExpr::Call(call) => sema.resolve_expr_as_callable(&call.expr()?),
ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),
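
Aside (illustrative, not part of the patch): the recurring change here and in the hunks that follow is threading a `'db` lifetime through values that borrow from the analysis database, so that something like `ActiveParameter<'db>` holding a `Type<'db>` cannot outlive the `Semantics`/database it came from. A minimal, self-contained sketch of that pattern — `Db`, `Ty`, and `ActiveParam` are made-up names, not rust-analyzer API:

// A "database" owning the underlying data; borrowed views of it carry its lifetime.
struct Db {
    types: Vec<String>,
}

// A "type" value that borrows from the database, like `hir::Type<'db>`.
struct Ty<'db> {
    name: &'db str,
}

// Mirrors `ActiveParameter<'db> { ty: Type<'db>, .. }`: the struct inherits the
// `'db` lifetime from the borrowed value it stores.
struct ActiveParam<'db> {
    ty: Ty<'db>,
    index: usize,
}

impl<'db> ActiveParam<'db> {
    // Taking `&'db Db` ties the returned value to the database's lifetime,
    // loosely mirroring how the new `&Semantics<'db, _>` parameters do.
    fn at_index(db: &'db Db, index: usize) -> Option<ActiveParam<'db>> {
        let name = db.types.get(index)?.as_str();
        Some(ActiveParam { ty: Ty { name }, index })
    }
}

fn main() {
    let db = Db { types: vec!["u32".into(), "String".into()] };
    let param = ActiveParam::at_index(&db, 1).expect("index in range");
    println!("parameter #{} has type `{}`", param.index, param.ty.name);
}
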
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index d5db1c4..a4a140e 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -385,17 +385,17 @@
// FIXME: IdentClass as a name no longer fits
#[derive(Debug)]
-pub enum IdentClass {
- NameClass(NameClass),
- NameRefClass(NameRefClass),
+pub enum IdentClass<'db> {
+ NameClass(NameClass<'db>),
+ NameRefClass(NameRefClass<'db>),
Operator(OperatorClass),
}
-impl IdentClass {
+impl<'db> IdentClass<'db> {
pub fn classify_node(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
node: &SyntaxNode,
- ) -> Option<IdentClass> {
+ ) -> Option<IdentClass<'db>> {
match_ast! {
match node {
ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
@@ -418,23 +418,23 @@
}
pub fn classify_token(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
token: &SyntaxToken,
- ) -> Option<IdentClass> {
+ ) -> Option<IdentClass<'db>> {
let parent = token.parent()?;
Self::classify_node(sema, &parent)
}
pub fn classify_lifetime(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
- ) -> Option<IdentClass> {
+ ) -> Option<IdentClass<'db>> {
NameRefClass::classify_lifetime(sema, lifetime)
.map(IdentClass::NameRefClass)
.or_else(|| NameClass::classify_lifetime(sema, lifetime).map(IdentClass::NameClass))
}
- pub fn definitions(self) -> ArrayVec<(Definition, Option<GenericSubstitution>), 2> {
+ pub fn definitions(self) -> ArrayVec<(Definition, Option<GenericSubstitution<'db>>), 2> {
let mut res = ArrayVec::new();
match self {
IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
@@ -518,7 +518,7 @@
///
/// A model special case is `None` constant in pattern.
#[derive(Debug)]
-pub enum NameClass {
+pub enum NameClass<'db> {
Definition(Definition),
/// `None` in `if let None = Some(82) {}`.
/// Syntactically, it is a name, but semantically it is a reference.
@@ -528,11 +528,11 @@
PatFieldShorthand {
local_def: Local,
field_ref: Field,
- adt_subst: GenericSubstitution,
+ adt_subst: GenericSubstitution<'db>,
},
}
-impl NameClass {
+impl<'db> NameClass<'db> {
/// `Definition` defined by this name.
pub fn defined(self) -> Option<Definition> {
let res = match self {
@@ -545,7 +545,10 @@
Some(res)
}
- pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
+ pub fn classify(
+ sema: &Semantics<'db, RootDatabase>,
+ name: &ast::Name,
+ ) -> Option<NameClass<'db>> {
let _p = tracing::info_span!("NameClass::classify").entered();
let parent = name.syntax().parent()?;
@@ -597,10 +600,10 @@
Some(definition)
}
- fn classify_ident_pat(
- sema: &Semantics<'_, RootDatabase>,
+ fn classify_ident_pat<'db>(
+ sema: &Semantics<'db, RootDatabase>,
ident_pat: ast::IdentPat,
- ) -> Option<NameClass> {
+ ) -> Option<NameClass<'db>> {
if let Some(def) = sema.resolve_bind_pat_to_const(&ident_pat) {
return Some(NameClass::ConstReference(Definition::from(def)));
}
@@ -638,9 +641,9 @@
}
pub fn classify_lifetime(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
- ) -> Option<NameClass> {
+ ) -> Option<NameClass<'db>> {
let _p = tracing::info_span!("NameClass::classify_lifetime", ?lifetime).entered();
let parent = lifetime.syntax().parent()?;
@@ -723,12 +726,12 @@
/// A model special case is field shorthand syntax, which uses a single
/// reference to point to two different defs.
#[derive(Debug)]
-pub enum NameRefClass {
- Definition(Definition, Option<GenericSubstitution>),
+pub enum NameRefClass<'db> {
+ Definition(Definition, Option<GenericSubstitution<'db>>),
FieldShorthand {
local_ref: Local,
field_ref: Field,
- adt_subst: GenericSubstitution,
+ adt_subst: GenericSubstitution<'db>,
},
/// The specific situation where we have an extern crate decl without a rename
/// Here we have both a declaration and a reference.
@@ -741,13 +744,13 @@
},
}
-impl NameRefClass {
+impl<'db> NameRefClass<'db> {
// Note: we don't have unit-tests for this rather important function.
// It is primarily exercised via goto definition tests in `ide`.
pub fn classify(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
name_ref: &ast::NameRef,
- ) -> Option<NameRefClass> {
+ ) -> Option<NameRefClass<'db>> {
let _p = tracing::info_span!("NameRefClass::classify", ?name_ref).entered();
let parent = name_ref.syntax().parent()?;
@@ -866,9 +869,9 @@
}
pub fn classify_lifetime(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
- ) -> Option<NameRefClass> {
+ ) -> Option<NameRefClass<'db>> {
let _p = tracing::info_span!("NameRefClass::classify_lifetime", ?lifetime).entered();
if lifetime.text() == "'static" {
return Some(NameRefClass::Definition(
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index ac592df..9f35988 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -25,26 +25,26 @@
/// * assists
/// * etc.
#[derive(Debug)]
-pub enum ImportCandidate {
+pub enum ImportCandidate<'db> {
/// A path, qualified (`std::collections::HashMap`) or not (`HashMap`).
Path(PathImportCandidate),
/// A trait associated function (with no self parameter) or an associated constant.
/// For 'test_mod::TestEnum::test_function', `ty` is the `test_mod::TestEnum` expression type
/// and `name` is the `test_function`
- TraitAssocItem(TraitImportCandidate),
+ TraitAssocItem(TraitImportCandidate<'db>),
/// A trait method with self parameter.
/// For 'test_enum.test_method()', `ty` is the `test_enum` expression type
/// and `name` is the `test_method`
- TraitMethod(TraitImportCandidate),
+ TraitMethod(TraitImportCandidate<'db>),
}
/// A trait import needed for a given associated item access.
/// For `some::path::SomeStruct::ASSOC_`, contains the
/// type of `some::path::SomeStruct` and `ASSOC_` as the item name.
#[derive(Debug)]
-pub struct TraitImportCandidate {
+pub struct TraitImportCandidate<'db> {
/// A type of the item that has the associated item accessed at.
- pub receiver_ty: Type,
+ pub receiver_ty: Type<'db>,
/// The associated item name that the trait to import should contain.
pub assoc_item_name: NameToImport,
}
@@ -100,16 +100,16 @@
/// A struct to find imports in the project, given a certain name (or its part) and the context.
#[derive(Debug)]
-pub struct ImportAssets {
- import_candidate: ImportCandidate,
+pub struct ImportAssets<'db> {
+ import_candidate: ImportCandidate<'db>,
candidate_node: SyntaxNode,
module_with_candidate: Module,
}
-impl ImportAssets {
+impl<'db> ImportAssets<'db> {
pub fn for_method_call(
method_call: &ast::MethodCallExpr,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
let candidate_node = method_call.syntax().clone();
Some(Self {
@@ -121,7 +121,7 @@
pub fn for_exact_path(
fully_qualified_path: &ast::Path,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
let candidate_node = fully_qualified_path.syntax().clone();
if let Some(use_tree) = candidate_node.ancestors().find_map(ast::UseTree::cast) {
@@ -139,7 +139,7 @@
})
}
- pub fn for_ident_pat(sema: &Semantics<'_, RootDatabase>, pat: &ast::IdentPat) -> Option<Self> {
+ pub fn for_ident_pat(sema: &Semantics<'db, RootDatabase>, pat: &ast::IdentPat) -> Option<Self> {
if !pat.is_simple_ident() {
return None;
}
@@ -156,7 +156,7 @@
module_with_candidate: Module,
qualifier: Option<ast::Path>,
fuzzy_name: String,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
candidate_node: SyntaxNode,
) -> Option<Self> {
Some(Self {
@@ -168,7 +168,7 @@
pub fn for_fuzzy_method_call(
module_with_method_call: Module,
- receiver_ty: Type,
+ receiver_ty: Type<'db>,
fuzzy_method_name: String,
candidate_node: SyntaxNode,
) -> Option<Self> {
@@ -229,14 +229,14 @@
}
}
-impl ImportAssets {
- pub fn import_candidate(&self) -> &ImportCandidate {
+impl<'db> ImportAssets<'db> {
+ pub fn import_candidate(&self) -> &ImportCandidate<'db> {
&self.import_candidate
}
pub fn search_for_imports(
&self,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
cfg: ImportPathConfig,
prefix_kind: PrefixKind,
) -> impl Iterator<Item = LocatedImport> {
@@ -247,7 +247,7 @@
/// This may return non-absolute paths if a part of the returned path is already imported into scope.
pub fn search_for_relative_paths(
&self,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::info_span!("ImportAssets::search_for_relative_paths").entered();
@@ -286,7 +286,7 @@
fn search_for(
&self,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
prefixed: Option<PrefixKind>,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
@@ -533,11 +533,11 @@
})
}
-fn trait_applicable_items(
- db: &RootDatabase,
+fn trait_applicable_items<'db>(
+ db: &'db RootDatabase,
current_crate: Crate,
- scope: &SemanticsScope<'_>,
- trait_candidate: &TraitImportCandidate,
+ scope: &SemanticsScope<'db>,
+ trait_candidate: &TraitImportCandidate<'db>,
trait_assoc_item: bool,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
scope_filter: impl Fn(hir::Trait) -> bool,
@@ -709,9 +709,9 @@
}
}
-impl ImportCandidate {
+impl<'db> ImportCandidate<'db> {
fn for_method_call(
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
method_call: &ast::MethodCallExpr,
) -> Option<Self> {
match sema.resolve_method_call(method_call) {
@@ -725,7 +725,7 @@
}
}
- fn for_regular_path(sema: &Semantics<'_, RootDatabase>, path: &ast::Path) -> Option<Self> {
+ fn for_regular_path(sema: &Semantics<'db, RootDatabase>, path: &ast::Path) -> Option<Self> {
if sema.resolve_path(path).is_some() {
return None;
}
@@ -736,7 +736,7 @@
)
}
- fn for_name(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<Self> {
+ fn for_name(sema: &Semantics<'db, RootDatabase>, name: &ast::Name) -> Option<Self> {
if sema
.scope(name.syntax())?
.speculative_resolve(&make::ext::ident_path(&name.text()))
@@ -753,17 +753,17 @@
fn for_fuzzy_path(
qualifier: Option<ast::Path>,
fuzzy_name: String,
- sema: &Semantics<'_, RootDatabase>,
+ sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name))
}
}
-fn path_import_candidate(
- sema: &Semantics<'_, RootDatabase>,
+fn path_import_candidate<'db>(
+ sema: &Semantics<'db, RootDatabase>,
qualifier: Option<ast::Path>,
name: NameToImport,
-) -> Option<ImportCandidate> {
+) -> Option<ImportCandidate<'db>> {
Some(match qualifier {
Some(qualifier) => match sema.resolve_path(&qualifier) {
Some(PathResolution::Def(ModuleDef::BuiltinType(_))) | None => {
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index c5ad64e..7d460f7 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -429,7 +429,7 @@
/// The container of our definition should it be an assoc item
assoc_item_container: Option<hir::AssocItemContainer>,
/// whether to search for the `Self` type of the definition
- include_self_kw_refs: Option<hir::Type>,
+ include_self_kw_refs: Option<hir::Type<'a>>,
/// whether to search for the `self` module
search_self_mod: bool,
}
@@ -1087,12 +1087,12 @@
fn found_self_ty_name_ref(
&self,
- self_ty: &hir::Type,
+ self_ty: &hir::Type<'_>,
name_ref: &ast::NameRef,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
// See https://github.com/rust-lang/rust-analyzer/pull/15864/files/e0276dc5ddc38c65240edb408522bb869f15afb4#r1389848845
- let ty_eq = |ty: hir::Type| match (ty.as_adt(), self_ty.as_adt()) {
+ let ty_eq = |ty: hir::Type<'_>| match (ty.as_adt(), self_ty.as_adt()) {
(Some(ty), Some(self_ty)) => ty == self_ty,
(None, None) => ty == *self_ty,
_ => false,
@@ -1315,7 +1315,7 @@
}
}
-fn def_to_ty(sema: &Semantics<'_, RootDatabase>, def: &Definition) -> Option<hir::Type> {
+fn def_to_ty<'db>(sema: &Semantics<'db, RootDatabase>, def: &Definition) -> Option<hir::Type<'db>> {
match def {
Definition::Adt(adt) => Some(adt.ty(sema.db)),
Definition::TypeAlias(it) => Some(it.ty(sema.db)),
diff --git a/crates/ide-db/src/syntax_helpers/suggest_name.rs b/crates/ide-db/src/syntax_helpers/suggest_name.rs
index 9b9f450..995bf72 100644
--- a/crates/ide-db/src/syntax_helpers/suggest_name.rs
+++ b/crates/ide-db/src/syntax_helpers/suggest_name.rs
@@ -151,10 +151,10 @@
/// - If `ty` is an `impl Trait`, it will suggest the name of the first trait.
///
/// If the suggested name conflicts with reserved keywords, it will return `None`.
- pub fn for_type(
+ pub fn for_type<'db>(
&mut self,
- ty: &hir::Type,
- db: &RootDatabase,
+ ty: &hir::Type<'db>,
+ db: &'db RootDatabase,
edition: Edition,
) -> Option<SmolStr> {
let name = name_of_type(ty, db, edition)?;
@@ -373,7 +373,11 @@
name_of_type(&ty, sema.db, edition)
}
-fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option<SmolStr> {
+fn name_of_type<'db>(
+ ty: &hir::Type<'db>,
+ db: &'db RootDatabase,
+ edition: Edition,
+) -> Option<SmolStr> {
let name = if let Some(adt) = ty.as_adt() {
let name = adt.name(db).display(db, edition).to_string();
@@ -407,7 +411,11 @@
normalize(&name)
}
-fn sequence_name(inner_ty: Option<&hir::Type>, db: &RootDatabase, edition: Edition) -> SmolStr {
+fn sequence_name<'db>(
+ inner_ty: Option<&hir::Type<'db>>,
+ db: &'db RootDatabase,
+ edition: Edition,
+) -> SmolStr {
let items_str = SmolStr::new_static("items");
let Some(inner_ty) = inner_ty else {
return items_str;
diff --git a/crates/ide-db/src/ty_filter.rs b/crates/ide-db/src/ty_filter.rs
index 63ce0dd..095256d 100644
--- a/crates/ide-db/src/ty_filter.rs
+++ b/crates/ide-db/src/ty_filter.rs
@@ -10,7 +10,7 @@
use crate::RootDatabase;
/// Enum types that implement `std::ops::Try` trait.
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
pub enum TryEnum {
Result,
Option,
@@ -20,7 +20,7 @@
const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result];
/// Returns `Some(..)` if the provided type is an enum that implements `std::ops::Try`.
- pub fn from_ty(sema: &Semantics<'_, RootDatabase>, ty: &hir::Type) -> Option<TryEnum> {
+ pub fn from_ty(sema: &Semantics<'_, RootDatabase>, ty: &hir::Type<'_>) -> Option<TryEnum> {
let enum_ = match ty.as_adt() {
Some(hir::Adt::Enum(it)) => it,
_ => return None,
diff --git a/crates/ide-db/src/use_trivial_constructor.rs b/crates/ide-db/src/use_trivial_constructor.rs
index a4a93e3..f63cd92 100644
--- a/crates/ide-db/src/use_trivial_constructor.rs
+++ b/crates/ide-db/src/use_trivial_constructor.rs
@@ -11,7 +11,7 @@
pub fn use_trivial_constructor(
db: &crate::RootDatabase,
path: Path,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
edition: Edition,
) -> Option<Expr> {
match ty.as_adt() {
diff --git a/crates/ide-diagnostics/src/handlers/expected_function.rs b/crates/ide-diagnostics/src/handlers/expected_function.rs
index 7d2ac37..afd1687 100644
--- a/crates/ide-diagnostics/src/handlers/expected_function.rs
+++ b/crates/ide-diagnostics/src/handlers/expected_function.rs
@@ -7,7 +7,7 @@
// This diagnostic is triggered if a call is made on something that is not callable.
pub(crate) fn expected_function(
ctx: &DiagnosticsContext<'_>,
- d: &hir::ExpectedFunction,
+ d: &hir::ExpectedFunction<'_>,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
diff --git a/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
index 7a6e98f..a59077b 100644
--- a/crates/ide-diagnostics/src/handlers/invalid_cast.rs
+++ b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
@@ -18,7 +18,7 @@
// Diagnostic: invalid-cast
//
// This diagnostic is triggered if the code contains an illegal cast
-pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast) -> Diagnostic {
+pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast<'_>) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
let (code, message) = match d.error {
CastError::CastToBool => (
@@ -106,7 +106,10 @@
// Diagnostic: cast-to-unsized
//
// This diagnostic is triggered when casting to an unsized type
-pub(crate) fn cast_to_unsized(ctx: &DiagnosticsContext<'_>, d: &hir::CastToUnsized) -> Diagnostic {
+pub(crate) fn cast_to_unsized(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::CastToUnsized<'_>,
+) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
Diagnostic::new(
DiagnosticCode::RustcHardError("E0620"),
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 2b76efb..8a5d82b 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -106,7 +106,7 @@
}
});
- let generate_fill_expr = |ty: &Type| match ctx.config.expr_fill_default {
+ let generate_fill_expr = |ty: &Type<'_>| match ctx.config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => {
@@ -180,7 +180,7 @@
}
fn make_ty(
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
db: &dyn HirDatabase,
module: hir::Module,
edition: Edition,
@@ -198,7 +198,7 @@
fn get_default_constructor(
ctx: &DiagnosticsContext<'_>,
d: &hir::MissingFields,
- ty: &Type,
+ ty: &Type<'_>,
) -> Option<ast::Expr> {
if let Some(builtin_ty) = ty.as_builtin() {
if builtin_ty.is_int() || builtin_ty.is_uint() {
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 6bd5417..d8f6e81 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -630,6 +630,17 @@
// Checks that we don't place orphan arguments for formatting under an unsafe block.
check_diagnostics(
r#"
+//- minicore: fmt_before_1_89_0
+fn foo() {
+ let p = 0xDEADBEEF as *const i32;
+ format_args!("", *p);
+ // ^^ error: dereference of raw pointer is unsafe and requires an unsafe function or block
+}
+ "#,
+ );
+
+ check_diagnostics(
+ r#"
//- minicore: fmt
fn foo() {
let p = 0xDEADBEEF as *const i32;
@@ -958,4 +969,18 @@
"#,
);
}
+
+ #[test]
+ fn no_false_positive_on_format_args_since_1_89_0() {
+ check_diagnostics(
+ r#"
+//- minicore: fmt
+fn test() {
+ let foo = 10;
+ let bar = true;
+ let _x = format_args!("{} {0} {} {last}", foo, bar, last = "!");
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 01cf5e8..0928262 100644
--- a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -4,7 +4,10 @@
// Diagnostic: moved-out-of-ref
//
// This diagnostic is triggered on moving non copy things out of references.
-pub(crate) fn moved_out_of_ref(ctx: &DiagnosticsContext<'_>, d: &hir::MovedOutOfRef) -> Diagnostic {
+pub(crate) fn moved_out_of_ref(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::MovedOutOfRef<'_>,
+) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0507"),
@@ -217,4 +220,23 @@
"#,
)
}
+
+ #[test]
+ fn regression_18201() {
+ check_diagnostics(
+ r#"
+//- minicore: copy
+struct NotCopy;
+struct S(NotCopy);
+impl S {
+ fn f(&mut self) {
+ || {
+ if let ref mut _cb = self.0 {
+ }
+ };
+ }
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs
index ef42f2d..0edab5e 100644
--- a/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -1,5 +1,4 @@
use either::Either;
-use hir::{Field, HasCrate};
use hir::{HasSource, HirDisplay, Semantics, VariantId, db::ExpandDatabase};
use ide_db::text_edit::TextEdit;
use ide_db::{EditionedFileId, RootDatabase, source_change::SourceChange};
@@ -8,7 +7,10 @@
ast::{self, edit::IndentLevel, make},
};
-use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
+use crate::{
+ Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, fix,
+ handlers::private_field::field_is_private_fixes,
+};
// Diagnostic: no-such-field
//
@@ -37,8 +39,8 @@
field_is_private_fixes(
&ctx.sema,
d.field.file_id.original_file(ctx.sema.db),
- node,
private_field,
+ ctx.sema.original_range(node.syntax()).range,
)
} else {
missing_record_expr_field_fixes(
@@ -52,31 +54,6 @@
}
}
-fn field_is_private_fixes(
- sema: &Semantics<'_, RootDatabase>,
- usage_file_id: EditionedFileId,
- record_expr_field: &ast::RecordExprField,
- private_field: Field,
-) -> Option<Vec<Assist>> {
- let def_crate = private_field.krate(sema.db);
- let usage_crate = sema.file_to_module_def(usage_file_id.file_id(sema.db))?.krate();
- let visibility = if usage_crate == def_crate { "pub(crate) " } else { "pub " };
-
- let source = private_field.source(sema.db)?;
- let (range, _) = source.syntax().original_file_range_opt(sema.db)?;
- let source_change = SourceChange::from_text_edit(
- range.file_id.file_id(sema.db),
- TextEdit::insert(range.range.start(), visibility.into()),
- );
-
- Some(vec![fix(
- "increase_field_visibility",
- "Increase field visibility",
- source_change,
- sema.original_range(record_expr_field.syntax()).range,
- )])
-}
-
fn missing_record_expr_field_fixes(
sema: &Semantics<'_, RootDatabase>,
usage_file_id: EditionedFileId,
diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs
index 5b4273a..69cd0d2 100644
--- a/crates/ide-diagnostics/src/handlers/private_field.rs
+++ b/crates/ide-diagnostics/src/handlers/private_field.rs
@@ -1,4 +1,8 @@
-use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use hir::{EditionedFileId, FileRange, HasCrate, HasSource, Semantics};
+use ide_db::{RootDatabase, assists::Assist, source_change::SourceChange, text_edit::TextEdit};
+use syntax::{AstNode, TextRange, TextSize, ast::HasVisibility};
+
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
// Diagnostic: private-field
//
@@ -16,11 +20,59 @@
d.expr.map(|it| it.into()),
)
.stable()
+ .with_fixes(field_is_private_fixes(
+ &ctx.sema,
+ d.expr.file_id.original_file(ctx.sema.db),
+ d.field,
+ ctx.sema.original_range(d.expr.to_node(ctx.sema.db).syntax()).range,
+ ))
+}
+
+pub(crate) fn field_is_private_fixes(
+ sema: &Semantics<'_, RootDatabase>,
+ usage_file_id: EditionedFileId,
+ private_field: hir::Field,
+ fix_range: TextRange,
+) -> Option<Vec<Assist>> {
+ let def_crate = private_field.krate(sema.db);
+ let usage_crate = sema.file_to_module_def(usage_file_id.file_id(sema.db))?.krate();
+ let mut visibility_text = if usage_crate == def_crate { "pub(crate) " } else { "pub " };
+
+ let source = private_field.source(sema.db)?;
+ let existing_visibility = match &source.value {
+ hir::FieldSource::Named(it) => it.visibility(),
+ hir::FieldSource::Pos(it) => it.visibility(),
+ };
+ let range = match existing_visibility {
+ Some(visibility) => {
+ // If there is an existing visibility, don't insert whitespace after.
+ visibility_text = visibility_text.trim_end();
+ source.with_value(visibility.syntax()).original_file_range_opt(sema.db)?.0
+ }
+ None => {
+ let (range, _) = source.syntax().original_file_range_opt(sema.db)?;
+ FileRange {
+ file_id: range.file_id,
+ range: TextRange::at(range.range.start(), TextSize::new(0)),
+ }
+ }
+ };
+ let source_change = SourceChange::from_text_edit(
+ range.file_id.file_id(sema.db),
+ TextEdit::replace(range.range, visibility_text.into()),
+ );
+
+ Some(vec![fix(
+ "increase_field_visibility",
+ "Increase field visibility",
+ source_change,
+ fix_range,
+ )])
}
#[cfg(test)]
mod tests {
- use crate::tests::check_diagnostics;
+ use crate::tests::{check_diagnostics, check_fix};
#[test]
fn private_field() {
@@ -29,7 +81,7 @@
mod module { pub struct Struct { field: u32 } }
fn main(s: module::Struct) {
s.field;
- //^^^^^^^ error: field `field` of `Struct` is private
+  //^^^^^^^ 💡 error: field `field` of `Struct` is private
}
"#,
);
@@ -42,7 +94,7 @@
mod module { pub struct Struct(u32); }
fn main(s: module::Struct) {
s.0;
- //^^^ error: field `0` of `Struct` is private
+  //^^^ 💡 error: field `0` of `Struct` is private
}
"#,
);
@@ -113,4 +165,68 @@
"#,
);
}
+
+ #[test]
+ fn change_visibility_fix() {
+ check_fix(
+ r#"
+pub mod foo {
+ pub mod bar {
+ pub struct Struct {
+ field: i32,
+ }
+ }
+}
+
+fn foo(v: foo::bar::Struct) {
+ v.field$0;
+}
+ "#,
+ r#"
+pub mod foo {
+ pub mod bar {
+ pub struct Struct {
+ pub(crate) field: i32,
+ }
+ }
+}
+
+fn foo(v: foo::bar::Struct) {
+ v.field;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn change_visibility_with_existing_visibility() {
+ check_fix(
+ r#"
+pub mod foo {
+ pub mod bar {
+ pub struct Struct {
+ pub(super) field: i32,
+ }
+ }
+}
+
+fn foo(v: foo::bar::Struct) {
+ v.field$0;
+}
+ "#,
+ r#"
+pub mod foo {
+ pub mod bar {
+ pub struct Struct {
+ pub(crate) field: i32,
+ }
+ }
+}
+
+fn foo(v: foo::bar::Struct) {
+ v.field;
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 076df1a..e2957fc 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -20,7 +20,7 @@
//
// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
-pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
+pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<'_>) -> Diagnostic {
let display_range = adjusted_display_range(ctx, d.expr_or_pat, &|node| {
let Either::Left(expr) = node else { return None };
let salient_token_range = match expr {
@@ -39,7 +39,7 @@
cov_mark::hit!(type_mismatch_range_adjustment);
Some(salient_token_range)
});
- let mut diag = Diagnostic::new(
+ Diagnostic::new(
DiagnosticCode::RustcHardError("E0308"),
format!(
"expected {}, found {}",
@@ -52,14 +52,10 @@
),
display_range,
)
- .with_fixes(fixes(ctx, d));
- if diag.fixes.is_some() {
- diag.experimental = false;
- }
- diag
+ .with_fixes(fixes(ctx, d))
}
-fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<'_>) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
if let Some(expr_ptr) = d.expr_or_pat.value.cast::<ast::Expr>() {
@@ -76,7 +72,7 @@
fn add_reference(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -98,7 +94,7 @@
fn add_missing_ok_or_some(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -188,7 +184,7 @@
fn remove_unnecessary_wrapper(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -271,7 +267,7 @@
fn remove_semicolon(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
@@ -301,7 +297,7 @@
fn str_ref_to_owned(
ctx: &DiagnosticsContext<'_>,
- d: &hir::TypeMismatch,
+ d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs
index 1915a88..8d42770 100644
--- a/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -20,7 +20,7 @@
// Diagnostic: typed-hole
//
// This diagnostic is triggered when an underscore expression is used in an invalid position.
-pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Diagnostic {
+pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole<'_>) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
let (message, fixes) = if d.expected.is_unknown() {
("`_` expressions may only appear on the left-hand side of an assignment".to_owned(), None)
@@ -41,7 +41,7 @@
.with_fixes(fixes)
}
-fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole<'_>) -> Option<Vec<Assist>> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.expr.file_id);
let (original_range, _) =
@@ -61,7 +61,7 @@
};
let paths = term_search(&term_search_ctx);
- let mut formatter = |_: &hir::Type| String::from("_");
+ let mut formatter = |_: &hir::Type<'_>| String::from("_");
let assists: Vec<Assist> = d
.expected
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 0649c97..6901589 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -25,7 +25,7 @@
// This diagnostic is triggered if a field does not exist on a given type.
pub(crate) fn unresolved_field(
ctx: &DiagnosticsContext<'_>,
- d: &hir::UnresolvedField,
+ d: &hir::UnresolvedField<'_>,
) -> Diagnostic {
let method_suffix = if d.method_with_same_name_exists {
", but a method with a similar name exists"
@@ -54,7 +54,7 @@
.with_fixes(fixes(ctx, d))
}
-fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<Assist>> {
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField<'_>) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
if d.method_with_same_name_exists {
fixes.extend(method_fix(ctx, &d.expr));
@@ -64,7 +64,7 @@
}
// FIXME: Add Snippet Support
-fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Assist> {
+fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField<'_>) -> Option<Assist> {
// Get the FileRange of the invalid field access
let root = ctx.sema.db.parse_or_expand(d.expr.file_id);
let expr = d.expr.value.to_node(&root).left()?;
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 00c2a8c..1f2d671 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -18,7 +18,7 @@
// This diagnostic is triggered if a method does not exist on a given type.
pub(crate) fn unresolved_method(
ctx: &DiagnosticsContext<'_>,
- d: &hir::UnresolvedMethodCall,
+ d: &hir::UnresolvedMethodCall<'_>,
) -> Diagnostic {
let suffix = if d.field_with_same_name.is_some() {
", but a field with a similar name exists"
@@ -49,7 +49,7 @@
.with_fixes(fixes(ctx, d))
}
-fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Vec<Assist>> {
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall<'_>) -> Option<Vec<Assist>> {
let field_fix = if let Some(ty) = &d.field_with_same_name {
field_fix(ctx, d, ty)
} else {
@@ -72,8 +72,8 @@
fn field_fix(
ctx: &DiagnosticsContext<'_>,
- d: &hir::UnresolvedMethodCall,
- ty: &hir::Type,
+ d: &hir::UnresolvedMethodCall<'_>,
+ ty: &hir::Type<'_>,
) -> Option<Assist> {
if !ty.impls_fnonce(ctx.sema.db) {
return None;
@@ -107,7 +107,10 @@
})
}
-fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Assist> {
+fn assoc_func_fix(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedMethodCall<'_>,
+) -> Option<Assist> {
if let Some(f) = d.assoc_func_with_same_name {
let db = ctx.sema.db;
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index 43c56ac..e4b20f3 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -110,7 +110,7 @@
pub struct MatchFinder<'db> {
/// Our source of information about the user's code.
sema: Semantics<'db, ide_db::RootDatabase>,
- rules: Vec<ResolvedRule>,
+ rules: Vec<ResolvedRule<'db>>,
resolution_scope: resolving::ResolutionScope<'db>,
restrict_ranges: Vec<ide_db::FileRange>,
}
diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs
index cff4eed..b350315 100644
--- a/crates/ide-ssr/src/matching.rs
+++ b/crates/ide-ssr/src/matching.rs
@@ -84,12 +84,12 @@
/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
/// the match, if it does. Since we only do matching in this module and searching is done by the
/// parent module, we don't populate nested matches.
-pub(crate) fn get_match(
+pub(crate) fn get_match<'db>(
debug_active: bool,
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
- sema: &Semantics<'_, ide_db::RootDatabase>,
+ sema: &Semantics<'db, ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> {
record_match_fails_reasons_scope(debug_active, || {
Matcher::try_match(rule, code, restrict_range, sema)
@@ -102,7 +102,7 @@
/// If any placeholders come from anywhere outside of this range, then the match will be
/// rejected.
restrict_range: Option<FileRange>,
- rule: &'sema ResolvedRule,
+ rule: &'sema ResolvedRule<'db>,
}
/// Which phase of matching we're currently performing. We do two phases because most attempted
@@ -117,7 +117,7 @@
impl<'db, 'sema> Matcher<'db, 'sema> {
fn try_match(
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
sema: &'sema Semantics<'db, ide_db::RootDatabase>,
@@ -535,7 +535,7 @@
fn attempt_match_ufcs_to_method_call(
&self,
phase: &mut Phase<'_>,
- pattern_ufcs: &UfcsCallInfo,
+ pattern_ufcs: &UfcsCallInfo<'db>,
code: &ast::MethodCallExpr,
) -> Result<(), MatchFailed> {
use ast::HasArgList;
@@ -597,7 +597,7 @@
fn attempt_match_ufcs_to_ufcs(
&self,
phase: &mut Phase<'_>,
- pattern_ufcs: &UfcsCallInfo,
+ pattern_ufcs: &UfcsCallInfo<'db>,
code: &ast::CallExpr,
) -> Result<(), MatchFailed> {
use ast::HasArgList;
@@ -615,7 +615,7 @@
/// times. Returns the number of times it needed to be dereferenced.
fn check_expr_type(
&self,
- pattern_type: &hir::Type,
+ pattern_type: &hir::Type<'db>,
expr: &ast::Expr,
) -> Result<usize, MatchFailed> {
use hir::HirDisplay;
@@ -656,10 +656,10 @@
}
impl Match {
- fn render_template_paths(
+ fn render_template_paths<'db>(
&mut self,
- template: &ResolvedPattern,
- sema: &Semantics<'_, ide_db::RootDatabase>,
+ template: &ResolvedPattern<'db>,
+ sema: &Semantics<'db, ide_db::RootDatabase>,
) -> Result<(), MatchFailed> {
let module = sema
.scope(&self.matched_node)
diff --git a/crates/ide-ssr/src/replacing.rs b/crates/ide-ssr/src/replacing.rs
index 3c92697..752edd6 100644
--- a/crates/ide-ssr/src/replacing.rs
+++ b/crates/ide-ssr/src/replacing.rs
@@ -14,21 +14,21 @@
/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
/// template. Placeholders in the template will have been substituted with whatever they matched to
/// in the original code.
-pub(crate) fn matches_to_edit(
- db: &dyn hir::db::ExpandDatabase,
+pub(crate) fn matches_to_edit<'db>(
+ db: &'db dyn hir::db::ExpandDatabase,
matches: &SsrMatches,
file_src: &str,
- rules: &[ResolvedRule],
+ rules: &[ResolvedRule<'db>],
) -> TextEdit {
matches_to_edit_at_offset(db, matches, file_src, 0.into(), rules)
}
-fn matches_to_edit_at_offset(
- db: &dyn hir::db::ExpandDatabase,
+fn matches_to_edit_at_offset<'db>(
+ db: &'db dyn hir::db::ExpandDatabase,
matches: &SsrMatches,
file_src: &str,
relative_start: TextSize,
- rules: &[ResolvedRule],
+ rules: &[ResolvedRule<'db>],
) -> TextEdit {
let mut edit_builder = TextEdit::builder();
for m in &matches.matches {
@@ -40,12 +40,12 @@
edit_builder.finish()
}
-struct ReplacementRenderer<'a> {
- db: &'a dyn hir::db::ExpandDatabase,
+struct ReplacementRenderer<'a, 'db> {
+ db: &'db dyn hir::db::ExpandDatabase,
match_info: &'a Match,
file_src: &'a str,
- rules: &'a [ResolvedRule],
- rule: &'a ResolvedRule,
+ rules: &'a [ResolvedRule<'db>],
+ rule: &'a ResolvedRule<'db>,
out: String,
// Map from a range within `out` to a token in `template` that represents a placeholder. This is
// used to validate that the generated source code doesn't split any placeholder expansions (see
@@ -58,11 +58,11 @@
edition: Edition,
}
-fn render_replace(
- db: &dyn hir::db::ExpandDatabase,
+fn render_replace<'db>(
+ db: &'db dyn hir::db::ExpandDatabase,
match_info: &Match,
file_src: &str,
- rules: &[ResolvedRule],
+ rules: &[ResolvedRule<'db>],
edition: Edition,
) -> String {
let rule = &rules[match_info.rule_index];
@@ -89,7 +89,7 @@
renderer.out
}
-impl ReplacementRenderer<'_> {
+impl<'db> ReplacementRenderer<'_, 'db> {
fn render_node_children(&mut self, node: &SyntaxNode) {
for node_or_token in node.children_with_tokens() {
self.render_node_or_token(&node_or_token);
diff --git a/crates/ide-ssr/src/resolving.rs b/crates/ide-ssr/src/resolving.rs
index a687db4..8f28a1c 100644
--- a/crates/ide-ssr/src/resolving.rs
+++ b/crates/ide-ssr/src/resolving.rs
@@ -15,18 +15,18 @@
node: SyntaxNode,
}
-pub(crate) struct ResolvedRule {
- pub(crate) pattern: ResolvedPattern,
- pub(crate) template: Option<ResolvedPattern>,
+pub(crate) struct ResolvedRule<'db> {
+ pub(crate) pattern: ResolvedPattern<'db>,
+ pub(crate) template: Option<ResolvedPattern<'db>>,
pub(crate) index: usize,
}
-pub(crate) struct ResolvedPattern {
+pub(crate) struct ResolvedPattern<'db> {
pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
pub(crate) node: SyntaxNode,
// Paths in `node` that we've resolved.
pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
- pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
+ pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo<'db>>,
pub(crate) contains_self: bool,
}
@@ -36,18 +36,18 @@
pub(crate) depth: u32,
}
-pub(crate) struct UfcsCallInfo {
+pub(crate) struct UfcsCallInfo<'db> {
pub(crate) call_expr: ast::CallExpr,
pub(crate) function: hir::Function,
- pub(crate) qualifier_type: Option<hir::Type>,
+ pub(crate) qualifier_type: Option<hir::Type<'db>>,
}
-impl ResolvedRule {
+impl<'db> ResolvedRule<'db> {
pub(crate) fn new(
rule: parsing::ParsedRule,
- resolution_scope: &ResolutionScope<'_>,
+ resolution_scope: &ResolutionScope<'db>,
index: usize,
- ) -> Result<ResolvedRule, SsrError> {
+ ) -> Result<ResolvedRule<'db>, SsrError> {
let resolver =
Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
let resolved_template = match rule.template {
@@ -74,8 +74,8 @@
placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
}
-impl Resolver<'_, '_> {
- fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
+impl<'db> Resolver<'_, 'db> {
+ fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern<'db>, SsrError> {
use syntax::ast::AstNode;
use syntax::{SyntaxElement, T};
let mut resolved_paths = FxHashMap::default();
@@ -250,7 +250,7 @@
}
}
- fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
+ fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type<'db>> {
use syntax::ast::AstNode;
if let Some(path) = ast::Path::cast(path.clone()) {
if let Some(qualifier) = path.qualifier() {
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs
index 9afbedb..99a98fb 100644
--- a/crates/ide-ssr/src/search.rs
+++ b/crates/ide-ssr/src/search.rs
@@ -21,13 +21,13 @@
usages: Vec<(Definition, UsageSearchResult)>,
}
-impl MatchFinder<'_> {
+impl<'db> MatchFinder<'db> {
/// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
/// replacement impossible, so further processing is required in order to properly nest matches
/// and remove overlapping matches. This is done in the `nesting` module.
pub(crate) fn find_matches_for_rule(
&self,
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
usage_cache: &mut UsageCache,
matches_out: &mut Vec<Match>,
) {
@@ -49,8 +49,8 @@
fn find_matches_for_pattern_tree(
&self,
- rule: &ResolvedRule,
- pattern: &ResolvedPattern,
+ rule: &ResolvedRule<'db>,
+ pattern: &ResolvedPattern<'db>,
usage_cache: &mut UsageCache,
matches_out: &mut Vec<Match>,
) {
@@ -144,7 +144,7 @@
SearchScope::files(&files)
}
- fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
+ fn slow_scan(&self, rule: &ResolvedRule<'db>, matches_out: &mut Vec<Match>) {
self.search_files_do(|file_id| {
let file = self.sema.parse_guess_edition(file_id);
let code = file.syntax();
@@ -177,7 +177,7 @@
fn slow_scan_node(
&self,
code: &SyntaxNode,
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
restrict_range: &Option<FileRange>,
matches_out: &mut Vec<Match>,
) {
@@ -206,7 +206,7 @@
fn try_add_match(
&self,
- rule: &ResolvedRule,
+ rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
matches_out: &mut Vec<Match>,
@@ -274,7 +274,7 @@
/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
/// longest as this is hopefully more likely to be less common, making it faster to find.
-fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
+fn pick_path_for_usages<'a>(pattern: &'a ResolvedPattern<'_>) -> Option<&'a ResolvedPath> {
// FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
// private to the current module, then we definitely would want to pick them over say a path
// from std. Possibly we should go further than this and intersect the search scopes for all
diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs
index 1bc28f2..02d96a6 100644
--- a/crates/ide/src/goto_implementation.rs
+++ b/crates/ide/src/goto_implementation.rs
@@ -83,7 +83,7 @@
Some(RangeInfo { range, info: navs })
}
-fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<NavigationTarget> {
+fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type<'_>) -> Vec<NavigationTarget> {
Impl::all_for_type(sema.db, ty)
.into_iter()
.filter_map(|imp| imp.try_to_nav(sema.db))
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index 9781e71..86d72fe 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -38,7 +38,7 @@
}
}
};
- let mut process_ty = |ty: hir::Type| {
+ let mut process_ty = |ty: hir::Type<'_>| {
// collect from each `ty` into the `res` result vec
let ty = ty.strip_references();
ty.walk(db, |t| {
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 5404a9d..e4d6279 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -426,7 +426,7 @@
sema: &Semantics<'_, RootDatabase>,
file_id: FileId,
def: Definition,
- subst: Option<GenericSubstitution>,
+ subst: Option<GenericSubstitution<'_>>,
scope_node: &SyntaxNode,
macro_arm: Option<u32>,
render_extras: bool,
@@ -483,10 +483,10 @@
}
}
-fn notable_traits(
- db: &RootDatabase,
- ty: &hir::Type,
-) -> Vec<(hir::Trait, Vec<(Option<hir::Type>, hir::Name)>)> {
+fn notable_traits<'db>(
+ db: &'db RootDatabase,
+ ty: &hir::Type<'db>,
+) -> Vec<(hir::Trait, Vec<(Option<hir::Type<'db>>, hir::Name)>)> {
db.notable_traits_in_deps(ty.krate(db).into())
.iter()
.flat_map(|it| &**it)
@@ -567,8 +567,8 @@
fn goto_type_action_for_def(
db: &RootDatabase,
def: Definition,
- notable_traits: &[(hir::Trait, Vec<(Option<hir::Type>, hir::Name)>)],
- subst_types: Option<Vec<(hir::Symbol, hir::Type)>>,
+ notable_traits: &[(hir::Trait, Vec<(Option<hir::Type<'_>>, hir::Name)>)],
+ subst_types: Option<Vec<(hir::Symbol, hir::Type<'_>)>>,
edition: Edition,
) -> Option<HoverAction> {
let mut targets: Vec<hir::ModuleDef> = Vec::new();
@@ -622,7 +622,7 @@
fn walk_and_push_ty(
db: &RootDatabase,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
push_new_def: &mut dyn FnMut(hir::ModuleDef),
) {
ty.walk(db, |t| {
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index c24864a..670210d 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -476,10 +476,10 @@
db: &RootDatabase,
def: Definition,
famous_defs: Option<&FamousDefs<'_, '_>>,
- notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
+ notable_traits: &[(Trait, Vec<(Option<Type<'_>>, Name)>)],
macro_arm: Option<u32>,
render_extras: bool,
- subst_types: Option<&Vec<(Symbol, Type)>>,
+ subst_types: Option<&Vec<(Symbol, Type<'_>)>>,
config: &HoverConfig,
edition: Edition,
display_target: DisplayTarget,
@@ -938,7 +938,7 @@
fn render_notable_trait(
db: &RootDatabase,
- notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
+ notable_traits: &[(Trait, Vec<(Option<Type<'_>>, Name)>)],
edition: Edition,
display_target: DisplayTarget,
) -> Option<String> {
@@ -979,7 +979,7 @@
fn type_info(
sema: &Semantics<'_, RootDatabase>,
config: &HoverConfig,
- ty: TypeInfo,
+ ty: TypeInfo<'_>,
edition: Edition,
display_target: DisplayTarget,
) -> Option<HoverResult> {
@@ -1038,7 +1038,7 @@
fn closure_ty(
sema: &Semantics<'_, RootDatabase>,
config: &HoverConfig,
- TypeInfo { original, adjusted }: &TypeInfo,
+ TypeInfo { original, adjusted }: &TypeInfo<'_>,
edition: Edition,
display_target: DisplayTarget,
) -> Option<HoverResult> {
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index b094b09..19e5509 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -722,14 +722,14 @@
fn label_of_ty(
famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
display_target: DisplayTarget,
) -> Option<InlayHintLabel> {
fn rec(
sema: &Semantics<'_, RootDatabase>,
famous_defs: &FamousDefs<'_, '_>,
mut max_length: Option<usize>,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
label_builder: &mut InlayHintLabelBuilder<'_>,
config: &InlayHintsConfig,
display_target: DisplayTarget,
@@ -788,11 +788,11 @@
}
/// Checks if the type is an Iterator from std::iter and returns the iterator trait and the item type of the concrete iterator.
-fn hint_iterator(
- sema: &Semantics<'_, RootDatabase>,
- famous_defs: &FamousDefs<'_, '_>,
- ty: &hir::Type,
-) -> Option<(hir::Trait, hir::TypeAlias, hir::Type)> {
+fn hint_iterator<'db>(
+ sema: &Semantics<'db, RootDatabase>,
+ famous_defs: &FamousDefs<'_, 'db>,
+ ty: &hir::Type<'db>,
+) -> Option<(hir::Trait, hir::TypeAlias, hir::Type<'db>)> {
let db = sema.db;
let strukt = ty.strip_references().as_adt()?;
let krate = strukt.module(db).krate();
@@ -826,7 +826,7 @@
sema: &Semantics<'_, RootDatabase>,
config: &InlayHintsConfig,
node_for_hint: &SyntaxNode,
- ty: &hir::Type,
+ ty: &hir::Type<'_>,
offset_to_insert_ty: TextSize,
additional_edits: &dyn Fn(&mut TextEditBuilder),
prefix: impl Into<String>,
diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs
index 5ff9fee..5174228 100644
--- a/crates/ide/src/inlay_hints/param_name.rs
+++ b/crates/ide/src/inlay_hints/param_name.rs
@@ -87,10 +87,10 @@
Some(())
}
-fn get_callable(
- sema: &Semantics<'_, RootDatabase>,
+fn get_callable<'db>(
+ sema: &Semantics<'db, RootDatabase>,
expr: &ast::Expr,
-) -> Option<(hir::Callable, ast::ArgList)> {
+) -> Option<(hir::Callable<'db>, ast::ArgList)> {
match expr {
ast::Expr::CallExpr(expr) => {
let descended = sema.descend_node_into_attributes(expr.clone()).pop();
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 82dbcde..b3b8deb 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -409,7 +409,7 @@
self.with_db(|db| typing::on_enter(db, position))
}
- pub const SUPPORTED_TRIGGER_CHARS: &'static str = typing::TRIGGER_CHARS;
+ pub const SUPPORTED_TRIGGER_CHARS: &[char] = typing::TRIGGER_CHARS;
/// Returns an edit which should be applied after a character was typed.
///
@@ -421,7 +421,7 @@
char_typed: char,
) -> Cancellable<Option<SourceChange>> {
// Fast path to not even parse the file.
- if !typing::TRIGGER_CHARS.contains(char_typed) {
+ if !typing::TRIGGER_CHARS.contains(&char_typed) {
return Ok(None);
}
diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs
index 0e17b35..e30a3eb 100644
--- a/crates/ide/src/signature_help.rs
+++ b/crates/ide/src/signature_help.rs
@@ -278,7 +278,7 @@
}
res.signature.push(')');
- let mut render = |ret_type: hir::Type| {
+ let mut render = |ret_type: hir::Type<'_>| {
if !ret_type.is_unit() {
format_to!(res.signature, " -> {}", ret_type.display(db, display_target));
}
@@ -597,11 +597,11 @@
Some(res)
}
-fn signature_help_for_record_(
- sema: &Semantics<'_, RootDatabase>,
+fn signature_help_for_record_<'db>(
+ sema: &Semantics<'db, RootDatabase>,
field_list_children: SyntaxElementChildren,
path: &ast::Path,
- fields2: impl Iterator<Item = (hir::Field, hir::Type)>,
+ fields2: impl Iterator<Item = (hir::Field, hir::Type<'db>)>,
token: SyntaxToken,
edition: Edition,
display_target: DisplayTarget,
@@ -689,13 +689,13 @@
Some(res)
}
-fn signature_help_for_tuple_pat_ish(
- db: &RootDatabase,
+fn signature_help_for_tuple_pat_ish<'db>(
+ db: &'db RootDatabase,
mut res: SignatureHelp,
pat: &SyntaxNode,
token: SyntaxToken,
mut field_pats: AstChildren<ast::Pat>,
- fields: impl ExactSizeIterator<Item = hir::Type>,
+ fields: impl ExactSizeIterator<Item = hir::Type<'db>>,
display_target: DisplayTarget,
) -> SignatureHelp {
let rest_pat = field_pats.find(|it| matches!(it, ast::Pat::RestPat(_)));
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index 4df7e25..ed55ac5 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -15,6 +15,7 @@
mod on_enter;
+use either::Either;
use hir::EditionedFileId;
use ide_db::{FilePosition, RootDatabase, base_db::RootQueryDb};
use span::Edition;
@@ -33,7 +34,7 @@
pub(crate) use on_enter::on_enter;
// Don't forget to add new trigger characters to `server_capabilities` in `caps.rs`.
-pub(crate) const TRIGGER_CHARS: &str = ".=<>{(|";
+pub(crate) const TRIGGER_CHARS: &[char] = &['.', '=', '<', '>', '{', '(', '|', '+'];
struct ExtendedTextEdit {
edit: TextEdit,
@@ -66,7 +67,7 @@
position: FilePosition,
char_typed: char,
) -> Option<SourceChange> {
- if !stdx::always!(TRIGGER_CHARS.contains(char_typed)) {
+ if !TRIGGER_CHARS.contains(&char_typed) {
return None;
}
// FIXME: We need to figure out the edition of the file here, but that means hitting the
@@ -101,6 +102,7 @@
'>' => on_right_angle_typed(&file.tree(), offset),
'{' | '(' | '<' => on_opening_delimiter_typed(file, offset, char_typed, edition),
'|' => on_pipe_typed(&file.tree(), offset),
+ '+' => on_plus_typed(&file.tree(), offset),
_ => None,
}
.map(conv)
@@ -402,6 +404,28 @@
Some(TextEdit::insert(after_lpipe, "|".to_owned()))
}
+fn on_plus_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
+ let plus_token = file.syntax().token_at_offset(offset).right_biased()?;
+ if plus_token.kind() != SyntaxKind::PLUS {
+ return None;
+ }
+ let mut ancestors = plus_token.parent_ancestors();
+ ancestors.next().and_then(ast::TypeBoundList::cast)?;
+ let trait_type =
+ ancestors.next().and_then(<Either<ast::DynTraitType, ast::ImplTraitType>>::cast)?;
+ let kind = ancestors.next()?.kind();
+
+ if ast::RefType::can_cast(kind) || ast::PtrType::can_cast(kind) || ast::RetType::can_cast(kind)
+ {
+ let mut builder = TextEdit::builder();
+ builder.insert(trait_type.syntax().text_range().start(), "(".to_owned());
+ builder.insert(trait_type.syntax().text_range().end(), ")".to_owned());
+ Some(builder.finish())
+ } else {
+ None
+ }
+}
+
/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }`
fn on_right_angle_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
let file_text = file.syntax().text();
@@ -1597,4 +1621,64 @@
"#,
);
}
+
+ #[test]
+ fn adds_parentheses_around_trait_object_in_ref_type() {
+ type_char(
+ '+',
+ r#"
+fn foo(x: &dyn A$0) {}
+"#,
+ r#"
+fn foo(x: &(dyn A+)) {}
+"#,
+ );
+ type_char(
+ '+',
+ r#"
+fn foo(x: &'static dyn A$0B) {}
+"#,
+ r#"
+fn foo(x: &'static (dyn A+B)) {}
+"#,
+ );
+ type_char_noop(
+ '+',
+ r#"
+fn foo(x: &(dyn A$0)) {}
+"#,
+ );
+ type_char_noop(
+ '+',
+ r#"
+fn foo(x: Box<dyn A$0>) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn adds_parentheses_around_trait_object_in_ptr_type() {
+ type_char(
+ '+',
+ r#"
+fn foo(x: *const dyn A$0) {}
+"#,
+ r#"
+fn foo(x: *const (dyn A+)) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn adds_parentheses_around_trait_object_in_return_type() {
+ type_char(
+ '+',
+ r#"
+fn foo(x: fn() -> dyn A$0) {}
+"#,
+ r#"
+fn foo(x: fn() -> (dyn A+)) {}
+"#,
+ );
+ }
}
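Note on the new `'+'` trigger above: a trait object with an additional bound cannot appear bare behind `&`, a raw pointer, or a return type; the bound list has to be parenthesized, which is exactly the edit `on_plus_typed` makes while the second bound is being typed. A minimal standalone illustration of the underlying language rule (plain Rust, not part of the patch):

```rust
// Rejected by rustc: the `+` is ambiguous inside a reference type.
// fn takes(_: &dyn std::fmt::Debug + Send) {}

// Accepted: the trait object is parenthesized, matching the edit inserted above.
fn takes(_: &(dyn std::fmt::Debug + Send)) {}

fn main() {
    // `&i32` coerces to `&(dyn Debug + Send)` because `i32` is both `Debug` and `Send`.
    takes(&42_i32);
}
```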
diff --git a/crates/ide/src/view_memory_layout.rs b/crates/ide/src/view_memory_layout.rs
index 140ae42..63701a4 100644
--- a/crates/ide/src/view_memory_layout.rs
+++ b/crates/ide/src/view_memory_layout.rs
@@ -107,7 +107,7 @@
fn read_layout(
nodes: &mut Vec<MemoryLayoutNode>,
db: &RootDatabase,
- ty: &Type,
+ ty: &Type<'_>,
layout: &Layout,
parent_idx: usize,
display_target: DisplayTarget,
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index d5cbb73..adc5813 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -496,6 +496,7 @@
vectorcall,
wasm,
win64,
+ args,
array,
boxed_slice,
completions,
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 89b8631..52f5967 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -42,7 +42,7 @@
root: &Path,
cargo_config: &CargoConfig,
load_config: &LoadCargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroClient>)> {
let root = AbsPathBuf::assert_utf8(std::env::current_dir()?.join(root));
let root = ProjectManifest::discover_single(&root)?;
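The `+ Sync` bound added to the progress callback here (and on the `project-model` side below) is what lets the same `&dyn Fn(String)` be invoked from the scoped threads that now stream `cargo metadata` progress: a shared reference can only cross a thread boundary when the referent is `Sync`. A minimal sketch of the constraint, using only the standard library (function and message names are illustrative):

```rust
use std::thread;

fn run_with_progress(progress: &(dyn Fn(String) + Sync)) {
    thread::scope(|s| {
        // Both scoped threads borrow the same callback. This compiles only
        // because the trait object is `Sync`; with a plain `&dyn Fn(String)`
        // the spawned closures would not be `Send`.
        s.spawn(|| progress("cargo metadata: started".to_owned()));
        s.spawn(|| progress("cargo metadata: finished".to_owned()));
    });
}

fn main() {
    run_with_progress(&|msg: String| println!("{msg}"));
}
```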
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index e0c38cc..4435376 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -20,7 +20,9 @@
use crate::{
CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot,
- TargetKind, utf8_stdout,
+ TargetKind,
+ toolchain_info::{QueryConfig, version},
+ utf8_stdout,
};
/// Output of the build script and proc-macro building steps for a workspace.
@@ -446,10 +448,30 @@
}
};
- if config.wrap_rustc_in_build_scripts {
+        // If the [`--compile-time-deps` flag](https://github.com/rust-lang/cargo/issues/14434) is
+        // available in the current toolchain's cargo, use it to build compile-time deps only.
+ const COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION: semver::Version = semver::Version {
+ major: 1,
+ minor: 90,
+ patch: 0,
+ pre: semver::Prerelease::EMPTY,
+ build: semver::BuildMetadata::EMPTY,
+ };
+
+ let query_config = QueryConfig::Cargo(sysroot, manifest_path);
+ let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
+ let cargo_comp_time_deps_available =
+ toolchain.is_some_and(|v| v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION);
+
+ if cargo_comp_time_deps_available {
+ cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly");
+ cmd.arg("-Zunstable-options");
+ cmd.arg("--compile-time-deps");
+ } else if config.wrap_rustc_in_build_scripts {
// Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
// that to compile only proc macros and build scripts during the initial
// `cargo check`.
+            // We don't need this if we are using the `--compile-time-deps` flag.
let myself = std::env::current_exe()?;
cmd.env("RUSTC_WRAPPER", myself);
cmd.env("RA_RUSTC_WRAPPER", "1");
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 1fade7b..5850741 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -7,16 +7,25 @@
use base_db::Env;
use cargo_metadata::{CargoOpt, MetadataCommand};
use la_arena::{Arena, Idx};
-use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use serde_derive::Deserialize;
use serde_json::from_value;
use span::Edition;
+use stdx::process::spawn_with_streaming_output;
use toolchain::Tool;
use crate::{CfgOverrides, InvocationStrategy};
use crate::{ManifestPath, Sysroot};
+const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = semver::Version {
+ major: 1,
+ minor: 82,
+ patch: 0,
+ pre: semver::Prerelease::EMPTY,
+ build: semver::BuildMetadata::EMPTY,
+};
+
/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
/// workspace. It pretty closely mirrors `cargo metadata` output.
///
@@ -290,6 +299,13 @@
pub extra_args: Vec<String>,
/// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, Option<String>>,
+ /// The target dir for this workspace load.
+ pub target_dir: Utf8PathBuf,
+ /// What kind of metadata are we fetching: workspace, rustc, or sysroot.
+ pub kind: &'static str,
+ /// The toolchain version, if known.
+ /// Used to conditionally enable unstable cargo features.
+ pub toolchain_version: Option<semver::Version>,
}
// Deserialize helper for the cargo metadata
@@ -382,28 +398,74 @@
config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]),
);
}
- // The manifest is a rust file, so this means its a script manifest
- if cargo_toml.is_rust_manifest() {
- // Deliberately don't set up RUSTC_BOOTSTRAP or a nightly override here, the user should
- // opt into it themselves.
- other_options.push("-Zscript".to_owned());
- }
- if locked {
- other_options.push("--locked".to_owned());
- }
if no_deps {
other_options.push("--no-deps".to_owned());
}
+
+ let mut using_lockfile_copy = false;
+    // The manifest is a Rust file, so this means it's a script manifest
+ if cargo_toml.is_rust_manifest() {
+ other_options.push("-Zscript".to_owned());
+ } else if config
+ .toolchain_version
+ .as_ref()
+ .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
+ {
+ let lockfile = <_ as AsRef<Utf8Path>>::as_ref(cargo_toml).with_extension("lock");
+ let target_lockfile = config
+ .target_dir
+ .join("rust-analyzer")
+ .join("metadata")
+ .join(config.kind)
+ .join("Cargo.lock");
+ match std::fs::copy(&lockfile, &target_lockfile) {
+ Ok(_) => {
+ using_lockfile_copy = true;
+ other_options.push("--lockfile-path".to_owned());
+ other_options.push(target_lockfile.to_string());
+ }
+ Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
+ // There exists no lockfile yet
+ using_lockfile_copy = true;
+ other_options.push("--lockfile-path".to_owned());
+ other_options.push(target_lockfile.to_string());
+ }
+ Err(e) => {
+ tracing::warn!(
+ "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
+ );
+ }
+ }
+ }
+ if using_lockfile_copy {
+ other_options.push("-Zunstable-options".to_owned());
+ meta.env("RUSTC_BOOTSTRAP", "1");
+ }
+    // No need to lock it if we copied the lockfile; we won't modify the original after all.
+ // This way cargo cannot error out on us if the lockfile requires updating.
+ if !using_lockfile_copy && locked {
+ other_options.push("--locked".to_owned());
+ }
meta.other_options(other_options);
// FIXME: Fetching metadata is a slow process, as it might require
// calling crates.io. We should be reporting progress here, but it's
// unclear whether cargo itself supports it.
- progress("metadata".to_owned());
+ progress("cargo metadata: started".to_owned());
- (|| -> anyhow::Result<(_, _)> {
- let output = meta.cargo_command().output()?;
+ let res = (|| -> anyhow::Result<(_, _)> {
+ let mut errored = false;
+ let output =
+ spawn_with_streaming_output(meta.cargo_command(), &mut |_| (), &mut |line| {
+ errored = errored || line.starts_with("error") || line.starts_with("warning");
+ if errored {
+ progress("cargo metadata: ?".to_owned());
+ return;
+ }
+ progress(format!("cargo metadata: {line}"));
+ })?;
if !output.status.success() {
+ progress(format!("cargo metadata: failed {}", output.status));
let error = cargo_metadata::Error::CargoMetadata {
stderr: String::from_utf8(output.stderr)?,
}
@@ -416,8 +478,8 @@
current_dir,
config,
sysroot,
- locked,
true,
+ locked,
progress,
) {
return Ok((metadata, Some(error)));
@@ -431,7 +493,9 @@
.ok_or(cargo_metadata::Error::NoJson)?;
Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
})()
- .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
+ .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()));
+ progress("cargo metadata: finished".to_owned());
+ res
}
pub fn new(
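The lockfile handling above reduces to: work on a copy of `Cargo.lock` under the target directory so `cargo metadata` can create or update it freely, and only fall back to `--locked` when no copy is in use. A simplified sketch of that decision, with illustrative names and paths (the real code also sets `RUSTC_BOOTSTRAP=1` and checks the toolchain version first):

```rust
use std::path::Path;

/// Returns the extra cargo arguments, mirroring the branching above: prefer
/// `--lockfile-path <copy>` whenever the copy can be used, otherwise honour
/// the caller's `locked` request.
fn lockfile_args(lockfile: &Path, target_lockfile: &Path, locked: bool) -> Vec<String> {
    let using_copy = match std::fs::copy(lockfile, target_lockfile) {
        Ok(_) => true,
        // No lockfile yet: cargo will create one at the copied location.
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => true,
        Err(e) => {
            eprintln!("failed to copy lock file: {e}");
            false
        }
    };
    if using_copy {
        vec![
            "--lockfile-path".to_owned(),
            target_lockfile.display().to_string(),
            "-Zunstable-options".to_owned(),
        ]
    } else if locked {
        vec!["--locked".to_owned()]
    } else {
        Vec::new()
    }
}

fn main() {
    let args = lockfile_args(
        Path::new("Cargo.lock"),
        Path::new("target/rust-analyzer/metadata/workspace/Cargo.lock"),
        true,
    );
    println!("{args:?}");
}
```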
diff --git a/crates/project-model/src/manifest_path.rs b/crates/project-model/src/manifest_path.rs
index 4f43be2..fba8cc9 100644
--- a/crates/project-model/src/manifest_path.rs
+++ b/crates/project-model/src/manifest_path.rs
@@ -1,7 +1,7 @@
//! See [`ManifestPath`].
use std::{borrow::Borrow, fmt, ops};
-use paths::{AbsPath, AbsPathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8Path};
/// More or less [`AbsPathBuf`] with non-None parent.
///
@@ -78,6 +78,12 @@
}
}
+impl AsRef<Utf8Path> for ManifestPath {
+ fn as_ref(&self) -> &Utf8Path {
+ self.file.as_ref()
+ }
+}
+
impl Borrow<AbsPath> for ManifestPath {
fn borrow(&self) -> &AbsPath {
self.file.borrow()
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index ebd86e3..4b34fc0 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -4,6 +4,7 @@
//! but we can't process `.rlib` and need source code instead. The source code
//! is typically installed with `rustup component add rust-src` command.
+use core::fmt;
use std::{env, fs, ops::Not, path::Path, process::Command};
use anyhow::{Result, format_err};
@@ -34,6 +35,19 @@
Empty,
}
+impl fmt::Display for RustLibSrcWorkspace {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ RustLibSrcWorkspace::Workspace(ws) => write!(f, "workspace {}", ws.workspace_root()),
+ RustLibSrcWorkspace::Json(json) => write!(f, "json {}", json.manifest_or_root()),
+ RustLibSrcWorkspace::Stitched(stitched) => {
+ write!(f, "stitched with {} crates", stitched.crates.len())
+ }
+ RustLibSrcWorkspace::Empty => write!(f, "empty"),
+ }
+ }
+}
+
impl Sysroot {
pub const fn empty() -> Sysroot {
Sysroot {
@@ -195,6 +209,8 @@
pub fn load_workspace(
&self,
sysroot_source_config: &RustSourceWorkspaceConfig,
+ current_dir: &AbsPath,
+ progress: &dyn Fn(String),
) -> Option<RustLibSrcWorkspace> {
assert!(matches!(self.workspace, RustLibSrcWorkspace::Empty), "workspace already loaded");
let Self { root: _, rust_lib_src_root: Some(src_root), workspace: _, error: _ } = self
@@ -204,10 +220,16 @@
if let RustSourceWorkspaceConfig::CargoMetadata(cargo_config) = sysroot_source_config {
let library_manifest = ManifestPath::try_from(src_root.join("Cargo.toml")).unwrap();
if fs::metadata(&library_manifest).is_ok() {
- if let Some(loaded) =
- self.load_library_via_cargo(library_manifest, src_root, cargo_config)
- {
- return Some(loaded);
+ match self.load_library_via_cargo(
+ &library_manifest,
+ current_dir,
+ cargo_config,
+ progress,
+ ) {
+ Ok(loaded) => return Some(loaded),
+ Err(e) => {
+ tracing::error!("`cargo metadata` failed on `{library_manifest}` : {e}")
+ }
}
}
tracing::debug!("Stitching sysroot library: {src_root}");
@@ -293,10 +315,11 @@
fn load_library_via_cargo(
&self,
- library_manifest: ManifestPath,
- rust_lib_src_dir: &AbsPathBuf,
+ library_manifest: &ManifestPath,
+ current_dir: &AbsPath,
cargo_config: &CargoMetadataConfig,
- ) -> Option<RustLibSrcWorkspace> {
+ progress: &dyn Fn(String),
+ ) -> Result<RustLibSrcWorkspace> {
tracing::debug!("Loading library metadata: {library_manifest}");
let mut cargo_config = cargo_config.clone();
// the sysroot uses `public-dependency`, so we make cargo think it's a nightly
@@ -305,22 +328,16 @@
Some("nightly".to_owned()),
);
- let (mut res, _) = match CargoWorkspace::fetch_metadata(
- &library_manifest,
- rust_lib_src_dir,
+ let (mut res, _) = CargoWorkspace::fetch_metadata(
+ library_manifest,
+ current_dir,
&cargo_config,
self,
false,
// Make sure we never attempt to write to the sysroot
true,
- &|_| (),
- ) {
- Ok(it) => it,
- Err(e) => {
- tracing::error!("`cargo metadata` failed on `{library_manifest}` : {e}");
- return None;
- }
- };
+ progress,
+ )?;
// Patch out `rustc-std-workspace-*` crates to point to the real crates.
// This is done prior to `CrateGraph` construction to prevent de-duplication logic from failing.
@@ -371,8 +388,9 @@
res.packages.remove(idx);
});
- let cargo_workspace = CargoWorkspace::new(res, library_manifest, Default::default(), true);
- Some(RustLibSrcWorkspace::Workspace(cargo_workspace))
+ let cargo_workspace =
+ CargoWorkspace::new(res, library_manifest.clone(), Default::default(), true);
+ Ok(RustLibSrcWorkspace::Workspace(cargo_workspace))
}
}
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index c69891b..4f11af2 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -1,3 +1,5 @@
+use std::env::temp_dir;
+
use base_db::{CrateGraphBuilder, ProcMacroPaths};
use cargo_metadata::Metadata;
use cfg::{CfgAtom, CfgDiff};
@@ -235,11 +237,18 @@
AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))),
&Default::default(),
);
- let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
+ let cwd = AbsPathBuf::assert_utf8(temp_dir().join("smoke_test_real_sysroot_cargo"));
+ std::fs::create_dir_all(&cwd).unwrap();
+ let loaded_sysroot =
+ sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), &cwd, &|_| ());
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
- assert!(matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace(_)));
+ assert!(
+ matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace(_)),
+ "got {}",
+ sysroot.workspace()
+ );
let project_workspace = ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
cargo: cargo_workspace,
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index eec0077..a6743a3 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -12,7 +12,7 @@
};
use cfg::{CfgAtom, CfgDiff, CfgOptions};
use intern::{Symbol, sym};
-use paths::{AbsPath, AbsPathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use span::{Edition, FileId};
@@ -170,7 +170,7 @@
pub fn load(
manifest: ProjectManifest,
config: &CargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> anyhow::Result<ProjectWorkspace> {
ProjectWorkspace::load_inner(&manifest, config, progress)
.with_context(|| format!("Failed to load the project at {manifest}"))
@@ -179,7 +179,7 @@
fn load_inner(
manifest: &ProjectManifest,
config: &CargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> anyhow::Result<ProjectWorkspace> {
let res = match manifest {
ProjectManifest::ProjectJson(project_json) => {
@@ -206,9 +206,10 @@
fn load_cargo(
cargo_toml: &ManifestPath,
config: &CargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> Result<ProjectWorkspace, anyhow::Error> {
progress("Discovering sysroot".to_owned());
+ let workspace_dir = cargo_toml.parent();
let CargoConfig {
features,
rustc_source,
@@ -224,15 +225,9 @@
..
} = config;
let mut sysroot = match (sysroot, sysroot_src) {
- (Some(RustLibSource::Discover), None) => {
- Sysroot::discover(cargo_toml.parent(), extra_env)
- }
+ (Some(RustLibSource::Discover), None) => Sysroot::discover(workspace_dir, extra_env),
(Some(RustLibSource::Discover), Some(sysroot_src)) => {
- Sysroot::discover_with_src_override(
- cargo_toml.parent(),
- extra_env,
- sysroot_src.clone(),
- )
+ Sysroot::discover_with_src_override(workspace_dir, extra_env, sysroot_src.clone())
}
(Some(RustLibSource::Path(path)), None) => {
Sysroot::discover_rust_lib_src_dir(path.clone())
@@ -248,24 +243,23 @@
let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml);
let targets =
target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default();
+ let toolchain = version::get(toolchain_config, extra_env)
+ .inspect_err(|e| {
+ tracing::error!(%e,
+ "failed fetching toolchain version for {cargo_toml:?} workspace"
+ )
+ })
+ .ok()
+ .flatten();
+
+ let target_dir =
+ config.target_dir.clone().unwrap_or_else(|| workspace_dir.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
// toolchain and sysroot
// We can speed up loading a bit by spawning all of these processes in parallel (especially
// on systems were process spawning is delayed)
let join = thread::scope(|s| {
- let workspace_dir = cargo_toml.parent();
- let toolchain = s.spawn(|| {
- version::get(toolchain_config, extra_env)
- .inspect_err(|e| {
- tracing::error!(%e,
- "failed fetching toolchain version for {cargo_toml:?} workspace"
- )
- })
- .ok()
- .flatten()
- });
-
let rustc_cfg = s.spawn(|| {
rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
});
@@ -300,11 +294,14 @@
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
+ target_dir: target_dir.clone(),
+ toolchain_version: toolchain.clone(),
+ kind: "rustc-dev"
},
&sysroot,
*no_deps,
- false,
- &|_| (),
+ true,
+ progress,
) {
Ok((meta, _error)) => {
let workspace = CargoWorkspace::new(
@@ -343,22 +340,31 @@
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
+ target_dir: target_dir.clone(),
+ toolchain_version: toolchain.clone(),
+ kind: "workspace",
},
&sysroot,
*no_deps,
false,
- &|_| (),
+ progress,
)
});
let loaded_sysroot = s.spawn(|| {
- sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
- sysroot_metadata_config(extra_env, &targets),
- ))
+ sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
+ config,
+ &targets,
+ toolchain.clone(),
+ target_dir.clone(),
+ )),
+ workspace_dir,
+ progress,
+ )
});
let cargo_config_extra_env =
s.spawn(|| cargo_config_env(cargo_toml, extra_env, &sysroot));
thread::Result::Ok((
- toolchain.join()?,
rustc_cfg.join()?,
data_layout.join()?,
rustc_dir.join()?,
@@ -368,18 +374,11 @@
))
});
- let (
- toolchain,
- rustc_cfg,
- data_layout,
- rustc,
- loaded_sysroot,
- cargo_metadata,
- cargo_config_extra_env,
- ) = match join {
- Ok(it) => it,
- Err(e) => std::panic::resume_unwind(e),
- };
+ let (rustc_cfg, data_layout, rustc, loaded_sysroot, cargo_metadata, cargo_config_extra_env) =
+ match join {
+ Ok(it) => it,
+ Err(e) => std::panic::resume_unwind(e),
+ };
let (meta, error) = cargo_metadata.with_context(|| {
format!(
@@ -388,6 +387,7 @@
})?;
let cargo = CargoWorkspace::new(meta, cargo_toml.clone(), cargo_config_extra_env, false);
if let Some(loaded_sysroot) = loaded_sysroot {
+ tracing::info!(src_root = ?sysroot.rust_lib_src_root(), root = %loaded_sysroot, "Loaded sysroot");
sysroot.set_workspace(loaded_sysroot);
}
@@ -411,7 +411,7 @@
pub fn load_inline(
mut project_json: ProjectJson,
config: &CargoConfig,
- progress: &dyn Fn(String),
+ progress: &(dyn Fn(String) + Sync),
) -> ProjectWorkspace {
progress("Discovering sysroot".to_owned());
let mut sysroot =
@@ -423,14 +423,13 @@
let query_config = QueryConfig::Rustc(&sysroot, project_json.path().as_ref());
let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
.unwrap_or_default();
+ let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
// We spawn a bunch of processes to query various information about the workspace's
// toolchain and sysroot
// We can speed up loading a bit by spawning all of these processes in parallel (especially
// on systems were process spawning is delayed)
let join = thread::scope(|s| {
- let toolchain =
- s.spawn(|| version::get(query_config, &config.extra_env).ok().flatten());
let rustc_cfg = s.spawn(|| {
rustc_cfg::get(query_config, targets.first().map(Deref::deref), &config.extra_env)
});
@@ -442,24 +441,35 @@
)
});
let loaded_sysroot = s.spawn(|| {
+ let project_root = project_json.project_root();
if let Some(sysroot_project) = sysroot_project {
- sysroot.load_workspace(&RustSourceWorkspaceConfig::Json(*sysroot_project))
+ sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::Json(*sysroot_project),
+ project_root,
+ progress,
+ )
} else {
- sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
- sysroot_metadata_config(&config.extra_env, &targets),
- ))
+ let target_dir = config
+ .target_dir
+ .clone()
+ .unwrap_or_else(|| project_root.join("target").into());
+ sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
+ config,
+ &targets,
+ toolchain.clone(),
+ target_dir,
+ )),
+ project_root,
+ progress,
+ )
}
});
- thread::Result::Ok((
- toolchain.join()?,
- rustc_cfg.join()?,
- data_layout.join()?,
- loaded_sysroot.join()?,
- ))
+ thread::Result::Ok((rustc_cfg.join()?, data_layout.join()?, loaded_sysroot.join()?))
});
- let (toolchain, rustc_cfg, target_layout, loaded_sysroot) = match join {
+ let (rustc_cfg, target_layout, loaded_sysroot) = match join {
Ok(it) => it,
Err(e) => std::panic::resume_unwind(e),
};
@@ -497,9 +507,17 @@
.unwrap_or_default();
let rustc_cfg = rustc_cfg::get(query_config, None, &config.extra_env);
let data_layout = target_data_layout::get(query_config, None, &config.extra_env);
- let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
- sysroot_metadata_config(&config.extra_env, &targets),
- ));
+ let target_dir = config.target_dir.clone().unwrap_or_else(|| dir.join("target").into());
+ let loaded_sysroot = sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
+ config,
+ &targets,
+ toolchain.clone(),
+ target_dir.clone(),
+ )),
+ dir,
+ &|_| (),
+ );
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
@@ -512,6 +530,9 @@
targets,
extra_args: config.extra_args.clone(),
extra_env: config.extra_env.clone(),
+ target_dir,
+ toolchain_version: toolchain.clone(),
+ kind: "detached-file",
},
&sysroot,
config.no_deps,
@@ -1804,13 +1825,18 @@
}
fn sysroot_metadata_config(
- extra_env: &FxHashMap<String, Option<String>>,
+ config: &CargoConfig,
targets: &[String],
+ toolchain_version: Option<Version>,
+ target_dir: Utf8PathBuf,
) -> CargoMetadataConfig {
CargoMetadataConfig {
features: Default::default(),
targets: targets.to_vec(),
extra_args: Default::default(),
- extra_env: extra_env.clone(),
+ extra_env: config.extra_env.clone(),
+ target_dir,
+ toolchain_version,
+ kind: "sysroot",
}
}
diff --git a/crates/query-group-macro/src/queries.rs b/crates/query-group-macro/src/queries.rs
index baac3e8..c151cca 100644
--- a/crates/query-group-macro/src/queries.rs
+++ b/crates/query-group-macro/src/queries.rs
@@ -74,8 +74,8 @@
quote! {
#sig {
#annotation
- fn #shim(
- db: &dyn #trait_name,
+ fn #shim<'db>(
+ db: &'db dyn #trait_name,
_input: #input_struct_name,
#(#pat_and_tys),*
) #ret
@@ -88,8 +88,8 @@
quote! {
#sig {
#annotation
- fn #shim(
- db: &dyn #trait_name,
+ fn #shim<'db>(
+ db: &'db dyn #trait_name,
#(#pat_and_tys),*
) #ret
#invoke_block
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 12b393b..0ee0198 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -532,7 +532,7 @@
}
let todo = syntax::ast::make::ext::expr_todo().to_string();
- let mut formatter = |_: &hir::Type| todo.clone();
+ let mut formatter = |_: &hir::Type<'_>| todo.clone();
let mut syntax_hit_found = false;
for term in found_terms {
let generated = term
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index e3b372c..740fcd8 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -9,7 +9,6 @@
use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
use ide_db::base_db;
use itertools::Either;
-use paths::Utf8PathBuf;
use profile::StopWatch;
use project_model::toolchain_info::{QueryConfig, target_data_layout};
use project_model::{
@@ -64,9 +63,9 @@
impl Tester {
fn new() -> Result<Self> {
- let mut path = std::env::temp_dir();
- path.push("ra-rustc-test.rs");
- let tmp_file = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap();
+ let mut path = AbsPathBuf::assert_utf8(std::env::temp_dir());
+ path.push("ra-rustc-test");
+ let tmp_file = path.join("ra-rustc-test.rs");
std::fs::write(&tmp_file, "")?;
let cargo_config = CargoConfig {
sysroot: Some(RustLibSource::Discover),
@@ -76,7 +75,8 @@
};
let mut sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env);
- let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
+ let loaded_sysroot =
+ sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), &path, &|_| ());
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 762b63f..05e1b83 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -452,6 +452,8 @@
assist_emitMustUse: bool = false,
/// Placeholder expression to use for missing expressions in assists.
assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo,
+ /// When inserting a type (e.g. in "fill match arms" assist), prefer to use `Self` over the type name where possible.
+ assist_preferSelf: bool = false,
/// Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.
assist_termSearch_borrowcheck: bool = true,
/// Term search fuel in "units of work" for assists (Defaults to 1800).
@@ -760,7 +762,11 @@
/// though Cargo might be the eventual consumer.
vfs_extraIncludes: Vec<String> = vec![],
- /// Exclude imports from symbol search.
+ /// Exclude all imports from workspace symbol search.
+ ///
+ /// In addition to regular imports (which are always excluded),
+ /// this option removes public imports (better known as re-exports)
+ /// and removes imports that rename the imported symbol.
workspace_symbol_search_excludeImports: bool = false,
/// Workspace symbol search kind.
workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = WorkspaceSymbolSearchKindDef::OnlyTypes,
@@ -1505,6 +1511,7 @@
ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
ExprFillDefaultDef::Underscore => ExprFillDefaultMode::Underscore,
},
+ prefer_self_ty: *self.assist_preferSelf(source_root),
}
}
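To make the new `assist.preferSelf` setting concrete: with it enabled, assists that insert a type inside an impl block are expected to spell the type as `Self` where possible, per the option's description. A hand-written illustration of the intended difference for "fill match arms" (not generated output):

```rust
enum Direction { North, South }

impl Direction {
    fn flip(&self) -> Self {
        match self {
            // With `rust-analyzer.assist.preferSelf = true`, the generated arms
            // are expected to use `Self::North` / `Self::South` rather than
            // `Direction::North` / `Direction::South`.
            Self::North => Self::South,
            Self::South => Self::North,
        }
    }
}

fn main() {
    let _ = Direction::North.flip();
}
```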
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index b7373f2..200e972 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -239,7 +239,7 @@
let (config, e, _) = config.apply_change(change);
this.config_errors = e.is_empty().not().then_some(e);
- // Client config changes neccesitates .update_config method to be called.
+        // Client config changes necessitate a call to the .update_config method.
this.update_configuration(config);
}
}
diff --git a/crates/rust-analyzer/src/lsp/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs
index 418fe95..04e31f3 100644
--- a/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -77,7 +77,7 @@
_ => Some(OneOf::Left(false)),
},
document_on_type_formatting_provider: Some({
- let mut chars = ide::Analysis::SUPPORTED_TRIGGER_CHARS.chars();
+ let mut chars = ide::Analysis::SUPPORTED_TRIGGER_CHARS.iter();
DocumentOnTypeFormattingOptions {
first_trigger_character: chars.next().unwrap().to_string(),
more_trigger_character: Some(chars.map(|c| c.to_string()).collect()),
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 4677880..189d95e 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -114,6 +114,16 @@
Durability::HIGH,
);
}
+
+ if self.config.cargo(None) != old_config.cargo(None) {
+ let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false };
+ self.fetch_workspaces_queue.request_op("cargo config changed".to_owned(), req)
+ }
+
+ if self.config.cfg_set_test(None) != old_config.cfg_set_test(None) {
+ let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false };
+ self.fetch_workspaces_queue.request_op("cfg_set_test config changed".to_owned(), req)
+ }
}
pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
diff --git a/crates/rust-analyzer/src/test_runner.rs b/crates/rust-analyzer/src/test_runner.rs
index 9c0bc33..e7528db 100644
--- a/crates/rust-analyzer/src/test_runner.rs
+++ b/crates/rust-analyzer/src/test_runner.rs
@@ -103,6 +103,7 @@
) -> std::io::Result<Self> {
let mut cmd = toolchain::command(Tool::Cargo.path(), root, &options.extra_env);
cmd.env("RUSTC_BOOTSTRAP", "1");
+ cmd.arg("--color=always");
cmd.arg("test");
cmd.arg("--package");
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs
index 9a292ea..978c50d 100644
--- a/crates/stdx/src/lib.rs
+++ b/crates/stdx/src/lib.rs
@@ -13,6 +13,7 @@
pub mod process;
pub mod rand;
pub mod thread;
+pub mod variance;
pub use itertools;
diff --git a/crates/stdx/src/variance.rs b/crates/stdx/src/variance.rs
new file mode 100644
index 0000000..8465d72
--- /dev/null
+++ b/crates/stdx/src/variance.rs
@@ -0,0 +1,270 @@
+//! This is a copy of [`std::marker::variance`].
+
+use std::any::type_name;
+use std::cmp::Ordering;
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
+
+macro_rules! first_token {
+ ($first:tt $($rest:tt)*) => {
+ $first
+ };
+}
+macro_rules! phantom_type {
+ ($(
+ $(#[$attr:meta])*
+ pub struct $name:ident <$t:ident> ($($inner:tt)*);
+ )*) => {$(
+ $(#[$attr])*
+ pub struct $name<$t>($($inner)*) where T: ?Sized;
+
+ impl<T> $name<T>
+ where T: ?Sized
+ {
+ /// Constructs a new instance of the variance marker.
+ pub const fn new() -> Self {
+ Self(PhantomData)
+ }
+ }
+
+ impl<T> self::sealed::Sealed for $name<T> where T: ?Sized {
+ const VALUE: Self = Self::new();
+ }
+
+ impl<T> Variance for $name<T> where T: ?Sized {}
+
+ impl<T> Default for $name<T>
+ where T: ?Sized
+ {
+ fn default() -> Self {
+ Self(PhantomData)
+ }
+ }
+
+ impl<T> fmt::Debug for $name<T>
+ where T: ?Sized
+ {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}<{}>", stringify!($name), type_name::<T>())
+ }
+ }
+
+ impl<T> Clone for $name<T>
+ where T: ?Sized
+ {
+ fn clone(&self) -> Self {
+ *self
+ }
+ }
+
+ impl<T> Copy for $name<T> where T: ?Sized {}
+
+ impl<T> PartialEq for $name<T>
+ where T: ?Sized
+ {
+ fn eq(&self, _: &Self) -> bool {
+ true
+ }
+ }
+
+ impl<T> Eq for $name<T> where T: ?Sized {}
+
+ #[allow(clippy::non_canonical_partial_ord_impl)]
+ impl<T> PartialOrd for $name<T>
+ where T: ?Sized
+ {
+ fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
+ Some(Ordering::Equal)
+ }
+ }
+
+ impl<T> Ord for $name<T>
+ where T: ?Sized
+ {
+ fn cmp(&self, _: &Self) -> Ordering {
+ Ordering::Equal
+ }
+ }
+
+ impl<T> Hash for $name<T>
+ where T: ?Sized
+ {
+ fn hash<H: Hasher>(&self, _: &mut H) {}
+ }
+ )*};
+}
+
+macro_rules! phantom_lifetime {
+ ($(
+ $(#[$attr:meta])*
+ pub struct $name:ident <$lt:lifetime> ($($inner:tt)*);
+ )*) => {$(
+ $(#[$attr])*
+
+ #[derive(Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+ pub struct $name<$lt>($($inner)*);
+
+ impl $name<'_> {
+ /// Constructs a new instance of the variance marker.
+ pub const fn new() -> Self {
+ Self(first_token!($($inner)*)(PhantomData))
+ }
+ }
+
+ impl self::sealed::Sealed for $name<'_> {
+ const VALUE: Self = Self::new();
+ }
+
+ impl Variance for $name<'_> {}
+
+ impl fmt::Debug for $name<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", stringify!($name))
+ }
+ }
+ )*};
+}
+
+phantom_lifetime! {
+ /// Zero-sized type used to mark a lifetime as covariant.
+ ///
+ /// Covariant lifetimes must live at least as long as declared. See [the reference][1] for more
+ /// information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `'a`, the following are guaranteed:
+ /// * `size_of::<PhantomCovariantLifetime<'a>>() == 0`
+ /// * `align_of::<PhantomCovariantLifetime<'a>>() == 1`
+
+ pub struct PhantomCovariantLifetime<'a>(PhantomCovariant<&'a ()>);
+ /// Zero-sized type used to mark a lifetime as contravariant.
+ ///
+ /// Contravariant lifetimes must live at most as long as declared. See [the reference][1] for
+ /// more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `'a`, the following are guaranteed:
+ /// * `size_of::<PhantomContravariantLifetime<'a>>() == 0`
+ /// * `align_of::<PhantomContravariantLifetime<'a>>() == 1`
+
+ pub struct PhantomContravariantLifetime<'a>(PhantomContravariant<&'a ()>);
+ /// Zero-sized type used to mark a lifetime as invariant.
+ ///
+ /// Invariant lifetimes must be live for the exact length declared, neither shorter nor longer.
+ /// See [the reference][1] for more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `'a`, the following are guaranteed:
+ /// * `size_of::<PhantomInvariantLifetime<'a>>() == 0`
+ /// * `align_of::<PhantomInvariantLifetime<'a>>() == 1`
+
+ pub struct PhantomInvariantLifetime<'a>(PhantomInvariant<&'a ()>);
+
+}
+
+phantom_type! {
+ /// Zero-sized type used to mark a type parameter as covariant.
+ ///
+ /// Types used as part of the return value from a function are covariant. If the type is _also_
+ /// passed as a parameter then it is [invariant][PhantomInvariant]. See [the reference][1] for
+ /// more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `T`, the following are guaranteed:
+ /// * `size_of::<PhantomCovariant<T>>() == 0`
+ /// * `align_of::<PhantomCovariant<T>>() == 1`
+
+ pub struct PhantomCovariant<T>(PhantomData<fn() -> T>);
+ /// Zero-sized type used to mark a type parameter as contravariant.
+ ///
+ /// Types passed as arguments to a function are contravariant. If the type is _also_ part of the
+ /// return value from a function then it is [invariant][PhantomInvariant]. See [the
+ /// reference][1] for more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `T`, the following are guaranteed:
+ /// * `size_of::<PhantomContravariant<T>>() == 0`
+ /// * `align_of::<PhantomContravariant<T>>() == 1`
+
+ pub struct PhantomContravariant<T>(PhantomData<fn(T)>);
+ /// Zero-sized type used to mark a type parameter as invariant.
+ ///
+ /// Types that are both passed as an argument _and_ used as part of the return value from a
+ /// function are invariant. See [the reference][1] for more information.
+ ///
+ /// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
+ ///
+ /// ## Layout
+ ///
+ /// For all `T`, the following are guaranteed:
+ /// * `size_of::<PhantomInvariant<T>>() == 0`
+ /// * `align_of::<PhantomInvariant<T>>() == 1`
+
+ pub struct PhantomInvariant<T>(PhantomData<fn(T) -> T>);
+
+}
+
+mod sealed {
+
+ pub trait Sealed {
+ const VALUE: Self;
+ }
+}
+/// A marker trait for phantom variance types.
+pub trait Variance: sealed::Sealed + Default {}
+/// Construct a variance marker; equivalent to [`Default::default`].
+///
+/// This type can be any of the following. You generally should not need to explicitly name the
+/// type, however.
+///
+/// - [`PhantomCovariant`]
+/// - [`PhantomContravariant`]
+/// - [`PhantomInvariant`]
+/// - [`PhantomCovariantLifetime`]
+/// - [`PhantomContravariantLifetime`]
+/// - [`PhantomInvariantLifetime`]
+///
+/// # Example
+///
+/// ```rust
+/// #![feature(phantom_variance_markers)]
+///
+/// use core::marker::{PhantomCovariant, variance};
+///
+/// struct BoundFn<F, P, R>
+/// where
+/// F: Fn(P) -> R,
+/// {
+/// function: F,
+/// parameter: P,
+/// return_value: PhantomCovariant<R>,
+/// }
+///
+/// let bound_fn = BoundFn {
+/// function: core::convert::identity,
+/// parameter: 5u8,
+/// return_value: variance(),
+/// };
+/// ```
+pub const fn variance<T>() -> T
+where
+ T: Variance,
+{
+ T::VALUE
+}
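Since `stdx::variance` is a vendored copy of an unstable std API, a short usage sketch may help: the markers are zero-sized fields that pin a chosen variance for an otherwise unused lifetime or type parameter. Assuming the module lands as above, a handle branded with a database lifetime could be written like this (names are illustrative, not taken from the codebase):

```rust
use stdx::variance::{PhantomCovariantLifetime, variance};

/// A zero-data handle branded with the lifetime `'db`. Without a marker the
/// unused lifetime parameter would be rejected by the compiler; the marker
/// also states explicitly that the handle is covariant in `'db`.
struct DbHandle<'db> {
    id: u32,
    _lt: PhantomCovariantLifetime<'db>,
}

fn make_handle<'db>(_db: &'db str, id: u32) -> DbHandle<'db> {
    DbHandle { id, _lt: variance() }
}

fn main() {
    let db = String::from("db contents");
    let handle = make_handle(&db, 7);
    println!("handle {}", handle.id);
}
```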
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index fab4cb2..955aada 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -134,6 +134,13 @@
}
}
}
+pub fn name_ref_self_ty() -> ast::NameRef {
+ quote! {
+ NameRef {
+ [Self]
+ }
+ }
+}
fn raw_ident_esc(ident: &str) -> &'static str {
if is_raw_identifier(ident, Edition::CURRENT) { "r#" } else { "" }
}
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 7240069..1d821e9 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -412,22 +412,36 @@
}
let mut active_regions = Vec::new();
+ let mut inactive_regions = Vec::new();
let mut seen_regions = Vec::new();
for line in lines {
let trimmed = line.trim();
if let Some(region) = trimmed.strip_prefix("// region:") {
- active_regions.push(region);
- continue;
+ if let Some(region) = region.strip_prefix('!') {
+ inactive_regions.push(region);
+ continue;
+ } else {
+ active_regions.push(region);
+ continue;
+ }
}
if let Some(region) = trimmed.strip_prefix("// endregion:") {
- let prev = active_regions.pop().unwrap();
+ let (prev, region) = if let Some(region) = region.strip_prefix('!') {
+ (inactive_regions.pop().unwrap(), region)
+ } else {
+ (active_regions.pop().unwrap(), region)
+ };
assert_eq!(prev, region, "unbalanced region pairs");
continue;
}
- let mut line_region = false;
- if let Some(idx) = trimmed.find("// :") {
- line_region = true;
+ let mut active_line_region = false;
+ let mut inactive_line_region = false;
+ if let Some(idx) = trimmed.find("// :!") {
+ inactive_line_region = true;
+ inactive_regions.push(&trimmed[idx + "// :!".len()..]);
+ } else if let Some(idx) = trimmed.find("// :") {
+ active_line_region = true;
active_regions.push(&trimmed[idx + "// :".len()..]);
}
@@ -438,18 +452,30 @@
seen_regions.push(region);
keep &= self.has_flag(region);
}
+            for &region in &inactive_regions {
+ assert!(!region.starts_with(' '), "region marker starts with a space: {region:?}");
+ self.assert_valid_flag(region);
+ seen_regions.push(region);
+ keep &= !self.has_flag(region);
+ }
if keep {
buf.push_str(line);
}
- if line_region {
+ if active_line_region {
active_regions.pop().unwrap();
}
+ if inactive_line_region {
+ inactive_regions.pop().unwrap();
+ }
}
if !active_regions.is_empty() {
panic!("unclosed regions: {active_regions:?} Add an `endregion` comment");
}
+ if !inactive_regions.is_empty() {
+ panic!("unclosed regions: {inactive_regions:?} Add an `endregion` comment");
+ }
for flag in &self.valid_flags {
if !seen_regions.iter().any(|it| it == flag) {
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index 4bdd791..d13a81d 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -31,6 +31,7 @@
//! eq: sized
//! error: fmt
//! fmt: option, result, transmute, coerce_unsized, copy, clone, derive
+//! fmt_before_1_89_0: fmt
//! fn: tuple
//! from: sized, result
//! future: pin
@@ -1175,6 +1176,7 @@
}
}
+ // region:fmt_before_1_89_0
#[lang = "format_unsafe_arg"]
pub struct UnsafeArg {
_private: (),
@@ -1185,6 +1187,7 @@
UnsafeArg { _private: () }
}
}
+ // endregion:fmt_before_1_89_0
}
#[derive(Copy, Clone)]
@@ -1204,6 +1207,7 @@
Arguments { pieces, fmt: None, args: &[] }
}
+ // region:fmt_before_1_89_0
pub fn new_v1_formatted(
pieces: &'a [&'static str],
args: &'a [rt::Argument<'a>],
@@ -1212,6 +1216,17 @@
) -> Arguments<'a> {
Arguments { pieces, fmt: Some(fmt), args }
}
+ // endregion:fmt_before_1_89_0
+
+ // region:!fmt_before_1_89_0
+ pub unsafe fn new_v1_formatted(
+ pieces: &'a [&'static str],
+ args: &'a [rt::Argument<'a>],
+ fmt: &'a [rt::Placeholder],
+ ) -> Arguments<'a> {
+ Arguments { pieces, fmt: Some(fmt), args }
+ }
+ // endregion:!fmt_before_1_89_0
pub const fn as_str(&self) -> Option<&'static str> {
match (self.pieces, self.args) {
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 4eb9cfc..9404b14 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -13,6 +13,13 @@
Placeholder expression to use for missing expressions in assists.
+## rust-analyzer.assist.preferSelf {#assist.preferSelf}
+
+Default: `false`
+
+When inserting a type (e.g. in "fill match arms" assist), prefer to use `Self` over the type name where possible.
+
+
## rust-analyzer.assist.termSearch.borrowcheck {#assist.termSearch.borrowcheck}
Default: `true`
@@ -1535,7 +1542,11 @@
Default: `false`
-Exclude imports from symbol search.
+Exclude all imports from workspace symbol search.
+
+In addition to regular imports (which are always excluded),
+this option removes public imports (better known as re-exports)
+and removes imports that rename the imported symbol.
## rust-analyzer.workspace.symbol.search.kind {#workspace.symbol.search.kind}
diff --git a/editors/code/package.json b/editors/code/package.json
index dcdb4fe..26a21c1 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -683,6 +683,16 @@
{
"title": "assist",
"properties": {
+ "rust-analyzer.assist.preferSelf": {
+ "markdownDescription": "When inserting a type (e.g. in \"fill match arms\" assist), prefer to use `Self` over the type name where possible.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "assist",
+ "properties": {
"rust-analyzer.assist.termSearch.borrowcheck": {
"markdownDescription": "Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.",
"default": true,
@@ -2895,7 +2905,7 @@
"title": "workspace",
"properties": {
"rust-analyzer.workspace.symbol.search.excludeImports": {
- "markdownDescription": "Exclude imports from symbol search.",
+ "markdownDescription": "Exclude all imports from workspace symbol search.\n\nIn addition to regular imports (which are always excluded),\nthis option removes public imports (better known as re-exports)\nand removes imports that rename the imported symbol.",
"default": false,
"type": "boolean"
}
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index f36e18a..d2dc740 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -20,15 +20,9 @@
configureLang: vscode.Disposable | undefined;
readonly rootSection = "rust-analyzer";
- private readonly requiresServerReloadOpts = [
- "cargo",
- "procMacro",
- "serverPath",
- "server",
- "files",
- "cfg",
- "showSyntaxTree",
- ].map((opt) => `${this.rootSection}.${opt}`);
+ private readonly requiresServerReloadOpts = ["server", "files", "showSyntaxTree"].map(
+ (opt) => `${this.rootSection}.${opt}`,
+ );
private readonly requiresWindowReloadOpts = ["testExplorer"].map(
(opt) => `${this.rootSection}.${opt}`,
@@ -208,7 +202,7 @@
}
get serverPath() {
- return this.get<null | string>("server.path") ?? this.get<null | string>("serverPath");
+ return this.get<null | string>("server.path");
}
get serverExtraEnv(): Env {
diff --git a/rust-version b/rust-version
index af0dd5c..786c656 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-7c10378e1fee5ddc6573b916aeb884ab10e0de17
+58d5e1169056f31553ecf680b009a5770eb0e859