Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r--crates/hir-def/src/attr.rs42
-rw-r--r--crates/hir-ty/src/infer.rs2
-rw-r--r--crates/hir-ty/src/infer/unify.rs83
-rw-r--r--crates/hir-ty/src/lib.rs4
-rw-r--r--crates/hir-ty/src/mir/borrowck.rs185
-rw-r--r--crates/hir-ty/src/mir/lower.rs2
-rw-r--r--crates/hir/src/lib.rs246
-rw-r--r--crates/hir/src/term_search.rs298
-rw-r--r--crates/hir/src/term_search/expr.rs468
-rw-r--r--crates/hir/src/term_search/tactics.rs859
-rw-r--r--crates/ide-assists/src/handlers/term_search.rs253
-rw-r--r--crates/ide-assists/src/lib.rs2
-rw-r--r--crates/ide-completion/src/completions.rs10
-rw-r--r--crates/ide-completion/src/completions/expr.rs56
-rw-r--r--crates/ide-completion/src/config.rs1
-rw-r--r--crates/ide-completion/src/item.rs2
-rw-r--r--crates/ide-completion/src/render.rs99
-rw-r--r--crates/ide-completion/src/tests.rs1
-rw-r--r--crates/ide-completion/src/tests/expression.rs11
-rw-r--r--crates/ide-completion/src/tests/record.rs2
-rw-r--r--crates/ide-completion/src/tests/special.rs40
-rw-r--r--crates/ide-db/src/famous_defs.rs8
-rw-r--r--crates/ide-db/src/path_transform.rs2
-rw-r--r--crates/ide-diagnostics/src/handlers/type_mismatch.rs3
-rw-r--r--crates/ide-diagnostics/src/handlers/typed_hole.rs256
-rw-r--r--crates/ide-diagnostics/src/tests.rs85
-rw-r--r--crates/ide/src/hover/tests.rs8
-rw-r--r--crates/rust-analyzer/src/cli/analysis_stats.rs216
-rw-r--r--crates/rust-analyzer/src/cli/flags.rs7
-rw-r--r--crates/rust-analyzer/src/config.rs3
-rw-r--r--crates/rust-analyzer/src/integrated_benchmarks.rs3
-rw-r--r--crates/rust-analyzer/src/lsp/to_proto.rs1
-rw-r--r--crates/syntax/src/ast/node_ext.rs20
-rw-r--r--crates/test-utils/src/minicore.rs33
-rw-r--r--docs/user/generated_config.adoc5
-rw-r--r--editors/code/package.json5
36 files changed, 3171 insertions, 150 deletions
diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs
index c91a549726..519706c65f 100644
--- a/crates/hir-def/src/attr.rs
+++ b/crates/hir-def/src/attr.rs
@@ -377,27 +377,39 @@ impl AttrsWithOwner {
AttrDefId::GenericParamId(it) => match it {
GenericParamId::ConstParamId(it) => {
let src = it.parent().child_source(db);
- RawAttrs::from_attrs_owner(
- db.upcast(),
- src.with_value(&src.value[it.local_id()]),
- db.span_map(src.file_id).as_ref(),
- )
+ // FIXME: We should never be getting `None` here.
+ match src.value.get(it.local_id()) {
+ Some(val) => RawAttrs::from_attrs_owner(
+ db.upcast(),
+ src.with_value(val),
+ db.span_map(src.file_id).as_ref(),
+ ),
+ None => RawAttrs::EMPTY,
+ }
}
GenericParamId::TypeParamId(it) => {
let src = it.parent().child_source(db);
- RawAttrs::from_attrs_owner(
- db.upcast(),
- src.with_value(&src.value[it.local_id()]),
- db.span_map(src.file_id).as_ref(),
- )
+ // FIXME: We should never be getting `None` here.
+ match src.value.get(it.local_id()) {
+ Some(val) => RawAttrs::from_attrs_owner(
+ db.upcast(),
+ src.with_value(val),
+ db.span_map(src.file_id).as_ref(),
+ ),
+ None => RawAttrs::EMPTY,
+ }
}
GenericParamId::LifetimeParamId(it) => {
let src = it.parent.child_source(db);
- RawAttrs::from_attrs_owner(
- db.upcast(),
- src.with_value(&src.value[it.local_id]),
- db.span_map(src.file_id).as_ref(),
- )
+ // FIXME: We should never be getting `None` here.
+ match src.value.get(it.local_id) {
+ Some(val) => RawAttrs::from_attrs_owner(
+ db.upcast(),
+ src.with_value(val),
+ db.span_map(src.file_id).as_ref(),
+ ),
+ None => RawAttrs::EMPTY,
+ }
}
},
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 71c3f89716..b0ae437ee3 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -68,7 +68,7 @@ use crate::{
#[allow(unreachable_pub)]
pub use coerce::could_coerce;
#[allow(unreachable_pub)]
-pub use unify::could_unify;
+pub use unify::{could_unify, could_unify_deeply};
use cast::CastCheck;
pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index de23ca3499..709760b64f 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -74,6 +74,12 @@ impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
}
}
+/// Check if types unify.
+///
+/// Note that we consider placeholder types to unify with everything.
+/// This means that there may be some unresolved goals that actually set bounds for the placeholder
+/// type for the types to unify. For example `Option<T>` and `Option<U>` unify although there is
+/// unresolved goal `T = U`.
pub fn could_unify(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
@@ -82,21 +88,35 @@ pub fn could_unify(
unify(db, env, tys).is_some()
}
+/// Check if types unify eagerly making sure there are no unresolved goals.
+///
+/// This means that placeholder types are not considered to unify if there are any bounds set on
+/// them. For example `Option<T>` and `Option<U>` do not unify as we cannot show that `T = U`.
+pub fn could_unify_deeply(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+ let mut table = InferenceTable::new(db, env);
+ let vars = make_substitutions(tys, &mut table);
+ let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+ let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
+ let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars);
+ let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars);
+ table.resolve_obligations_as_possible();
+ table.propagate_diverging_flag();
+ let ty1_with_vars = table.resolve_completely(ty1_with_vars);
+ let ty2_with_vars = table.resolve_completely(ty2_with_vars);
+ table.unify_deeply(&ty1_with_vars, &ty2_with_vars)
+}
+
pub(crate) fn unify(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
tys: &Canonical<(Ty, Ty)>,
) -> Option<Substitution> {
let mut table = InferenceTable::new(db, env);
- let vars = Substitution::from_iter(
- Interner,
- tys.binders.iter(Interner).map(|it| match &it.kind {
- chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner),
- // FIXME: maybe wrong?
- chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner),
- chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
- }),
- );
+ let vars = make_substitutions(tys, &mut table);
let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
if !table.unify(&ty1_with_vars, &ty2_with_vars) {
@@ -125,6 +145,21 @@ pub(crate) fn unify(
))
}
+fn make_substitutions(
+ tys: &chalk_ir::Canonical<(chalk_ir::Ty<Interner>, chalk_ir::Ty<Interner>)>,
+ table: &mut InferenceTable<'_>,
+) -> chalk_ir::Substitution<Interner> {
+ Substitution::from_iter(
+ Interner,
+ tys.binders.iter(Interner).map(|it| match &it.kind {
+ chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner),
+ // FIXME: maybe wrong?
+ chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner),
+ chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
+ }),
+ )
+}
+
bitflags::bitflags! {
#[derive(Default, Clone, Copy)]
pub(crate) struct TypeVariableFlags: u8 {
@@ -431,6 +466,18 @@ impl<'a> InferenceTable<'a> {
true
}
+ /// Unify two relatable values (e.g. `Ty`) and check whether trait goals which arise from that could be fulfilled
+ pub(crate) fn unify_deeply<T: ?Sized + Zip<Interner>>(&mut self, ty1: &T, ty2: &T) -> bool {
+ let result = match self.try_unify(ty1, ty2) {
+ Ok(r) => r,
+ Err(_) => return false,
+ };
+ result.goals.iter().all(|goal| {
+ let canonicalized = self.canonicalize(goal.clone());
+ self.try_resolve_obligation(&canonicalized).is_some()
+ })
+ }
+
/// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
/// caller needs to deal with them.
pub(crate) fn try_unify<T: ?Sized + Zip<Interner>>(
@@ -501,7 +548,8 @@ impl<'a> InferenceTable<'a> {
fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) {
let canonicalized = self.canonicalize(goal);
- if !self.try_resolve_obligation(&canonicalized) {
+ let solution = self.try_resolve_obligation(&canonicalized);
+ if matches!(solution, Some(Solution::Ambig(_))) {
self.pending_obligations.push(canonicalized);
}
}
@@ -627,38 +675,35 @@ impl<'a> InferenceTable<'a> {
fn try_resolve_obligation(
&mut self,
canonicalized: &Canonicalized<InEnvironment<Goal>>,
- ) -> bool {
+ ) -> Option<chalk_solve::Solution<Interner>> {
let solution = self.db.trait_solve(
self.trait_env.krate,
self.trait_env.block,
canonicalized.value.clone(),
);
- match solution {
+ match &solution {
Some(Solution::Unique(canonical_subst)) => {
canonicalized.apply_solution(
self,
Canonical {
- binders: canonical_subst.binders,
+ binders: canonical_subst.binders.clone(),
// FIXME: handle constraints
- value: canonical_subst.value.subst,
+ value: canonical_subst.value.subst.clone(),
},
);
- true
}
Some(Solution::Ambig(Guidance::Definite(substs))) => {
- canonicalized.apply_solution(self, substs);
- false
+ canonicalized.apply_solution(self, substs.clone());
}
Some(_) => {
// FIXME use this when trying to resolve everything at the end
- false
}
None => {
// FIXME obligation cannot be fulfilled => diagnostic
- true
}
}
+ solution
}
pub(crate) fn callable_sig(
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 7013863334..ec97bdc2c4 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -79,8 +79,8 @@ pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*;
pub use infer::{
closure::{CaptureKind, CapturedItem},
- could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic,
- InferenceResult, OverloadedDeref, PointerCast,
+ could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode,
+ InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast,
};
pub use interner::Interner;
pub use lower::{
diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs
index 9089c11c5d..63fa87ad66 100644
--- a/crates/hir-ty/src/mir/borrowck.rs
+++ b/crates/hir-ty/src/mir/borrowck.rs
@@ -7,6 +7,7 @@ use std::iter;
use hir_def::{DefWithBodyId, HasModule};
use la_arena::ArenaMap;
+use rustc_hash::FxHashMap;
use stdx::never;
use triomphe::Arc;
@@ -14,7 +15,7 @@ use crate::{
db::{HirDatabase, InternedClosure},
mir::Operand,
utils::ClosureSubst,
- ClosureId, Interner, Ty, TyExt, TypeFlags,
+ ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags,
};
use super::{
@@ -37,10 +38,26 @@ pub struct MovedOutOfRef {
}
#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct PartiallyMoved {
+ pub ty: Ty,
+ pub span: MirSpan,
+ pub local: LocalId,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct BorrowRegion {
+ pub local: LocalId,
+ pub kind: BorrowKind,
+ pub places: Vec<MirSpan>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowckResult {
pub mir_body: Arc<MirBody>,
pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef>,
+ pub partially_moved: Vec<PartiallyMoved>,
+ pub borrow_regions: Vec<BorrowRegion>,
}
fn all_mir_bodies(
@@ -80,12 +97,26 @@ pub fn borrowck_query(
res.push(BorrowckResult {
mutability_of_locals: mutability_of_locals(db, &body),
moved_out_of_ref: moved_out_of_ref(db, &body),
+ partially_moved: partially_moved(db, &body),
+ borrow_regions: borrow_regions(db, &body),
mir_body: body,
});
})?;
Ok(res.into())
}
+fn make_fetch_closure_field(
+ db: &dyn HirDatabase,
+) -> impl FnOnce(ClosureId, &Substitution, usize) -> Ty + '_ {
+ |c: ClosureId, subst: &Substitution, f: usize| {
+ let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
+ let infer = db.infer(def);
+ let (captures, _) = infer.closure_info(&c);
+ let parent_subst = ClosureSubst(subst).parent_subst();
+ captures.get(f).expect("broken closure field").ty.clone().substitute(Interner, parent_subst)
+ }
+}
+
fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> {
let mut result = vec![];
let mut for_operand = |op: &Operand, span: MirSpan| match op {
@@ -99,18 +130,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
ty = proj.projected_ty(
ty,
db,
- |c, subst, f| {
- let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
- let infer = db.infer(def);
- let (captures, _) = infer.closure_info(&c);
- let parent_subst = ClosureSubst(subst).parent_subst();
- captures
- .get(f)
- .expect("broken closure field")
- .ty
- .clone()
- .substitute(Interner, parent_subst)
- },
+ make_fetch_closure_field(db),
body.owner.module(db.upcast()).krate(),
);
}
@@ -188,6 +208,132 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
result
}
+fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved> {
+ let mut result = vec![];
+ let mut for_operand = |op: &Operand, span: MirSpan| match op {
+ Operand::Copy(p) | Operand::Move(p) => {
+ let mut ty: Ty = body.locals[p.local].ty.clone();
+ for proj in p.projection.lookup(&body.projection_store) {
+ ty = proj.projected_ty(
+ ty,
+ db,
+ make_fetch_closure_field(db),
+ body.owner.module(db.upcast()).krate(),
+ );
+ }
+ if !ty.clone().is_copy(db, body.owner)
+ && !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR)
+ {
+ result.push(PartiallyMoved { span, ty, local: p.local });
+ }
+ }
+ Operand::Constant(_) | Operand::Static(_) => (),
+ };
+ for (_, block) in body.basic_blocks.iter() {
+ db.unwind_if_cancelled();
+ for statement in &block.statements {
+ match &statement.kind {
+ StatementKind::Assign(_, r) => match r {
+ Rvalue::ShallowInitBoxWithAlloc(_) => (),
+ Rvalue::ShallowInitBox(o, _)
+ | Rvalue::UnaryOp(_, o)
+ | Rvalue::Cast(_, o, _)
+ | Rvalue::Repeat(o, _)
+ | Rvalue::Use(o) => for_operand(o, statement.span),
+ Rvalue::CopyForDeref(_)
+ | Rvalue::Discriminant(_)
+ | Rvalue::Len(_)
+ | Rvalue::Ref(_, _) => (),
+ Rvalue::CheckedBinaryOp(_, o1, o2) => {
+ for_operand(o1, statement.span);
+ for_operand(o2, statement.span);
+ }
+ Rvalue::Aggregate(_, ops) => {
+ for op in ops.iter() {
+ for_operand(op, statement.span);
+ }
+ }
+ },
+ StatementKind::FakeRead(_)
+ | StatementKind::Deinit(_)
+ | StatementKind::StorageLive(_)
+ | StatementKind::StorageDead(_)
+ | StatementKind::Nop => (),
+ }
+ }
+ match &block.terminator {
+ Some(terminator) => match &terminator.kind {
+ TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, terminator.span),
+ TerminatorKind::FalseEdge { .. }
+ | TerminatorKind::FalseUnwind { .. }
+ | TerminatorKind::Goto { .. }
+ | TerminatorKind::UnwindResume
+ | TerminatorKind::CoroutineDrop
+ | TerminatorKind::Abort
+ | TerminatorKind::Return
+ | TerminatorKind::Unreachable
+ | TerminatorKind::Drop { .. } => (),
+ TerminatorKind::DropAndReplace { value, .. } => {
+ for_operand(value, terminator.span);
+ }
+ TerminatorKind::Call { func, args, .. } => {
+ for_operand(func, terminator.span);
+ args.iter().for_each(|it| for_operand(it, terminator.span));
+ }
+ TerminatorKind::Assert { cond, .. } => {
+ for_operand(cond, terminator.span);
+ }
+ TerminatorKind::Yield { value, .. } => {
+ for_operand(value, terminator.span);
+ }
+ },
+ None => (),
+ }
+ }
+ result.shrink_to_fit();
+ result
+}
+
+fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
+ let mut borrows = FxHashMap::default();
+ for (_, block) in body.basic_blocks.iter() {
+ db.unwind_if_cancelled();
+ for statement in &block.statements {
+ if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
+ borrows
+ .entry(p.local)
+ .and_modify(|it: &mut BorrowRegion| {
+ it.places.push(statement.span);
+ })
+ .or_insert_with(|| BorrowRegion {
+ local: p.local,
+ kind: *kind,
+ places: vec![statement.span],
+ });
+ }
+ }
+ match &block.terminator {
+ Some(terminator) => match &terminator.kind {
+ TerminatorKind::FalseEdge { .. }
+ | TerminatorKind::FalseUnwind { .. }
+ | TerminatorKind::Goto { .. }
+ | TerminatorKind::UnwindResume
+ | TerminatorKind::CoroutineDrop
+ | TerminatorKind::Abort
+ | TerminatorKind::Return
+ | TerminatorKind::Unreachable
+ | TerminatorKind::Drop { .. } => (),
+ TerminatorKind::DropAndReplace { .. } => {}
+ TerminatorKind::Call { .. } => {}
+ _ => (),
+ },
+ None => (),
+ }
+ }
+
+ borrows.into_values().collect()
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProjectionCase {
/// Projection is a local
@@ -217,18 +363,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
ty = proj.projected_ty(
ty,
db,
- |c, subst, f| {
- let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
- let infer = db.infer(def);
- let (captures, _) = infer.closure_info(&c);
- let parent_subst = ClosureSubst(subst).parent_subst();
- captures
- .get(f)
- .expect("broken closure field")
- .ty
- .clone()
- .substitute(Interner, parent_subst)
- },
+ make_fetch_closure_field(db),
body.owner.module(db.upcast()).krate(),
);
}
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 1572a6d497..0ba8a17103 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -1246,7 +1246,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(current, place, op.into(), expr_id.into());
Ok(Some(current))
}
- Expr::Underscore => not_supported!("underscore"),
+ Expr::Underscore => Ok(Some(current)),
}
}
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 32abbc80c6..08f7bb14ca 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -31,6 +31,7 @@ mod has_source;
pub mod db;
pub mod diagnostics;
pub mod symbols;
+pub mod term_search;
mod display;
@@ -1084,6 +1085,27 @@ impl Field {
Type::new(db, var_id, ty)
}
+ // FIXME: Find better API to also handle const generics
+ pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
+ let var_id = self.parent.into();
+ let def_id: AdtId = match self.parent {
+ VariantDef::Struct(it) => it.id.into(),
+ VariantDef::Union(it) => it.id.into(),
+ VariantDef::Variant(it) => it.parent_enum(db).id.into(),
+ };
+ let mut generics = generics.map(|it| it.ty.clone());
+ let substs = TyBuilder::subst_for_def(db, def_id, None)
+ .fill(|x| match x {
+ ParamKind::Type => {
+ generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
+ }
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ })
+ .build();
+ let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
+ Type::new(db, var_id, ty)
+ }
+
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty(
self.ty(db).ty,
@@ -1152,6 +1174,10 @@ impl Struct {
fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.struct_data(self.id).variant_data.clone()
}
+
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
+ db.attrs(self.id.into()).is_unstable()
+ }
}
impl HasVisibility for Struct {
@@ -1194,6 +1220,10 @@ impl Union {
fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.union_data(self.id).variant_data.clone()
}
+
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
+ db.attrs(self.id.into()).is_unstable()
+ }
}
impl HasVisibility for Union {
@@ -1269,6 +1299,10 @@ impl Enum {
pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
Adt::from(self).layout(db)
}
+
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
+ db.attrs(self.id.into()).is_unstable()
+ }
}
impl HasVisibility for Enum {
@@ -1344,6 +1378,10 @@ impl Variant {
_ => parent_layout,
})
}
+
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
+ db.attrs(self.id.into()).is_unstable()
+ }
}
/// Variants inherit visibility from the parent enum.
@@ -1394,9 +1432,9 @@ impl Adt {
/// Turns this ADT into a type with the given type parameters. This isn't
/// the greatest API, FIXME find a better one.
- pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type {
+ pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator<Item = Type>) -> Type {
let id = AdtId::from(self);
- let mut it = args.iter().map(|t| t.ty.clone());
+ let mut it = args.map(|t| t.ty.clone());
let ty = TyBuilder::def_ty(db, id.into(), None)
.fill(|x| {
let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
@@ -1789,6 +1827,35 @@ impl Function {
Type::new_with_resolver_inner(db, &resolver, ty)
}
+ // FIXME: Find better API to also handle const generics
+ pub fn ret_type_with_args(
+ self,
+ db: &dyn HirDatabase,
+ generics: impl Iterator<Item = Type>,
+ ) -> Type {
+ let resolver = self.id.resolver(db.upcast());
+ let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
+ ItemContainerId::ImplId(it) => Some(it.into()),
+ ItemContainerId::TraitId(it) => Some(it.into()),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ };
+ let mut generics = generics.map(|it| it.ty.clone());
+ let mut filler = |x: &_| match x {
+ ParamKind::Type => {
+ generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
+ }
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ };
+
+ let parent_substs =
+ parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(&mut filler).build());
+ let substs = TyBuilder::subst_for_def(db, self.id, parent_substs).fill(&mut filler).build();
+
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let ty = callable_sig.ret().clone();
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
if !self.is_async(db) {
return None;
@@ -1855,6 +1922,51 @@ impl Function {
.collect()
}
+ // FIXME: Find better API to also handle const generics
+ pub fn params_without_self_with_args(
+ self,
+ db: &dyn HirDatabase,
+ generics: impl Iterator<Item = Type>,
+ ) -> Vec<Param> {
+ let environment = db.trait_environment(self.id.into());
+ let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
+ ItemContainerId::ImplId(it) => Some(it.into()),
+ ItemContainerId::TraitId(it) => Some(it.into()),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ };
+ let mut generics = generics.map(|it| it.ty.clone());
+ let parent_substs = parent_id.map(|id| {
+ TyBuilder::subst_for_def(db, id, None)
+ .fill(|x| match x {
+ ParamKind::Type => generics
+ .next()
+ .unwrap_or_else(|| TyKind::Error.intern(Interner))
+ .cast(Interner),
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ })
+ .build()
+ });
+
+ let substs = TyBuilder::subst_for_def(db, self.id, parent_substs)
+ .fill(|_| {
+ let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
+ GenericArg::new(Interner, GenericArgData::Ty(ty))
+ })
+ .build();
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
+ callable_sig
+ .params()
+ .iter()
+ .enumerate()
+ .skip(skip)
+ .map(|(idx, ty)| {
+ let ty = Type { env: environment.clone(), ty: ty.clone() };
+ Param { func: self, ty, idx }
+ })
+ .collect()
+ }
+
pub fn is_const(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).has_const_kw()
}
@@ -1889,6 +2001,11 @@ impl Function {
db.function_data(self.id).attrs.is_bench()
}
+ /// Is this function marked as unstable with `#[feature]` attribute?
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).attrs.is_unstable()
+ }
+
pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
hir_ty::is_fn_unsafe_to_call(db, self.id)
}
@@ -2052,6 +2169,34 @@ impl SelfParam {
let ty = callable_sig.params()[0].clone();
Type { env: environment, ty }
}
+
+ // FIXME: Find better API to also handle const generics
+ pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
+ let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container {
+ ItemContainerId::ImplId(it) => it.into(),
+ ItemContainerId::TraitId(it) => it.into(),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+ panic!("Never get here")
+ }
+ };
+
+ let mut generics = generics.map(|it| it.ty.clone());
+ let mut filler = |x: &_| match x {
+ ParamKind::Type => {
+ generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
+ }
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ };
+
+ let parent_substs = TyBuilder::subst_for_def(db, parent_id, None).fill(&mut filler).build();
+ let substs =
+ TyBuilder::subst_for_def(db, self.func, Some(parent_substs)).fill(&mut filler).build();
+ let callable_sig =
+ db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
+ let environment = db.trait_environment(self.func.into());
+ let ty = callable_sig.params()[0].clone();
+ Type { env: environment, ty }
+ }
}
impl HasVisibility for Function {
@@ -2754,7 +2899,7 @@ impl GenericDef {
.collect()
}
- pub fn type_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
+ pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
let generics = db.generic_params(self.into());
generics
.type_or_consts
@@ -3126,12 +3271,16 @@ impl TypeParam {
let ty = generic_arg_from_param(db, self.id.into())?;
let resolver = self.id.parent().resolver(db.upcast());
match ty.data(Interner) {
- GenericArgData::Ty(it) => {
+ GenericArgData::Ty(it) if *it.kind(Interner) != TyKind::Error => {
Some(Type::new_with_resolver_inner(db, &resolver, it.clone()))
}
_ => None,
}
}
+
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
+ db.attrs(GenericParamId::from(self.id).into()).is_unstable()
+ }
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -3241,6 +3390,26 @@ impl TypeOrConstParam {
Either::Right(it) => it.ty(db),
}
}
+
+ pub fn as_type_param(self, db: &dyn HirDatabase) -> Option<TypeParam> {
+ let params = db.generic_params(self.id.parent);
+ match &params.type_or_consts[self.id.local_id] {
+ hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
+ Some(TypeParam { id: TypeParamId::from_unchecked(self.id) })
+ }
+ hir_def::generics::TypeOrConstParamData::ConstParamData(_) => None,
+ }
+ }
+
+ pub fn as_const_param(self, db: &dyn HirDatabase) -> Option<ConstParam> {
+ let params = db.generic_params(self.id.parent);
+ match &params.type_or_consts[self.id.local_id] {
+ hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None,
+ hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
+ Some(ConstParam { id: ConstParamId::from_unchecked(self.id) })
+ }
+ }
+ }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -3285,12 +3454,11 @@ impl Impl {
.filter(filter),
)
});
+
for id in def_crates
.iter()
.flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
.map(|Crate { id }| id)
- .chain(def_crates.iter().copied())
- .unique()
{
all.extend(
db.trait_impls_in_crate(id)
@@ -3520,7 +3688,7 @@ pub enum CaptureKind {
Move,
}
-#[derive(Clone, PartialEq, Eq, Debug)]
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct Type {
env: Arc<TraitEnvironment>,
ty: Ty,
@@ -3620,6 +3788,50 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Ref(..))
}
+ pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool {
+ return go(db, self.env.krate, &self.ty);
+
+ fn go(db: &dyn HirDatabase, krate: CrateId, ty: &Ty) -> bool {
+ match ty.kind(Interner) {
+ // Reference itself
+ TyKind::Ref(_, _, _) => true,
+
+ // For non-phantom_data adts we check variants/fields as well as generic parameters
+ TyKind::Adt(adt_id, substitution)
+ if !db.struct_datum(krate, *adt_id).flags.phantom_data =>
+ {
+ let adt_datum = &db.struct_datum(krate, *adt_id);
+ let adt_datum_bound =
+ adt_datum.binders.clone().substitute(Interner, substitution);
+ adt_datum_bound
+ .variants
+ .into_iter()
+ .flat_map(|variant| variant.fields.into_iter())
+ .any(|ty| go(db, krate, &ty))
+ || substitution
+ .iter(Interner)
+ .filter_map(|x| x.ty(Interner))
+ .any(|ty| go(db, krate, ty))
+ }
+ // And for `PhantomData<T>`, we check `T`.
+ TyKind::Adt(_, substitution)
+ | TyKind::Tuple(_, substitution)
+ | TyKind::OpaqueType(_, substitution)
+ | TyKind::AssociatedType(_, substitution)
+ | TyKind::FnDef(_, substitution) => substitution
+ .iter(Interner)
+ .filter_map(|x| x.ty(Interner))
+ .any(|ty| go(db, krate, ty)),
+
+ // For `[T]` or `*T` we check `T`
+ TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, krate, ty),
+
+ // Consider everything else as not reference
+ _ => false,
+ }
+ }
+ }
+
pub fn as_reference(&self) -> Option<(Type, Mutability)> {
let (ty, _lt, m) = self.ty.as_reference()?;
let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
@@ -3727,14 +3939,16 @@ impl Type {
)
}
+ // FIXME: Find better API that also handles const generics
pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
let mut it = args.iter().map(|t| t.ty.clone());
let trait_ref = TyBuilder::trait_ref(db, trait_.id)
.push(self.ty.clone())
.fill(|x| {
- let r = it.next().unwrap();
match x {
- ParamKind::Type => r.cast(Interner),
+ ParamKind::Type => {
+ it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
+ }
ParamKind::Const(ty) => {
// FIXME: this code is not covered in tests.
unknown_const_as_generic(ty.clone())
@@ -4368,12 +4582,24 @@ impl Type {
walk_type(db, self, &mut cb);
}
-
+ /// Check if type unifies with another type.
+ ///
+ /// Note that we consider placeholder types to unify with everything.
+/// For example `Option<T>` and `Option<U>` unify although there is an unresolved goal `T = U`.
pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
hir_ty::could_unify(db, self.env.clone(), &tys)
}
+ /// Check if type unifies with another type eagerly making sure there are no unresolved goals.
+ ///
+ /// This means that placeholder types are not considered to unify if there are any bounds set on
+/// them. For example `Option<T>` and `Option<U>` do not unify as we cannot show that `T = U`.
+ pub fn could_unify_with_deeply(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
+ hir_ty::could_unify_deeply(db, self.env.clone(), &tys)
+ }
+
pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
hir_ty::could_coerce(db, self.env.clone(), &tys)
diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs
new file mode 100644
index 0000000000..72762007dc
--- /dev/null
+++ b/crates/hir/src/term_search.rs
@@ -0,0 +1,298 @@
+//! Term search
+
+use hir_def::type_ref::Mutability;
+use hir_ty::db::HirDatabase;
+use itertools::Itertools;
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use crate::{ModuleDef, ScopeDef, Semantics, SemanticsScope, Type};
+
+mod expr;
+pub use expr::Expr;
+
+mod tactics;
+
+/// Key for lookup table to query new types reached.
+#[derive(Debug, Hash, PartialEq, Eq)]
+enum NewTypesKey {
+ ImplMethod,
+ StructProjection,
+}
+
+/// Helper enum to squash big number of alternative trees into `Many` variant as there is too many
+/// to take into account.
+#[derive(Debug)]
+enum AlternativeExprs {
+ /// There are few trees, so we keep track of them all
+ Few(FxHashSet<Expr>),
+ /// There are too many trees to keep track of
+ Many,
+}
+
+impl AlternativeExprs {
+ /// Construct alternative trees
+ ///
+ /// # Arguments
+ /// `threshold` - threshold value for many trees (more than that is many)
+ /// `exprs` - expressions iterator
+ fn new(threshold: usize, exprs: impl Iterator<Item = Expr>) -> AlternativeExprs {
+ let mut it = AlternativeExprs::Few(Default::default());
+ it.extend_with_threshold(threshold, exprs);
+ it
+ }
+
+ /// Get type trees stored in alternative trees (or `Expr::Many` in case of many)
+ ///
+ /// # Arguments
+ /// `ty` - Type of expressions queried (this is used to give type to `Expr::Many`)
+ fn exprs(&self, ty: &Type) -> Vec<Expr> {
+ match self {
+ AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(),
+ AlternativeExprs::Many => vec![Expr::Many(ty.clone())],
+ }
+ }
+
+ /// Extend alternative expressions
+ ///
+ /// # Arguments
+ /// `threshold` - threshold value for many trees (more than that is many)
+ /// `exprs` - expressions iterator
+ fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr>) {
+ match self {
+ AlternativeExprs::Few(tts) => {
+ for it in exprs {
+ if tts.len() > threshold {
+ *self = AlternativeExprs::Many;
+ break;
+ }
+
+ tts.insert(it);
+ }
+ }
+ AlternativeExprs::Many => (),
+ }
+ }
+}
+
+/// # Lookup table for term search
+///
+/// Lookup table keeps all the state during term search.
+/// This means it knows which types are reachable and how.
+///
+/// The secondary functionality for lookup table is to keep track of new types reached since last
+/// iteration as well as keeping track of which `ScopeDef` items have been used.
+/// Both of them are to speed up the term search by leaving out types / ScopeDefs that likely do
+/// not produce any new results.
+#[derive(Default, Debug)]
+struct LookupTable {
+ /// All the `Expr`s in "value" produce the type of "key"
+ data: FxHashMap<Type, AlternativeExprs>,
+ /// New types reached since last query by the `NewTypesKey`
+ new_types: FxHashMap<NewTypesKey, Vec<Type>>,
+ /// ScopeDefs that are not interesting any more
+ exhausted_scopedefs: FxHashSet<ScopeDef>,
+ /// ScopeDefs that were used in current round
+ round_scopedef_hits: FxHashSet<ScopeDef>,
+    /// Number of rounds since the scopedef was first used.
+ rounds_since_sopedef_hit: FxHashMap<ScopeDef, u32>,
+ /// Types queried but not present
+ types_wishlist: FxHashSet<Type>,
+ /// Threshold to squash trees to `Many`
+ many_threshold: usize,
+}
+
+impl LookupTable {
+ /// Initialize lookup table
+ fn new(many_threshold: usize) -> Self {
+ let mut res = Self { many_threshold, ..Default::default() };
+ res.new_types.insert(NewTypesKey::ImplMethod, Vec::new());
+ res.new_types.insert(NewTypesKey::StructProjection, Vec::new());
+ res
+ }
+
+ /// Find all `Expr`s that unify with the `ty`
+ fn find(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+ self.data
+ .iter()
+ .find(|(t, _)| t.could_unify_with_deeply(db, ty))
+ .map(|(t, tts)| tts.exprs(t))
+ }
+
+ /// Same as find but automatically creates shared reference of types in the lookup
+ ///
+    /// For example if we have type `i32` in data and we query for `&i32` it maps all the type
+ /// trees we have for `i32` with `Expr::Reference` and returns them.
+ fn find_autoref(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+ self.data
+ .iter()
+ .find(|(t, _)| t.could_unify_with_deeply(db, ty))
+ .map(|(t, it)| it.exprs(t))
+ .or_else(|| {
+ self.data
+ .iter()
+ .find(|(t, _)| {
+ Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, ty)
+ })
+ .map(|(t, it)| {
+ it.exprs(t)
+ .into_iter()
+ .map(|expr| Expr::Reference(Box::new(expr)))
+ .collect()
+ })
+ })
+ }
+
+ /// Insert new type trees for type
+ ///
+ /// Note that the types have to be the same, unification is not enough as unification is not
+ /// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
+    /// but they clearly do not unify with each other.
+ fn insert(&mut self, ty: Type, exprs: impl Iterator<Item = Expr>) {
+ match self.data.get_mut(&ty) {
+ Some(it) => it.extend_with_threshold(self.many_threshold, exprs),
+ None => {
+ self.data.insert(ty.clone(), AlternativeExprs::new(self.many_threshold, exprs));
+ for it in self.new_types.values_mut() {
+ it.push(ty.clone());
+ }
+ }
+ }
+ }
+
+ /// Iterate all the reachable types
+ fn iter_types(&self) -> impl Iterator<Item = Type> + '_ {
+ self.data.keys().cloned()
+ }
+
+ /// Query new types reached since last query by key
+ ///
+ /// Create new key if you wish to query it to avoid conflicting with existing queries.
+ fn new_types(&mut self, key: NewTypesKey) -> Vec<Type> {
+ match self.new_types.get_mut(&key) {
+ Some(it) => std::mem::take(it),
+ None => Vec::new(),
+ }
+ }
+
+ /// Mark `ScopeDef` as exhausted meaning it is not interesting for us any more
+ fn mark_exhausted(&mut self, def: ScopeDef) {
+ self.exhausted_scopedefs.insert(def);
+ }
+
+ /// Mark `ScopeDef` as used meaning we managed to produce something useful from it
+ fn mark_fulfilled(&mut self, def: ScopeDef) {
+ self.round_scopedef_hits.insert(def);
+ }
+
+ /// Start new round (meant to be called at the beginning of iteration in `term_search`)
+ ///
+    /// This function marks some `ScopeDef`s as exhausted if there have been
+ /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`.
+ fn new_round(&mut self) {
+ for def in &self.round_scopedef_hits {
+ let hits =
+ self.rounds_since_sopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0);
+ const MAX_ROUNDS_AFTER_HIT: u32 = 2;
+ if *hits > MAX_ROUNDS_AFTER_HIT {
+ self.exhausted_scopedefs.insert(*def);
+ }
+ }
+ self.round_scopedef_hits.clear();
+ }
+
+ /// Get exhausted `ScopeDef`s
+ fn exhausted_scopedefs(&self) -> &FxHashSet<ScopeDef> {
+ &self.exhausted_scopedefs
+ }
+
+ /// Types queried but not found
+ fn take_types_wishlist(&mut self) -> FxHashSet<Type> {
+ std::mem::take(&mut self.types_wishlist)
+ }
+}
+
+/// Context for the `term_search` function
+#[derive(Debug)]
+pub struct TermSearchCtx<'a, DB: HirDatabase> {
+ /// Semantics for the program
+ pub sema: &'a Semantics<'a, DB>,
+ /// Semantic scope, captures context for the term search
+ pub scope: &'a SemanticsScope<'a>,
+ /// Target / expected output type
+ pub goal: Type,
+ /// Configuration for term search
+ pub config: TermSearchConfig,
+}
+
+/// Configuration options for the term search
+#[derive(Debug, Clone, Copy)]
+pub struct TermSearchConfig {
+ /// Enable borrow checking, this guarantees the outputs of the `term_search` to borrow-check
+ pub enable_borrowcheck: bool,
+ /// Indicate when to squash multiple trees to `Many` as there are too many to keep track
+ pub many_alternatives_threshold: usize,
+    /// Depth of the search, e.g. number of cycles to run
+ pub depth: usize,
+}
+
+impl Default for TermSearchConfig {
+ fn default() -> Self {
+ Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 6 }
+ }
+}
+
+/// # Term search
+///
+/// Search for terms (expressions) that unify with the `goal` type.
+///
+/// # Arguments
+/// * `ctx` - Context for term search
+///
+/// Internally this function uses Breadth First Search to find path to `goal` type.
+/// The general idea is following:
+/// 1. Populate lookup (frontier for BFS) from values (local variables, statics, constants, etc)
+///    as well as from well-known values (such as `true/false` and `()`)
+/// 2. Iteratively expand the frontier (or contents of the lookup) by trying different type
+///    transformation tactics. For example functions take us from a set of types (arguments) to some
+/// type (return type). Other transformations include methods on type, type constructors and
+/// projections to struct fields (field access).
+/// 3. Once we manage to find path to type we are interested in we continue for single round to see
+/// if we can find more paths that take us to the `goal` type.
+/// 4. Return all the paths (type trees) that take us to the `goal` type.
+///
+/// Note that there are usually more ways we can get to the `goal` type but some are discarded to
+/// reduce the memory consumption. It is also unlikely anyone is willing to browse through
+/// thousands of possible responses so we currently take first 10 from every tactic.
+pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
+ let module = ctx.scope.module();
+ let mut defs = FxHashSet::default();
+ defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module)));
+
+ ctx.scope.process_all_names(&mut |_, def| {
+ defs.insert(def);
+ });
+
+ let mut lookup = LookupTable::new(ctx.config.many_alternatives_threshold);
+
+ // Try trivial tactic first, also populates lookup table
+ let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
+ // Use well known types tactic before iterations as it does not depend on other tactics
+ solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
+
+ for _ in 0..ctx.config.depth {
+ lookup.new_round();
+
+ solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup));
+ solutions.extend(tactics::free_function(ctx, &defs, &mut lookup));
+ solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup));
+ solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup));
+ solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup));
+
+ // Discard not interesting `ScopeDef`s for speedup
+ for def in lookup.exhausted_scopedefs() {
+ defs.remove(def);
+ }
+ }
+
+ solutions.into_iter().filter(|it| !it.is_many()).unique().collect()
+}
diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs
new file mode 100644
index 0000000000..254fbe7e2b
--- /dev/null
+++ b/crates/hir/src/term_search/expr.rs
@@ -0,0 +1,468 @@
+//! Type tree for term search
+
+use hir_def::find_path::PrefixKind;
+use hir_expand::mod_path::ModPath;
+use hir_ty::{
+ db::HirDatabase,
+ display::{DisplaySourceCodeError, HirDisplay},
+};
+use itertools::Itertools;
+
+use crate::{
+ Adt, AsAssocItem, Const, ConstParam, Field, Function, GenericDef, Local, ModuleDef,
+ SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
+};
+
+/// Helper function to get path to `ModuleDef`
+fn mod_item_path(
+ sema_scope: &SemanticsScope<'_>,
+ def: &ModuleDef,
+ prefer_no_std: bool,
+ prefer_prelude: bool,
+) -> Option<ModPath> {
+ let db = sema_scope.db;
+ // Account for locals shadowing items from module
+ let name_hit_count = def.name(db).map(|def_name| {
+ let mut name_hit_count = 0;
+ sema_scope.process_all_names(&mut |name, _| {
+ if name == def_name {
+ name_hit_count += 1;
+ }
+ });
+ name_hit_count
+ });
+
+ let m = sema_scope.module();
+ match name_hit_count {
+ Some(0..=1) | None => m.find_use_path(db.upcast(), *def, prefer_no_std, prefer_prelude),
+ Some(_) => m.find_use_path_prefixed(
+ db.upcast(),
+ *def,
+ PrefixKind::ByCrate,
+ prefer_no_std,
+ prefer_prelude,
+ ),
+ }
+}
+
+/// Helper function to get path to `ModuleDef` as string
+fn mod_item_path_str(
+ sema_scope: &SemanticsScope<'_>,
+ def: &ModuleDef,
+ prefer_no_std: bool,
+ prefer_prelude: bool,
+) -> Result<String, DisplaySourceCodeError> {
+ let path = mod_item_path(sema_scope, def, prefer_no_std, prefer_prelude);
+ path.map(|it| it.display(sema_scope.db.upcast()).to_string())
+ .ok_or(DisplaySourceCodeError::PathNotFound)
+}
+
+/// Helper function to get path to `Type`
+fn type_path(
+ sema_scope: &SemanticsScope<'_>,
+ ty: &Type,
+ prefer_no_std: bool,
+ prefer_prelude: bool,
+) -> Result<String, DisplaySourceCodeError> {
+ let db = sema_scope.db;
+ let m = sema_scope.module();
+
+ match ty.as_adt() {
+ Some(adt) => {
+ let ty_name = ty.display_source_code(db, m.id, true)?;
+
+ let mut path =
+ mod_item_path(sema_scope, &ModuleDef::Adt(adt), prefer_no_std, prefer_prelude)
+ .unwrap();
+ path.pop_segment();
+ let path = path.display(db.upcast()).to_string();
+ let res = match path.is_empty() {
+ true => ty_name,
+ false => format!("{path}::{ty_name}"),
+ };
+ Ok(res)
+ }
+ None => ty.display_source_code(db, m.id, true),
+ }
+}
+
+/// Helper function to filter out generic parameters that are default
+fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]) -> Vec<Type> {
+ def.type_or_const_params(db)
+ .into_iter()
+ .filter_map(|it| it.as_type_param(db))
+ .zip(generics)
+ .filter(|(tp, arg)| tp.default(db).as_ref() != Some(arg))
+ .map(|(_, arg)| arg.clone())
+ .collect()
+}
+
+/// Type tree shows how can we get from set of types to some type.
+///
+/// Consider the following code as an example
+/// ```
+/// fn foo(x: i32, y: bool) -> Option<i32> { None }
+/// fn bar() {
+/// let a = 1;
+/// let b = true;
+/// let c: Option<i32> = _;
+/// }
+/// ```
+/// If we generate type tree in the place of `_` we get
+/// ```txt
+/// Option<i32>
+/// |
+/// foo(i32, bool)
+/// / \
+/// a: i32 b: bool
+/// ```
+/// So in short it pretty much gives us a way to get type `Option<i32>` using the items we have in
+/// scope.
+#[derive(Debug, Clone, Eq, Hash, PartialEq)]
+pub enum Expr {
+ /// Constant
+ Const(Const),
+ /// Static variable
+ Static(Static),
+ /// Local variable
+ Local(Local),
+ /// Constant generic parameter
+ ConstParam(ConstParam),
+ /// Well known type (such as `true` for bool)
+ FamousType { ty: Type, value: &'static str },
+ /// Function call (does not take self param)
+ Function { func: Function, generics: Vec<Type>, params: Vec<Expr> },
+ /// Method call (has self param)
+ Method { func: Function, generics: Vec<Type>, target: Box<Expr>, params: Vec<Expr> },
+ /// Enum variant construction
+ Variant { variant: Variant, generics: Vec<Type>, params: Vec<Expr> },
+ /// Struct construction
+ Struct { strukt: Struct, generics: Vec<Type>, params: Vec<Expr> },
+ /// Struct field access
+ Field { expr: Box<Expr>, field: Field },
+ /// Passing type as reference (with `&`)
+ Reference(Box<Expr>),
+ /// Indicates possibility of many different options that all evaluate to `ty`
+ Many(Type),
+}
+
+impl Expr {
+ /// Generate source code for type tree.
+ ///
+ /// Note that trait imports are not added to generated code.
+ /// To make sure that the code is valid, callee has to also ensure that all the traits listed
+ /// by `traits_used` method are also imported.
+ pub fn gen_source_code(
+ &self,
+ sema_scope: &SemanticsScope<'_>,
+ many_formatter: &mut dyn FnMut(&Type) -> String,
+ prefer_no_std: bool,
+ prefer_prelude: bool,
+ ) -> Result<String, DisplaySourceCodeError> {
+ let db = sema_scope.db;
+ let mod_item_path_str = |s, def| mod_item_path_str(s, def, prefer_no_std, prefer_prelude);
+ match self {
+ Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
+ Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
+ Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()),
+ Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()),
+ Expr::FamousType { value, .. } => Ok(value.to_string()),
+ Expr::Function { func, params, .. } => {
+ let args = params
+ .iter()
+ .map(|f| {
+ f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
+ })
+ .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
+ .into_iter()
+ .join(", ");
+
+ match func.as_assoc_item(db).map(|it| it.container(db)) {
+ Some(container) => {
+ let container_name = match container {
+ crate::AssocItemContainer::Trait(trait_) => {
+ mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?
+ }
+ crate::AssocItemContainer::Impl(imp) => {
+ let self_ty = imp.self_ty(db);
+ // Should it be guaranteed that `mod_item_path` always exists?
+ match self_ty.as_adt().and_then(|adt| {
+ mod_item_path(
+ sema_scope,
+ &adt.into(),
+ prefer_no_std,
+ prefer_prelude,
+ )
+ }) {
+ Some(path) => path.display(sema_scope.db.upcast()).to_string(),
+ None => self_ty.display(db).to_string(),
+ }
+ }
+ };
+ let fn_name = func.name(db).display(db.upcast()).to_string();
+ Ok(format!("{container_name}::{fn_name}({args})"))
+ }
+ None => {
+ let fn_name = mod_item_path_str(sema_scope, &ModuleDef::Function(*func))?;
+ Ok(format!("{fn_name}({args})"))
+ }
+ }
+ }
+ Expr::Method { func, target, params, .. } => {
+ if target.contains_many_in_illegal_pos() {
+ return Ok(many_formatter(&target.ty(db)));
+ }
+
+ let func_name = func.name(db).display(db.upcast()).to_string();
+ let self_param = func.self_param(db).unwrap();
+ let target = target.gen_source_code(
+ sema_scope,
+ many_formatter,
+ prefer_no_std,
+ prefer_prelude,
+ )?;
+ let args = params
+ .iter()
+ .map(|f| {
+ f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude)
+ })
+ .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
+ .into_iter()
+ .join(", ");
+
+ match func.as_assoc_item(db).and_then(|it| it.container_or_implemented_trait(db)) {
+ Some(trait_) => {
+ let trait_name = mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?;
+ let target = match self_param.access(db) {
+ crate::Access::Shared => format!("&{target}"),
+ crate::Access::Exclusive => format!("&mut {target}"),
+ crate::Access::Owned => target,
+ };
+ let res = match args.is_empty() {
+ true => format!("{trait_name}::{func_name}({target})",),
+ false => format!("{trait_name}::{func_name}({target}, {args})",),
+ };
+ Ok(res)
+ }
+ None => Ok(format!("{target}.{func_name}({args})")),
+ }
+ }
+ Expr::Variant { variant, generics, params } => {
+ let generics = non_default_generics(db, (*variant).into(), generics);
+ let generics_str = match generics.is_empty() {
+ true => String::new(),
+ false => {
+ let generics = generics
+ .iter()
+ .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude))
+ .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
+ .into_iter()
+ .join(", ");
+ format!("::<{generics}>")
+ }
+ };
+ let inner = match variant.kind(db) {
+ StructKind::Tuple => {
+ let args = params
+ .iter()
+ .map(|f| {
+ f.gen_source_code(
+ sema_scope,
+ many_formatter,
+ prefer_no_std,
+ prefer_prelude,
+ )
+ })
+ .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
+ .into_iter()
+ .join(", ");
+ format!("{generics_str}({args})")
+ }
+ StructKind::Record => {
+ let fields = variant.fields(db);
+ let args = params
+ .iter()
+ .zip(fields.iter())
+ .map(|(a, f)| {
+ let tmp = format!(
+ "{}: {}",
+ f.name(db).display(db.upcast()),
+ a.gen_source_code(
+ sema_scope,
+ many_formatter,
+ prefer_no_std,
+ prefer_prelude
+ )?
+ );
+ Ok(tmp)
+ })
+ .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
+ .into_iter()
+ .join(", ");
+ format!("{generics_str}{{ {args} }}")
+ }
+ StructKind::Unit => generics_str,
+ };
+
+ let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?;
+ Ok(format!("{prefix}{inner}"))
+ }
+ Expr::Struct { strukt, generics, params } => {
+ let generics = non_default_generics(db, (*strukt).into(), generics);
+ let inner = match strukt.kind(db) {
+ StructKind::Tuple => {
+ let args = params
+ .iter()
+ .map(|a| {
+ a.gen_source_code(
+ sema_scope,
+ many_formatter,
+ prefer_no_std,
+ prefer_prelude,
+ )
+ })
+ .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
+ .into_iter()
+ .join(", ");
+ format!("({args})")
+ }
+ StructKind::Record => {
+ let fields = strukt.fields(db);
+ let args = params
+ .iter()
+ .zip(fields.iter())
+ .map(|(a, f)| {
+ let tmp = format!(
+ "{}: {}",
+ f.name(db).display(db.upcast()),
+ a.gen_source_code(
+ sema_scope,
+ many_formatter,
+ prefer_no_std,
+ prefer_prelude
+ )?
+ );
+ Ok(tmp)
+ })
+ .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
+ .into_iter()
+ .join(", ");
+ format!(" {{ {args} }}")
+ }
+ StructKind::Unit => match generics.is_empty() {
+ true => String::new(),
+ false => {
+ let generics = generics
+ .iter()
+ .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude))
+ .collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
+ .into_iter()
+ .join(", ");
+ format!("::<{generics}>")
+ }
+ },
+ };
+
+ let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?;
+ Ok(format!("{prefix}{inner}"))
+ }
+ Expr::Field { expr, field } => {
+ if expr.contains_many_in_illegal_pos() {
+ return Ok(many_formatter(&expr.ty(db)));
+ }
+
+ let strukt = expr.gen_source_code(
+ sema_scope,
+ many_formatter,
+ prefer_no_std,
+ prefer_prelude,
+ )?;
+ let field = field.name(db).display(db.upcast()).to_string();
+ Ok(format!("{strukt}.{field}"))
+ }
+ Expr::Reference(expr) => {
+ if expr.contains_many_in_illegal_pos() {
+ return Ok(many_formatter(&expr.ty(db)));
+ }
+
+ let inner = expr.gen_source_code(
+ sema_scope,
+ many_formatter,
+ prefer_no_std,
+ prefer_prelude,
+ )?;
+ Ok(format!("&{inner}"))
+ }
+ Expr::Many(ty) => Ok(many_formatter(ty)),
+ }
+ }
+
+ /// Get type of the type tree.
+ ///
+ /// Same as getting the type of root node
+ pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ match self {
+ Expr::Const(it) => it.ty(db),
+ Expr::Static(it) => it.ty(db),
+ Expr::Local(it) => it.ty(db),
+ Expr::ConstParam(it) => it.ty(db),
+ Expr::FamousType { ty, .. } => ty.clone(),
+ Expr::Function { func, generics, .. } => {
+ func.ret_type_with_args(db, generics.iter().cloned())
+ }
+ Expr::Method { func, generics, target, .. } => func.ret_type_with_args(
+ db,
+ target.ty(db).type_arguments().chain(generics.iter().cloned()),
+ ),
+ Expr::Variant { variant, generics, .. } => {
+ Adt::from(variant.parent_enum(db)).ty_with_args(db, generics.iter().cloned())
+ }
+ Expr::Struct { strukt, generics, .. } => {
+ Adt::from(*strukt).ty_with_args(db, generics.iter().cloned())
+ }
+ Expr::Field { expr, field } => field.ty_with_args(db, expr.ty(db).type_arguments()),
+ Expr::Reference(it) => it.ty(db),
+ Expr::Many(ty) => ty.clone(),
+ }
+ }
+
+ /// List the traits used in type tree
+ pub fn traits_used(&self, db: &dyn HirDatabase) -> Vec<Trait> {
+ let mut res = Vec::new();
+
+ if let Expr::Method { func, params, .. } = self {
+ res.extend(params.iter().flat_map(|it| it.traits_used(db)));
+ if let Some(it) = func.as_assoc_item(db) {
+ if let Some(it) = it.container_or_implemented_trait(db) {
+ res.push(it);
+ }
+ }
+ }
+
+ res
+ }
+
+    /// Check if the tree contains `Expr::Many` variant in illegal place to insert `todo`,
+ /// `unimplemented` or similar macro
+ ///
+ /// Some examples are following
+ /// ```no_compile
+ /// macro!().foo
+ /// macro!().bar()
+ /// &macro!()
+ /// ```
+ fn contains_many_in_illegal_pos(&self) -> bool {
+ match self {
+ Expr::Method { target, .. } => target.contains_many_in_illegal_pos(),
+ Expr::Field { expr, .. } => expr.contains_many_in_illegal_pos(),
+ Expr::Reference(target) => target.is_many(),
+ Expr::Many(_) => true,
+ _ => false,
+ }
+ }
+
+ /// Helper function to check if outermost type tree is `Expr::Many` variant
+ pub fn is_many(&self) -> bool {
+ matches!(self, Expr::Many(_))
+ }
+}
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
new file mode 100644
index 0000000000..666d63ac15
--- /dev/null
+++ b/crates/hir/src/term_search/tactics.rs
@@ -0,0 +1,859 @@
+//! Tactics for term search
+//!
+//! All the tactics take following arguments
+//! * `ctx` - Context for the term search
+//! * `defs` - Set of items in scope at term search target location
+//! * `lookup` - Lookup table for types
+//! And they return an iterator that yields type trees that unify with the `goal` type.
+
+use std::iter;
+
+use hir_ty::db::HirDatabase;
+use hir_ty::mir::BorrowKind;
+use hir_ty::TyBuilder;
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+
+use crate::{
+ Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type,
+ TypeParam, Variant,
+};
+
+use crate::term_search::{Expr, TermSearchConfig};
+
+use super::{LookupTable, NewTypesKey, TermSearchCtx};
+
+/// # Trivial tactic
+///
+/// Attempts to fulfill the goal by trying items in scope
+/// Also works as a starting point to move all items in scope to lookup table.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+///
+/// Returns iterator that yields elements that unify with `goal`.
+///
+/// _Note that there is no use in calling this tactic in every iteration as the output does not
+/// depend on the current state of `lookup`_
+pub(super) fn trivial<'a, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'a, DB>,
+ defs: &'a FxHashSet<ScopeDef>,
+ lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+ let db = ctx.sema.db;
+ defs.iter().filter_map(|def| {
+ let expr = match def {
+ ScopeDef::ModuleDef(ModuleDef::Const(it)) => Some(Expr::Const(*it)),
+ ScopeDef::ModuleDef(ModuleDef::Static(it)) => Some(Expr::Static(*it)),
+ ScopeDef::GenericParam(GenericParam::ConstParam(it)) => Some(Expr::ConstParam(*it)),
+ ScopeDef::Local(it) => {
+ if ctx.config.enable_borrowcheck {
+ let borrowck = db.borrowck(it.parent).ok()?;
+
+ let invalid = borrowck.iter().any(|b| {
+ b.partially_moved.iter().any(|moved| {
+ Some(&moved.local) == b.mir_body.binding_locals.get(it.binding_id)
+ }) || b.borrow_regions.iter().any(|region| {
+ // Shared borrows are fine
+ Some(&region.local) == b.mir_body.binding_locals.get(it.binding_id)
+ && region.kind != BorrowKind::Shared
+ })
+ });
+
+ if invalid {
+ return None;
+ }
+ }
+
+ Some(Expr::Local(*it))
+ }
+ _ => None,
+ }?;
+
+ lookup.mark_exhausted(*def);
+
+ let ty = expr.ty(db);
+ lookup.insert(ty.clone(), std::iter::once(expr.clone()));
+
+ // Don't suggest local references as they are not valid for return
+ if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) {
+ return None;
+ }
+
+ ty.could_unify_with_deeply(db, &ctx.goal).then_some(expr)
+ })
+}
+
+/// # Type constructor tactic
+///
+/// Attempts different type constructors for enums and structs in scope
+///
+/// Updates lookup by new types reached and returns iterator that yields
+/// elements that unify with `goal`.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+pub(super) fn type_constructor<'a, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'a, DB>,
+ defs: &'a FxHashSet<ScopeDef>,
+ lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+ let db = ctx.sema.db;
+ let module = ctx.scope.module();
+ fn variant_helper(
+ db: &dyn HirDatabase,
+ lookup: &mut LookupTable,
+ parent_enum: Enum,
+ variant: Variant,
+ goal: &Type,
+ config: &TermSearchConfig,
+ ) -> Vec<(Type, Vec<Expr>)> {
+ // Ignore unstable
+ if variant.is_unstable(db) {
+ return Vec::new();
+ }
+
+ let generics = GenericDef::from(variant.parent_enum(db));
+ let Some(type_params) = generics
+ .type_or_const_params(db)
+ .into_iter()
+ .map(|it| it.as_type_param(db))
+ .collect::<Option<Vec<TypeParam>>>()
+ else {
+ // Ignore enums with const generics
+ return Vec::new();
+ };
+
+ // We currently do not check lifetime bounds so ignore all types that have something to do
+ // with them
+ if !generics.lifetime_params(db).is_empty() {
+ return Vec::new();
+ }
+
+ // Only account for stable type parameters for now, unstable params can be default
+        // though, for example in `Box<T, #[unstable] A: Allocator>`
+ if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
+ return Vec::new();
+ }
+
+ let non_default_type_params_len =
+ type_params.iter().filter(|it| it.default(db).is_none()).count();
+
+ let generic_params = lookup
+ .iter_types()
+ .collect::<Vec<_>>() // Force take ownership
+ .into_iter()
+ .permutations(non_default_type_params_len);
+
+ generic_params
+ .filter_map(move |generics| {
+ // Insert default type params
+ let mut g = generics.into_iter();
+ let generics: Vec<_> = type_params
+ .iter()
+ .map(|it| it.default(db).unwrap_or_else(|| g.next().expect("No generic")))
+ .collect();
+
+ let enum_ty = Adt::from(parent_enum).ty_with_args(db, generics.iter().cloned());
+
+ // Allow types with generics only if they take us straight to goal for
+ // performance reasons
+ if !generics.is_empty() && !enum_ty.could_unify_with_deeply(db, goal) {
+ return None;
+ }
+
+ // Ignore types that have something to do with lifetimes
+ if config.enable_borrowcheck && enum_ty.contains_reference(db) {
+ return None;
+ }
+
+ // Early exit if some param cannot be filled from lookup
+ let param_exprs: Vec<Vec<Expr>> = variant
+ .fields(db)
+ .into_iter()
+ .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
+ .collect::<Option<_>>()?;
+
+ // Note that we need special case for 0 param constructors because of multi cartesian
+ // product
+ let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ vec![Expr::Variant { variant, generics: generics.clone(), params: Vec::new() }]
+ } else {
+ param_exprs
+ .into_iter()
+ .multi_cartesian_product()
+ .map(|params| Expr::Variant { variant, generics: generics.clone(), params })
+ .collect()
+ };
+ lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned());
+
+ Some((enum_ty, variant_exprs))
+ })
+ .collect()
+ }
+ defs.iter()
+ .filter_map(move |def| match def {
+ ScopeDef::ModuleDef(ModuleDef::Variant(it)) => {
+ let variant_exprs =
+ variant_helper(db, lookup, it.parent_enum(db), *it, &ctx.goal, &ctx.config);
+ if variant_exprs.is_empty() {
+ return None;
+ }
+ lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it)));
+ Some(variant_exprs)
+ }
+ ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => {
+ let exprs: Vec<(Type, Vec<Expr>)> = enum_
+ .variants(db)
+ .into_iter()
+ .flat_map(|it| variant_helper(db, lookup, *enum_, it, &ctx.goal, &ctx.config))
+ .collect();
+
+ if !exprs.is_empty() {
+ lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_))));
+ }
+
+ Some(exprs)
+ }
+ ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => {
+ // Ignore unstable and not visible
+ if it.is_unstable(db) || !it.is_visible_from(db, module) {
+ return None;
+ }
+
+ let generics = GenericDef::from(*it);
+
+ // Ignore const params for now
+ let type_params = generics
+ .type_or_const_params(db)
+ .into_iter()
+ .map(|it| it.as_type_param(db))
+ .collect::<Option<Vec<TypeParam>>>()?;
+
+ // We currently do not check lifetime bounds so ignore all types that have something to do
+ // with them
+ if !generics.lifetime_params(db).is_empty() {
+ return None;
+ }
+
+ // Only account for stable type parameters for now, unstable params can be default
+                // though, for example in `Box<T, #[unstable] A: Allocator>`
+ if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
+ return None;
+ }
+
+ let non_default_type_params_len =
+ type_params.iter().filter(|it| it.default(db).is_none()).count();
+
+ let generic_params = lookup
+ .iter_types()
+ .collect::<Vec<_>>() // Force take ownership
+ .into_iter()
+ .permutations(non_default_type_params_len);
+
+ let exprs = generic_params
+ .filter_map(|generics| {
+ // Insert default type params
+ let mut g = generics.into_iter();
+ let generics: Vec<_> = type_params
+ .iter()
+ .map(|it| {
+ it.default(db)
+ .unwrap_or_else(|| g.next().expect("Missing type param"))
+ })
+ .collect();
+
+ let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned());
+
+ // Allow types with generics only if they take us straight to goal for
+ // performance reasons
+ if non_default_type_params_len != 0
+ && struct_ty.could_unify_with_deeply(db, &ctx.goal)
+ {
+ return None;
+ }
+
+ // Ignore types that have something to do with lifetimes
+ if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) {
+ return None;
+ }
+ let fileds = it.fields(db);
+ // Check if all fields are visible, otherwise we cannot fill them
+ if fileds.iter().any(|it| !it.is_visible_from(db, module)) {
+ return None;
+ }
+
+ // Early exit if some param cannot be filled from lookup
+ let param_exprs: Vec<Vec<Expr>> = fileds
+ .into_iter()
+ .map(|field| lookup.find(db, &field.ty(db)))
+ .collect::<Option<_>>()?;
+
+ // Note that we need special case for 0 param constructors because of multi cartesian
+ // product
+ let struct_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }]
+ } else {
+ param_exprs
+ .into_iter()
+ .multi_cartesian_product()
+ .map(|params| Expr::Struct {
+ strukt: *it,
+ generics: generics.clone(),
+ params,
+ })
+ .collect()
+ };
+
+ lookup
+ .mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(*it))));
+ lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned());
+
+ Some((struct_ty, struct_exprs))
+ })
+ .collect();
+ Some(exprs)
+ }
+ _ => None,
+ })
+ .flatten()
+ .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
+ .flatten()
+}
+
+/// # Free function tactic
+///
+/// Attempts to call different functions in scope with parameters from lookup table.
+/// Functions that include generics are not used for performance reasons.
+///
+/// Updates lookup by new types reached and returns iterator that yields
+/// elements that unify with `goal`.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+pub(super) fn free_function<'a, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'a, DB>,
+ defs: &'a FxHashSet<ScopeDef>,
+ lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+ let db = ctx.sema.db;
+ let module = ctx.scope.module();
+ defs.iter()
+ .filter_map(move |def| match def {
+ ScopeDef::ModuleDef(ModuleDef::Function(it)) => {
+ let generics = GenericDef::from(*it);
+
+ // Ignore const params for now
+ let type_params = generics
+ .type_or_const_params(db)
+ .into_iter()
+ .map(|it| it.as_type_param(db))
+ .collect::<Option<Vec<TypeParam>>>()?;
+
+ // Ignore lifetimes as we do not check them
+ if !generics.lifetime_params(db).is_empty() {
+ return None;
+ }
+
+ // Only account for stable type parameters for now; unstable params can still have
+ // defaults, for example in `Box<T, #[unstable] A: Allocator>`
+ if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
+ return None;
+ }
+
+ let non_default_type_params_len =
+ type_params.iter().filter(|it| it.default(db).is_none()).count();
+
+ // Ignore bigger number of generics for now as they kill the performance
+ if non_default_type_params_len > 0 {
+ return None;
+ }
+
+ let generic_params = lookup
+ .iter_types()
+ .collect::<Vec<_>>() // Force take ownership
+ .into_iter()
+ .permutations(non_default_type_params_len);
+
+ let exprs: Vec<_> = generic_params
+ .filter_map(|generics| {
+ // Insert default type params
+ let mut g = generics.into_iter();
+ let generics: Vec<_> = type_params
+ .iter()
+ .map(|it| match it.default(db) {
+ Some(ty) => Some(ty),
+ None => {
+ let generic = g.next().expect("Missing type param");
+ // Filter out generics that do not unify due to trait bounds
+ it.ty(db).could_unify_with(db, &generic).then_some(generic)
+ }
+ })
+ .collect::<Option<_>>()?;
+
+ let ret_ty = it.ret_type_with_args(db, generics.iter().cloned());
+ // Filter out private and unsafe functions
+ if !it.is_visible_from(db, module)
+ || it.is_unsafe_to_call(db)
+ || it.is_unstable(db)
+ || ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
+ || ret_ty.is_raw_ptr()
+ {
+ return None;
+ }
+
+ // Early exit if some param cannot be filled from lookup
+ let param_exprs: Vec<Vec<Expr>> = it
+ .params_without_self_with_args(db, generics.iter().cloned())
+ .into_iter()
+ .map(|field| {
+ let ty = field.ty();
+ match ty.is_mutable_reference() {
+ true => None,
+ false => lookup.find_autoref(db, ty),
+ }
+ })
+ .collect::<Option<_>>()?;
+
+ // Note that we need special case for 0 param constructors because of multi cartesian
+ // product
+ let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ vec![Expr::Function { func: *it, generics, params: Vec::new() }]
+ } else {
+ param_exprs
+ .into_iter()
+ .multi_cartesian_product()
+ .map(|params| Expr::Function {
+ func: *it,
+ generics: generics.clone(),
+
+ params,
+ })
+ .collect()
+ };
+
+ lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it)));
+ lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
+ Some((ret_ty, fn_exprs))
+ })
+ .collect();
+ Some(exprs)
+ }
+ _ => None,
+ })
+ .flatten()
+ .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
+ .flatten()
+}
+
+/// # Impl method tactic
+///
+/// Attempts to call methods on types from lookup table.
+/// This includes both functions from direct impl blocks as well as functions from traits.
+/// Methods defined in generic impl blocks as well as methods that themselves have
+/// generics are ignored for performance reasons.
+///
+/// Updates lookup by new types reached and returns iterator that yields
+/// elements that unify with `goal`.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+pub(super) fn impl_method<'a, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'a, DB>,
+ _defs: &'a FxHashSet<ScopeDef>,
+ lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+ let db = ctx.sema.db;
+ let module = ctx.scope.module();
+ lookup
+ .new_types(NewTypesKey::ImplMethod)
+ .into_iter()
+ .flat_map(|ty| {
+ Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
+ })
+ .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item)))
+ .filter_map(|(imp, ty, it)| match it {
+ AssocItem::Function(f) => Some((imp, ty, f)),
+ _ => None,
+ })
+ .filter_map(move |(imp, ty, it)| {
+ let fn_generics = GenericDef::from(it);
+ let imp_generics = GenericDef::from(imp);
+
+ // Ignore const params for now
+ let imp_type_params = imp_generics
+ .type_or_const_params(db)
+ .into_iter()
+ .map(|it| it.as_type_param(db))
+ .collect::<Option<Vec<TypeParam>>>()?;
+
+ // Ignore const params for now
+ let fn_type_params = fn_generics
+ .type_or_const_params(db)
+ .into_iter()
+ .map(|it| it.as_type_param(db))
+ .collect::<Option<Vec<TypeParam>>>()?;
+
+ // Ignore all functions that have something to do with lifetimes as we don't check them
+ if !fn_generics.lifetime_params(db).is_empty() {
+ return None;
+ }
+
+ // Ignore functions without self param
+ if !it.has_self_param(db) {
+ return None;
+ }
+
+ // Filter out private and unsafe functions
+ if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) {
+ return None;
+ }
+
+ // Only account for stable type parameters for now; unstable params can still have
+ // defaults, for example in `Box<T, #[unstable] A: Allocator>`
+ if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
+ || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
+ {
+ return None;
+ }
+
+ let non_default_type_params_len = imp_type_params
+ .iter()
+ .chain(fn_type_params.iter())
+ .filter(|it| it.default(db).is_none())
+ .count();
+
+ // Ignore bigger number of generics for now as they kill the performance
+ if non_default_type_params_len > 0 {
+ return None;
+ }
+
+ let generic_params = lookup
+ .iter_types()
+ .collect::<Vec<_>>() // Force take ownership
+ .into_iter()
+ .permutations(non_default_type_params_len);
+
+ let exprs: Vec<_> = generic_params
+ .filter_map(|generics| {
+ // Insert default type params
+ let mut g = generics.into_iter();
+ let generics: Vec<_> = imp_type_params
+ .iter()
+ .chain(fn_type_params.iter())
+ .map(|it| match it.default(db) {
+ Some(ty) => Some(ty),
+ None => {
+ let generic = g.next().expect("Missing type param");
+ // Filter out generics that do not unify due to trait bounds
+ it.ty(db).could_unify_with(db, &generic).then_some(generic)
+ }
+ })
+ .collect::<Option<_>>()?;
+
+ let ret_ty = it.ret_type_with_args(
+ db,
+ ty.type_arguments().chain(generics.iter().cloned()),
+ );
+ // Filter out functions that return references
+ if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
+ || ret_ty.is_raw_ptr()
+ {
+ return None;
+ }
+
+ // Ignore functions that do not change the type
+ if ty.could_unify_with_deeply(db, &ret_ty) {
+ return None;
+ }
+
+ let self_ty = it
+ .self_param(db)
+ .expect("No self param")
+ .ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned()));
+
+ // Ignore functions that have different self type
+ if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) {
+ return None;
+ }
+
+ let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");
+
+ // Early exit if some param cannot be filled from lookup
+ let param_exprs: Vec<Vec<Expr>> = it
+ .params_without_self_with_args(
+ db,
+ ty.type_arguments().chain(generics.iter().cloned()),
+ )
+ .into_iter()
+ .map(|field| lookup.find_autoref(db, field.ty()))
+ .collect::<Option<_>>()?;
+
+ let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
+ .chain(param_exprs)
+ .multi_cartesian_product()
+ .map(|params| {
+ let mut params = params.into_iter();
+ let target = Box::new(params.next().unwrap());
+ Expr::Method {
+ func: it,
+ generics: generics.clone(),
+ target,
+ params: params.collect(),
+ }
+ })
+ .collect();
+
+ lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
+ Some((ret_ty, fn_exprs))
+ })
+ .collect();
+ Some(exprs)
+ })
+ .flatten()
+ .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
+ .flatten()
+}
+
+/// # Struct projection tactic
+///
+/// Attempts different struct fields (`foo.bar.baz`)
+///
+/// Updates lookup by new types reached and returns iterator that yields
+/// elements that unify with `goal`.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+pub(super) fn struct_projection<'a, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'a, DB>,
+ _defs: &'a FxHashSet<ScopeDef>,
+ lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+ let db = ctx.sema.db;
+ let module = ctx.scope.module();
+ lookup
+ .new_types(NewTypesKey::StructProjection)
+ .into_iter()
+ .map(|ty| (ty.clone(), lookup.find(db, &ty).expect("Expr not in lookup")))
+ .flat_map(move |(ty, targets)| {
+ ty.fields(db).into_iter().filter_map(move |(field, field_ty)| {
+ if !field.is_visible_from(db, module) {
+ return None;
+ }
+ let exprs = targets
+ .clone()
+ .into_iter()
+ .map(move |target| Expr::Field { field, expr: Box::new(target) });
+ Some((field_ty, exprs))
+ })
+ })
+ .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
+ .flatten()
+}
+
+/// # Famous types tactic
+///
+/// Attempts different values of well known types such as `true` or `false`.
+///
+/// Updates lookup by new types reached and returns iterator that yields
+/// elements that unify with `goal`.
+///
+/// _Note that there is no point in calling it iteratively as the output is always the same_
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+pub(super) fn famous_types<'a, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'a, DB>,
+ _defs: &'a FxHashSet<ScopeDef>,
+ lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+ let db = ctx.sema.db;
+ let module = ctx.scope.module();
+ [
+ Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "true" },
+ Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "false" },
+ Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" },
+ ]
+ .into_iter()
+ .map(|exprs| {
+ lookup.insert(exprs.ty(db), std::iter::once(exprs.clone()));
+ exprs
+ })
+ .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal))
+}
+
+/// # Impl static method (without self type) tactic
+///
+/// Attempts different functions from impl blocks that take no self parameter.
+///
+/// Updates lookup by new types reached and returns iterator that yields
+/// elements that unify with `goal`.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+pub(super) fn impl_static_method<'a, DB: HirDatabase>(
+ ctx: &'a TermSearchCtx<'a, DB>,
+ _defs: &'a FxHashSet<ScopeDef>,
+ lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+ let db = ctx.sema.db;
+ let module = ctx.scope.module();
+ lookup
+ .take_types_wishlist()
+ .into_iter()
+ .chain(iter::once(ctx.goal.clone()))
+ .flat_map(|ty| {
+ Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
+ })
+ .filter(|(_, imp)| !imp.is_unsafe(db))
+ .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item)))
+ .filter_map(|(imp, ty, it)| match it {
+ AssocItem::Function(f) => Some((imp, ty, f)),
+ _ => None,
+ })
+ .filter_map(move |(imp, ty, it)| {
+ let fn_generics = GenericDef::from(it);
+ let imp_generics = GenericDef::from(imp);
+
+ // Ignore const params for now
+ let imp_type_params = imp_generics
+ .type_or_const_params(db)
+ .into_iter()
+ .map(|it| it.as_type_param(db))
+ .collect::<Option<Vec<TypeParam>>>()?;
+
+ // Ignore const params for now
+ let fn_type_params = fn_generics
+ .type_or_const_params(db)
+ .into_iter()
+ .map(|it| it.as_type_param(db))
+ .collect::<Option<Vec<TypeParam>>>()?;
+
+ // Ignore all functions that have something to do with lifetimes as we don't check them
+ if !fn_generics.lifetime_params(db).is_empty()
+ || !imp_generics.lifetime_params(db).is_empty()
+ {
+ return None;
+ }
+
+ // Ignore functions with self param
+ if it.has_self_param(db) {
+ return None;
+ }
+
+ // Filter out private and unsafe functions
+ if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) {
+ return None;
+ }
+
+ // Only account for stable type parameters for now; unstable params can still have
+ // defaults, for example in `Box<T, #[unstable] A: Allocator>`
+ if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
+ || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
+ {
+ return None;
+ }
+
+ let non_default_type_params_len = imp_type_params
+ .iter()
+ .chain(fn_type_params.iter())
+ .filter(|it| it.default(db).is_none())
+ .count();
+
+ // Ignore bigger number of generics for now as they kill the performance
+ if non_default_type_params_len > 1 {
+ return None;
+ }
+
+ let generic_params = lookup
+ .iter_types()
+ .collect::<Vec<_>>() // Force take ownership
+ .into_iter()
+ .permutations(non_default_type_params_len);
+
+ let exprs: Vec<_> = generic_params
+ .filter_map(|generics| {
+ // Insert default type params
+ let mut g = generics.into_iter();
+ let generics: Vec<_> = imp_type_params
+ .iter()
+ .chain(fn_type_params.iter())
+ .map(|it| match it.default(db) {
+ Some(ty) => Some(ty),
+ None => {
+ let generic = g.next().expect("Missing type param");
+ // Filter out generics that violate trait bounds or do not unify
+ let bounds_met = it.trait_bounds(db)
+ .into_iter()
+ .all(|bound| generic.impls_trait(db, bound, &[]));
+ (bounds_met && it.ty(db).could_unify_with(db, &generic)).then_some(generic)
+ }
+ })
+ .collect::<Option<_>>()?;
+
+ let ret_ty = it.ret_type_with_args(
+ db,
+ ty.type_arguments().chain(generics.iter().cloned()),
+ );
+ // Filter out functions that return references
+ if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
+ || ret_ty.is_raw_ptr()
+ {
+ return None;
+ }
+
+ // Ignore functions that do not change the type
+ // if ty.could_unify_with_deeply(db, &ret_ty) {
+ // return None;
+ // }
+
+ // Early exit if some param cannot be filled from lookup
+ let param_exprs: Vec<Vec<Expr>> = it
+ .params_without_self_with_args(
+ db,
+ ty.type_arguments().chain(generics.iter().cloned()),
+ )
+ .into_iter()
+ .map(|field| lookup.find_autoref(db, field.ty()))
+ .collect::<Option<_>>()?;
+
+ // Note that we need special case for 0 param constructors because of multi cartesian
+ // product
+ let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+ vec![Expr::Function { func: it, generics, params: Vec::new() }]
+ } else {
+ param_exprs
+ .into_iter()
+ .multi_cartesian_product()
+ .map(|params| Expr::Function {
+ func: it,
+ generics: generics.clone(),
+ params,
+ })
+ .collect()
+ };
+
+ lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
+ Some((ret_ty, fn_exprs))
+ })
+ .collect();
+ Some(exprs)
+ })
+ .flatten()
+ .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
+ .flatten()
+}
diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs
new file mode 100644
index 0000000000..51a1a406f3
--- /dev/null
+++ b/crates/ide-assists/src/handlers/term_search.rs
@@ -0,0 +1,253 @@
+//! Term search assist
+use hir::term_search::TermSearchCtx;
+use ide_db::{
+ assists::{AssistId, AssistKind, GroupLabel},
+ famous_defs::FamousDefs,
+};
+
+use itertools::Itertools;
+use syntax::{ast, AstNode};
+
+use crate::assist_context::{AssistContext, Assists};
+
+pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
+ let syntax = unexpanded.syntax();
+ let goal_range = syntax.text_range();
+
+ let parent = syntax.parent()?;
+ let scope = ctx.sema.scope(&parent)?;
+
+ let macro_call = ctx.sema.resolve_macro_call(&unexpanded)?;
+
+ let famous_defs = FamousDefs(&ctx.sema, scope.krate());
+ let std_todo = famous_defs.core_macros_todo()?;
+ let std_unimplemented = famous_defs.core_macros_unimplemented()?;
+
+ if macro_call != std_todo && macro_call != std_unimplemented {
+ return None;
+ }
+
+ let target_ty = ctx.sema.type_of_expr(&ast::Expr::cast(parent.clone())?)?.adjusted();
+
+ let term_search_ctx = TermSearchCtx {
+ sema: &ctx.sema,
+ scope: &scope,
+ goal: target_ty,
+ config: Default::default(),
+ };
+ let paths = hir::term_search::term_search(&term_search_ctx);
+
+ if paths.is_empty() {
+ return None;
+ }
+
+ let mut formatter = |_: &hir::Type| String::from("todo!()");
+
+ let paths = paths
+ .into_iter()
+ .filter_map(|path| {
+ path.gen_source_code(
+ &scope,
+ &mut formatter,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
+ .ok()
+ })
+ .unique();
+
+ for code in paths {
+ acc.add_group(
+ &GroupLabel(String::from("Term search")),
+ AssistId("term_search", AssistKind::Generate),
+ format!("Replace todo!() with {code}"),
+ goal_range,
+ |builder| {
+ builder.replace(goal_range, code);
+ },
+ );
+ }
+
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_complete_local() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+fn f() { let a: u128 = 1; let b: u128 = todo$0!() }"#,
+ r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
+ )
+ }
+
+ #[test]
+ fn test_complete_todo_with_msg() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#,
+ r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
+ )
+ }
+
+ #[test]
+ fn test_complete_unimplemented_with_msg() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+fn f() { let a: u128 = 1; let b: u128 = unimplemented$0!("asd") }"#,
+ r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
+ )
+ }
+
+ #[test]
+ fn test_complete_unimplemented() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+fn f() { let a: u128 = 1; let b: u128 = unimplemented$0!() }"#,
+ r#"fn f() { let a: u128 = 1; let b: u128 = a }"#,
+ )
+ }
+
+ #[test]
+ fn test_complete_struct_field() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+struct A { pub x: i32, y: bool }
+fn f() { let a = A { x: 1, y: true }; let b: i32 = todo$0!(); }"#,
+ r#"struct A { pub x: i32, y: bool }
+fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#,
+ )
+ }
+
+ #[test]
+ fn test_enum_with_generics() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented, option
+fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
+ r#"fn f() { let a: i32 = 1; let b: Option<i32> = None; }"#,
+ )
+ }
+
+ #[test]
+ fn test_enum_with_generics2() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+enum Option<T> { None, Some(T) }
+fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
+ r#"enum Option<T> { None, Some(T) }
+fn f() { let a: i32 = 1; let b: Option<i32> = Option::Some(a); }"#,
+ )
+ }
+
+ #[test]
+ fn test_enum_with_generics3() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+enum Option<T> { None, Some(T) }
+fn f() { let a: Option<i32> = Option::None; let b: Option<Option<i32>> = todo$0!(); }"#,
+ r#"enum Option<T> { None, Some(T) }
+fn f() { let a: Option<i32> = Option::None; let b: Option<Option<i32>> = Option::Some(a); }"#,
+ )
+ }
+
+ #[test]
+ fn test_enum_with_generics4() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+enum Foo<T = i32> { Foo(T) }
+fn f() { let a = 0; let b: Foo = todo$0!(); }"#,
+ r#"enum Foo<T = i32> { Foo(T) }
+fn f() { let a = 0; let b: Foo = Foo::Foo(a); }"#,
+ );
+
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+enum Foo<T = i32> { Foo(T) }
+fn f() { let a: Foo<u32> = Foo::Foo(0); let b: Foo<u32> = todo$0!(); }"#,
+ r#"enum Foo<T = i32> { Foo(T) }
+fn f() { let a: Foo<u32> = Foo::Foo(0); let b: Foo<u32> = a; }"#,
+ )
+ }
+
+ #[test]
+ fn test_newtype() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+struct Foo(i32);
+fn f() { let a: i32 = 1; let b: Foo = todo$0!(); }"#,
+ r#"struct Foo(i32);
+fn f() { let a: i32 = 1; let b: Foo = Foo(a); }"#,
+ )
+ }
+
+ #[test]
+ fn test_shadowing() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = todo$0!(); }"#,
+ r#"fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = b; }"#,
+ )
+ }
+
+ #[test]
+ fn test_famous_bool() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+fn f() { let a: bool = todo$0!(); }"#,
+ r#"fn f() { let a: bool = false; }"#,
+ )
+ }
+
+ #[test]
+ fn test_fn_with_reference_types() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+fn f(a: &i32) -> f32 { a as f32 }
+fn g() { let a = 1; let b: f32 = todo$0!(); }"#,
+ r#"fn f(a: &i32) -> f32 { a as f32 }
+fn g() { let a = 1; let b: f32 = f(&a); }"#,
+ )
+ }
+
+ #[test]
+ fn test_fn_with_reference_types2() {
+ check_assist(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+fn f(a: &i32) -> f32 { a as f32 }
+fn g() { let a = &1; let b: f32 = todo$0!(); }"#,
+ r#"fn f(a: &i32) -> f32 { a as f32 }
+fn g() { let a = &1; let b: f32 = f(a); }"#,
+ )
+ }
+
+ #[test]
+ fn test_fn_with_reference_types3() {
+ check_assist_not_applicable(
+ term_search,
+ r#"//- minicore: todo, unimplemented
+ fn f(a: &i32) -> f32 { a as f32 }
+ fn g() { let a = &mut 1; let b: f32 = todo$0!(); }"#,
+ )
+ }
+}
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index 2fec104323..287062005d 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -210,6 +210,7 @@ mod handlers {
mod replace_turbofish_with_explicit_type;
mod sort_items;
mod split_import;
+ mod term_search;
mod toggle_ignore;
mod unmerge_match_arm;
mod unmerge_use;
@@ -332,6 +333,7 @@ mod handlers {
replace_arith_op::replace_arith_with_saturating,
sort_items::sort_items,
split_import::split_import,
+ term_search::term_search,
toggle_ignore::toggle_ignore,
unmerge_match_arm::unmerge_match_arm,
unmerge_use::unmerge_use,
diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs
index ba3c0cf3fd..1ea7220960 100644
--- a/crates/ide-completion/src/completions.rs
+++ b/crates/ide-completion/src/completions.rs
@@ -40,7 +40,8 @@ use crate::{
literal::{render_struct_literal, render_variant_lit},
macro_::render_macro,
pattern::{render_struct_pat, render_variant_pat},
- render_field, render_path_resolution, render_pattern_resolution, render_tuple_field,
+ render_expr, render_field, render_path_resolution, render_pattern_resolution,
+ render_tuple_field,
type_alias::{render_type_alias, render_type_alias_with_eq},
union_literal::render_union_literal,
RenderContext,
@@ -157,6 +158,12 @@ impl Completions {
item.add_to(self, ctx.db);
}
+ pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) {
+ if let Some(item) = render_expr(ctx, expr) {
+ item.add_to(self, ctx.db)
+ }
+ }
+
pub(crate) fn add_crate_roots(
&mut self,
ctx: &CompletionContext<'_>,
@@ -694,6 +701,7 @@ pub(super) fn complete_name_ref(
match &path_ctx.kind {
PathKind::Expr { expr_ctx } => {
expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx);
+ expr::complete_expr(acc, ctx);
dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx);
item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx);
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 77fd5dd98b..802e9bc3a8 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -328,3 +328,59 @@ pub(crate) fn complete_expr_path(
}
}
}
+
+pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>) {
+ let _p = tracing::span!(tracing::Level::INFO, "complete_expr").entered();
+
+ if !ctx.config.enable_term_search {
+ return;
+ }
+
+ if !ctx.qualifier_ctx.none() {
+ return;
+ }
+
+ if let Some(ty) = &ctx.expected_type {
+ // Ignore unit types as they are not very interesting
+ if ty.is_unit() || ty.is_unknown() {
+ return;
+ }
+
+ let term_search_ctx = hir::term_search::TermSearchCtx {
+ sema: &ctx.sema,
+ scope: &ctx.scope,
+ goal: ty.clone(),
+ config: hir::term_search::TermSearchConfig {
+ enable_borrowcheck: false,
+ many_alternatives_threshold: 1,
+ depth: 6,
+ },
+ };
+ let exprs = hir::term_search::term_search(&term_search_ctx);
+ for expr in exprs {
+ // Expand method calls
+ match expr {
+ hir::term_search::Expr::Method { func, generics, target, params }
+ if target.is_many() =>
+ {
+ let target_ty = target.ty(ctx.db);
+ let term_search_ctx =
+ hir::term_search::TermSearchCtx { goal: target_ty, ..term_search_ctx };
+ let target_exprs = hir::term_search::term_search(&term_search_ctx);
+
+ for expr in target_exprs {
+ let expanded_expr = hir::term_search::Expr::Method {
+ func,
+ generics: generics.clone(),
+ target: Box::new(expr),
+ params: params.clone(),
+ };
+
+ acc.add_expr(ctx, &expanded_expr)
+ }
+ }
+ _ => acc.add_expr(ctx, &expr),
+ }
+ }
+ }
+}
diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs
index ed5ddde8fb..04563fb0f4 100644
--- a/crates/ide-completion/src/config.rs
+++ b/crates/ide-completion/src/config.rs
@@ -14,6 +14,7 @@ pub struct CompletionConfig {
pub enable_imports_on_the_fly: bool,
pub enable_self_on_the_fly: bool,
pub enable_private_editable: bool,
+ pub enable_term_search: bool,
pub full_function_signatures: bool,
pub callable: Option<CallableSnippets>,
pub snippet_cap: Option<SnippetCap>,
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 8552a20392..17dfcc08ca 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -297,6 +297,7 @@ pub enum CompletionItemKind {
Method,
Snippet,
UnresolvedReference,
+ Expression,
}
impl_from!(SymbolKind for CompletionItemKind);
@@ -341,6 +342,7 @@ impl CompletionItemKind {
CompletionItemKind::Method => "me",
CompletionItemKind::Snippet => "sn",
CompletionItemKind::UnresolvedReference => "??",
+ CompletionItemKind::Expression => "ex",
}
}
}
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 2ed080a834..88dc3b5cbe 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -17,7 +17,7 @@ use ide_db::{
imports::import_assets::LocatedImport,
RootDatabase, SnippetCap, SymbolKind,
};
-use syntax::{format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange};
+use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange};
use text_edit::TextEdit;
use crate::{
@@ -272,6 +272,82 @@ pub(crate) fn render_resolution_with_import_pat(
Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution))
}
+pub(crate) fn render_expr(
+ ctx: &CompletionContext<'_>,
+ expr: &hir::term_search::Expr,
+) -> Option<Builder> {
+ let mut i = 1;
+ let mut snippet_formatter = |ty: &hir::Type| {
+ let arg_name = ty
+ .as_adt()
+ .and_then(|adt| adt.name(ctx.db).as_text())
+ .map(|s| stdx::to_lower_snake_case(s.as_str()))
+ .unwrap_or_else(|| String::from("_"));
+ let res = format!("${{{i}:{arg_name}}}");
+ i += 1;
+ res
+ };
+
+ let mut label_formatter = |ty: &hir::Type| {
+ ty.as_adt()
+ .and_then(|adt| adt.name(ctx.db).as_text())
+ .map(|s| stdx::to_lower_snake_case(s.as_str()))
+ .unwrap_or_else(|| String::from("..."))
+ };
+
+ let label = expr
+ .gen_source_code(
+ &ctx.scope,
+ &mut label_formatter,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
+ .ok()?;
+
+ let source_range = match ctx.original_token.parent() {
+ Some(node) => match node.ancestors().find_map(ast::Path::cast) {
+ Some(path) => path.syntax().text_range(),
+ None => node.text_range(),
+ },
+ None => ctx.source_range(),
+ };
+
+ let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label.clone());
+
+ let snippet = format!(
+ "{}$0",
+ expr.gen_source_code(
+ &ctx.scope,
+ &mut snippet_formatter,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude
+ )
+ .ok()?
+ );
+ let edit = TextEdit::replace(source_range, snippet);
+ item.snippet_edit(ctx.config.snippet_cap?, edit);
+ item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
+ item.set_relevance(crate::CompletionRelevance {
+ type_match: compute_type_match(ctx, &expr.ty(ctx.db)),
+ ..Default::default()
+ });
+ for trait_ in expr.traits_used(ctx.db) {
+ let trait_item = hir::ItemInNs::from(hir::ModuleDef::from(trait_));
+ let Some(path) = ctx.module.find_use_path(
+ ctx.db,
+ trait_item,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ ) else {
+ continue;
+ };
+
+ item.add_import(LocatedImport::new(path, trait_item, trait_item));
+ }
+
+ Some(item)
+}
+
fn scope_def_to_name(
resolution: ScopeDef,
ctx: &RenderContext<'_>,
@@ -961,6 +1037,7 @@ fn func(input: Struct) { }
st Self [type]
sp Self [type]
st Struct [type]
+ ex Struct [type]
lc self [local]
fn func(…) []
me self.test() []
@@ -985,6 +1062,9 @@ fn main() {
"#,
expect![[r#"
lc input [type+name+local]
+ ex input [type]
+ ex true [type]
+ ex false [type]
lc inputbad [local]
fn main() []
fn test(…) []
@@ -1665,6 +1745,10 @@ fn f() { A { bar: b$0 }; }
expect![[r#"
fn bar() [type+name]
fn baz() [type]
+ ex baz() [type]
+ ex bar() [type]
+ ex A { bar: baz() }.bar [type]
+ ex A { bar: bar() }.bar [type]
st A []
fn f() []
"#]],
@@ -1749,6 +1833,8 @@ fn main() {
lc s [type+name+local]
st S [type]
st S [type]
+ ex s [type]
+ ex S [type]
fn foo(…) []
fn main() []
"#]],
@@ -1766,6 +1852,8 @@ fn main() {
lc ssss [type+local]
st S [type]
st S [type]
+ ex ssss [type]
+ ex S [type]
fn foo(…) []
fn main() []
"#]],
@@ -1798,6 +1886,8 @@ fn main() {
}
"#,
expect![[r#"
+ ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
+ ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify]
lc m [local]
lc t [local]
lc &t [type+local]
@@ -1846,6 +1936,8 @@ fn main() {
}
"#,
expect![[r#"
+ ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify]
+ ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify]
lc m [local]
lc t [local]
lc &mut t [type+local]
@@ -1894,6 +1986,8 @@ fn bar(t: Foo) {}
ev Foo::A [type]
ev Foo::B [type]
en Foo [type]
+ ex Foo::A [type]
+ ex Foo::B [type]
fn bar(…) []
fn foo() []
"#]],
@@ -1947,6 +2041,8 @@ fn main() {
}
"#,
expect![[r#"
+ ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
+ ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify]
st S []
st &S [type]
st S []
@@ -2160,6 +2256,7 @@ fn foo() {
"#,
expect![[r#"
lc foo [type+local]
+ ex foo [type]
ev Foo::A(…) [type_could_unify]
ev Foo::B [type_could_unify]
en Foo [type_could_unify]
diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs
index 154b69875a..1f032c7df4 100644
--- a/crates/ide-completion/src/tests.rs
+++ b/crates/ide-completion/src/tests.rs
@@ -65,6 +65,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: false,
+ enable_term_search: true,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index 78907a2896..556d3872d7 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -97,6 +97,11 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
kw unsafe
kw while
kw while let
+ ex ifletlocal
+ ex letlocal
+ ex matcharm
+ ex param1
+ ex param2
"#]],
);
}
@@ -241,6 +246,8 @@ fn complete_in_block() {
sn macro_rules
sn pd
sn ppd
+ ex false
+ ex true
"#]],
)
}
@@ -682,7 +689,9 @@ fn main() {
}
"#,
expect![[r#"
- fn test() fn() -> Zulu
+ fn test() fn() -> Zulu
+ ex Zulu
+ ex Zulu::test()
"#]],
);
}
diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs
index 18afde1b7c..e64ec74c61 100644
--- a/crates/ide-completion/src/tests/record.rs
+++ b/crates/ide-completion/src/tests/record.rs
@@ -192,6 +192,8 @@ fn main() {
bt u32 u32
kw crate::
kw self::
+ ex Foo::default()
+ ex foo
"#]],
);
check(
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index a87d16c789..ff32eccfbf 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -225,10 +225,10 @@ impl S {
fn foo() { let _ = lib::S::$0 }
"#,
expect![[r#"
- ct PUBLIC_CONST pub const PUBLIC_CONST: u32
- fn public_method() fn()
- ta PublicType pub type PublicType = u32
- "#]],
+ ct PUBLIC_CONST pub const PUBLIC_CONST: u32
+ fn public_method() fn()
+ ta PublicType pub type PublicType = u32
+ "#]],
);
}
@@ -242,8 +242,8 @@ impl U { fn m() { } }
fn foo() { let _ = U::$0 }
"#,
expect![[r#"
- fn m() fn()
- "#]],
+ fn m() fn()
+ "#]],
);
}
@@ -256,8 +256,8 @@ trait Trait { fn m(); }
fn foo() { let _ = Trait::$0 }
"#,
expect![[r#"
- fn m() (as Trait) fn()
- "#]],
+ fn m() (as Trait) fn()
+ "#]],
);
}
@@ -273,8 +273,8 @@ impl Trait for S {}
fn foo() { let _ = S::$0 }
"#,
expect![[r#"
- fn m() (as Trait) fn()
- "#]],
+ fn m() (as Trait) fn()
+ "#]],
);
}
@@ -290,8 +290,8 @@ impl Trait for S {}
fn foo() { let _ = <S as Trait>::$0 }
"#,
expect![[r#"
- fn m() (as Trait) fn()
- "#]],
+ fn m() (as Trait) fn()
+ "#]],
);
}
@@ -396,9 +396,9 @@ macro_rules! foo { () => {} }
fn main() { let _ = crate::$0 }
"#,
expect![[r#"
- fn main() fn()
- ma foo!(…) macro_rules! foo
- "#]],
+ fn main() fn()
+ ma foo!(…) macro_rules! foo
+ "#]],
);
}
@@ -694,8 +694,10 @@ fn bar() -> Bar {
}
"#,
expect![[r#"
- fn foo() (as Foo) fn() -> Self
- "#]],
+ fn foo() (as Foo) fn() -> Self
+ ex Bar
+ ex bar()
+ "#]],
);
}
@@ -722,6 +724,8 @@ fn bar() -> Bar {
expect![[r#"
fn bar() fn()
fn foo() (as Foo) fn() -> Self
+ ex Bar
+ ex bar()
"#]],
);
}
@@ -748,6 +752,8 @@ fn bar() -> Bar {
"#,
expect![[r#"
fn foo() (as Foo) fn() -> Self
+ ex Bar
+ ex bar()
"#]],
);
}
diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs
index 4edfa37b32..3106772e63 100644
--- a/crates/ide-db/src/famous_defs.rs
+++ b/crates/ide-db/src/famous_defs.rs
@@ -114,6 +114,14 @@ impl FamousDefs<'_, '_> {
self.find_function("core:mem:drop")
}
+ pub fn core_macros_todo(&self) -> Option<Macro> {
+ self.find_macro("core:todo")
+ }
+
+ pub fn core_macros_unimplemented(&self) -> Option<Macro> {
+ self.find_macro("core:unimplemented")
+ }
+
pub fn builtin_crates(&self) -> impl Iterator<Item = Crate> {
IntoIterator::into_iter([
self.std(),
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 3862acc2af..7e1811b4ca 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -148,7 +148,7 @@ impl<'a> PathTransform<'a> {
let mut defaulted_params: Vec<DefaultedParam> = Default::default();
self.generic_def
.into_iter()
- .flat_map(|it| it.type_params(db))
+ .flat_map(|it| it.type_or_const_params(db))
.skip(skip)
// The actual list of trait type parameters may be longer than the one
// used in the `impl` block due to trailing default type parameters.
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index e93eea8ce2..8c97281b78 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -112,7 +112,8 @@ fn add_missing_ok_or_some(
let variant_name = if Some(expected_enum) == core_result { "Ok" } else { "Some" };
- let wrapped_actual_ty = expected_adt.ty_with_args(ctx.sema.db, &[d.actual.clone()]);
+ let wrapped_actual_ty =
+ expected_adt.ty_with_args(ctx.sema.db, std::iter::once(d.actual.clone()));
if !d.expected.could_unify_with(ctx.sema.db, &wrapped_actual_ty) {
return None;
diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs
index 6441343eba..56c8181e84 100644
--- a/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -1,14 +1,20 @@
-use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, StructKind};
+use hir::{
+ db::ExpandDatabase,
+ term_search::{term_search, TermSearchCtx},
+ ClosureStyle, HirDisplay,
+};
use ide_db::{
assists::{Assist, AssistId, AssistKind, GroupLabel},
label::Label,
source_change::SourceChange,
};
-use syntax::AstNode;
+use itertools::Itertools;
use text_edit::TextEdit;
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use syntax::AstNode;
+
// Diagnostic: typed-hole
//
// This diagnostic is triggered when an underscore expression is used in an invalid position.
@@ -36,50 +42,54 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
let (original_range, _) =
d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?;
- let mut assists = vec![];
- scope.process_all_names(&mut |name, def| {
- let ty = match def {
- hir::ScopeDef::ModuleDef(it) => match it {
- hir::ModuleDef::Function(it) => it.ty(db),
- hir::ModuleDef::Adt(hir::Adt::Struct(it)) if it.kind(db) != StructKind::Record => {
- it.constructor_ty(db)
- }
- hir::ModuleDef::Variant(it) if it.kind(db) != StructKind::Record => {
- it.constructor_ty(db)
- }
- hir::ModuleDef::Const(it) => it.ty(db),
- hir::ModuleDef::Static(it) => it.ty(db),
- _ => return,
- },
- hir::ScopeDef::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db),
- hir::ScopeDef::Local(it) => it.ty(db),
- _ => return,
- };
- // FIXME: should also check coercions if it is at a coercion site
- if !ty.contains_unknown() && ty.could_unify_with(db, &d.expected) {
- assists.push(Assist {
- id: AssistId("typed-hole", AssistKind::QuickFix),
- label: Label::new(format!("Replace `_` with `{}`", name.display(db))),
- group: Some(GroupLabel("Replace `_` with a matching entity in scope".to_owned())),
- target: original_range.range,
- source_change: Some(SourceChange::from_text_edit(
- original_range.file_id,
- TextEdit::replace(original_range.range, name.display(db).to_string()),
- )),
- trigger_signature_help: false,
- });
- }
- });
- if assists.is_empty() {
- None
- } else {
+
+ let term_search_ctx = TermSearchCtx {
+ sema: &ctx.sema,
+ scope: &scope,
+ goal: d.expected.clone(),
+ config: Default::default(),
+ };
+ let paths = term_search(&term_search_ctx);
+
+ let mut formatter = |_: &hir::Type| String::from("_");
+
+ let assists: Vec<Assist> = paths
+ .into_iter()
+ .filter_map(|path| {
+ path.gen_source_code(
+ &scope,
+ &mut formatter,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
+ .ok()
+ })
+ .unique()
+ .map(|code| Assist {
+ id: AssistId("typed-hole", AssistKind::QuickFix),
+ label: Label::new(format!("Replace `_` with `{}`", &code)),
+ group: Some(GroupLabel("Replace `_` with a term".to_owned())),
+ target: original_range.range,
+ source_change: Some(SourceChange::from_text_edit(
+ original_range.file_id,
+ TextEdit::replace(original_range.range, code),
+ )),
+ trigger_signature_help: false,
+ })
+ .collect();
+
+ if !assists.is_empty() {
Some(assists)
+ } else {
+ None
}
}
#[cfg(test)]
mod tests {
- use crate::tests::{check_diagnostics, check_fixes};
+ use crate::tests::{
+ check_diagnostics, check_fixes_unordered, check_has_fix, check_has_single_fix,
+ };
#[test]
fn unknown() {
@@ -99,7 +109,7 @@ fn main() {
r#"
fn main() {
if _ {}
- //^ error: invalid `_` expression, expected type `bool`
+ //^ 💡 error: invalid `_` expression, expected type `bool`
let _: fn() -> i32 = _;
//^ error: invalid `_` expression, expected type `fn() -> i32`
let _: fn() -> () = _; // FIXME: This should trigger an assist because `main` matches via *coercion*
@@ -129,7 +139,7 @@ fn main() {
fn main() {
let mut x = t();
x = _;
- //^ 💡 error: invalid `_` expression, expected type `&str`
+ //^ error: invalid `_` expression, expected type `&str`
x = "";
}
fn t<T>() -> T { loop {} }
@@ -143,7 +153,8 @@ fn t<T>() -> T { loop {} }
r#"
fn main() {
let _x = [(); _];
- let _y: [(); 10] = [(); _];
+ // FIXME: This should trigger an error
+ // let _y: [(); 10] = [(); _];
_ = 0;
(_,) = (1,);
}
@@ -153,7 +164,7 @@ fn main() {
#[test]
fn check_quick_fix() {
- check_fixes(
+ check_fixes_unordered(
r#"
enum Foo {
Bar
@@ -175,7 +186,7 @@ use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
- let _: Foo = local;
+ let _: Foo = Bar;
//^ error: invalid `_` expression, expected type `fn()`
}
"#,
@@ -187,7 +198,7 @@ use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
- let _: Foo = param;
+ let _: Foo = local;
//^ error: invalid `_` expression, expected type `fn()`
}
"#,
@@ -199,7 +210,7 @@ use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
- let _: Foo = CP;
+ let _: Foo = param;
//^ error: invalid `_` expression, expected type `fn()`
}
"#,
@@ -211,7 +222,7 @@ use Foo::Bar;
const C: Foo = Foo::Bar;
fn main<const CP: Foo>(param: Foo) {
let local = Foo::Bar;
- let _: Foo = Bar;
+ let _: Foo = CP;
//^ error: invalid `_` expression, expected type `fn()`
}
"#,
@@ -230,4 +241,153 @@ fn main<const CP: Foo>(param: Foo) {
],
);
}
+
+ #[test]
+ fn local_item_use_trait() {
+ check_has_fix(
+ r#"
+struct Bar;
+struct Baz;
+trait Foo {
+ fn foo(self) -> Bar;
+}
+impl Foo for Baz {
+ fn foo(self) -> Bar {
+ unimplemented!()
+ }
+}
+fn asd() -> Bar {
+ let a = Baz;
+ _$0
+}
+"#,
+ r"
+struct Bar;
+struct Baz;
+trait Foo {
+ fn foo(self) -> Bar;
+}
+impl Foo for Baz {
+ fn foo(self) -> Bar {
+ unimplemented!()
+ }
+}
+fn asd() -> Bar {
+ let a = Baz;
+ Foo::foo(a)
+}
+",
+ );
+ }
+
+ #[test]
+ fn init_struct() {
+ check_has_fix(
+ r#"struct Abc {}
+struct Qwe { a: i32, b: Abc }
+fn main() {
+ let a: i32 = 1;
+ let c: Qwe = _$0;
+}"#,
+ r#"struct Abc {}
+struct Qwe { a: i32, b: Abc }
+fn main() {
+ let a: i32 = 1;
+ let c: Qwe = Qwe { a: a, b: Abc { } };
+}"#,
+ );
+ }
+
+ #[test]
+ fn ignore_impl_func_with_incorrect_return() {
+ check_has_single_fix(
+ r#"
+struct Bar {}
+trait Foo {
+ type Res;
+ fn foo(&self) -> Self::Res;
+}
+impl Foo for i32 {
+ type Res = Self;
+ fn foo(&self) -> Self::Res { 1 }
+}
+fn main() {
+ let a: i32 = 1;
+ let c: Bar = _$0;
+}"#,
+ r#"
+struct Bar {}
+trait Foo {
+ type Res;
+ fn foo(&self) -> Self::Res;
+}
+impl Foo for i32 {
+ type Res = Self;
+ fn foo(&self) -> Self::Res { 1 }
+}
+fn main() {
+ let a: i32 = 1;
+ let c: Bar = Bar { };
+}"#,
+ );
+ }
+
+ #[test]
+ fn use_impl_func_with_correct_return() {
+ check_has_fix(
+ r#"
+struct Bar {}
+struct A;
+trait Foo {
+ type Res;
+ fn foo(&self) -> Self::Res;
+}
+impl Foo for A {
+ type Res = Bar;
+ fn foo(&self) -> Self::Res { Bar { } }
+}
+fn main() {
+ let a = A;
+ let c: Bar = _$0;
+}"#,
+ r#"
+struct Bar {}
+struct A;
+trait Foo {
+ type Res;
+ fn foo(&self) -> Self::Res;
+}
+impl Foo for A {
+ type Res = Bar;
+ fn foo(&self) -> Self::Res { Bar { } }
+}
+fn main() {
+ let a = A;
+ let c: Bar = Foo::foo(&a);
+}"#,
+ );
+ }
+
+ #[test]
+ fn local_shadow_fn() {
+ check_fixes_unordered(
+ r#"
+fn f() {
+ let f: i32 = 0;
+ _$0
+}"#,
+ vec![
+ r#"
+fn f() {
+ let f: i32 = 0;
+ ()
+}"#,
+ r#"
+fn f() {
+ let f: i32 = 0;
+ crate::f()
+}"#,
+ ],
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs
index b62bb5affd..4e4a851f67 100644
--- a/crates/ide-diagnostics/src/tests.rs
+++ b/crates/ide-diagnostics/src/tests.rs
@@ -91,6 +91,91 @@ fn check_nth_fix_with_config(
assert_eq_text!(&after, &actual);
}
+pub(crate) fn check_fixes_unordered(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
+ for ra_fixture_after in ra_fixtures_after.iter() {
+ check_has_fix(ra_fixture_before, ra_fixture_after)
+ }
+}
+
+#[track_caller]
+pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
+ let after = trim_indent(ra_fixture_after);
+
+ let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
+ let mut conf = DiagnosticsConfig::test_sample();
+ conf.expr_fill_default = ExprFillDefaultMode::Default;
+ let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
+ .into_iter()
+ .find(|d| {
+ d.fixes
+ .as_ref()
+ .and_then(|fixes| {
+ fixes.iter().find(|fix| {
+ if !fix.target.contains_inclusive(file_position.offset) {
+ return false;
+ }
+ let actual = {
+ let source_change = fix.source_change.as_ref().unwrap();
+ let file_id = *source_change.source_file_edits.keys().next().unwrap();
+ let mut actual = db.file_text(file_id).to_string();
+
+ for (edit, snippet_edit) in source_change.source_file_edits.values() {
+ edit.apply(&mut actual);
+ if let Some(snippet_edit) = snippet_edit {
+ snippet_edit.apply(&mut actual);
+ }
+ }
+ actual
+ };
+ after == actual
+ })
+ })
+ .is_some()
+ });
+ assert!(fix.is_some(), "no diagnostic with desired fix");
+}
+
+#[track_caller]
+pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
+ let after = trim_indent(ra_fixture_after);
+
+ let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
+ let mut conf = DiagnosticsConfig::test_sample();
+ conf.expr_fill_default = ExprFillDefaultMode::Default;
+ let mut n_fixes = 0;
+ let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
+ .into_iter()
+ .find(|d| {
+ d.fixes
+ .as_ref()
+ .and_then(|fixes| {
+ n_fixes += fixes.len();
+ fixes.iter().find(|fix| {
+ if !fix.target.contains_inclusive(file_position.offset) {
+ return false;
+ }
+ let actual = {
+ let source_change = fix.source_change.as_ref().unwrap();
+ let file_id = *source_change.source_file_edits.keys().next().unwrap();
+ let mut actual = db.file_text(file_id).to_string();
+
+ for (edit, snippet_edit) in source_change.source_file_edits.values() {
+ edit.apply(&mut actual);
+ if let Some(snippet_edit) = snippet_edit {
+ snippet_edit.apply(&mut actual);
+ }
+ }
+ actual
+ };
+ after == actual
+ })
+ })
+ .is_some()
+ });
+ assert!(fix.is_some(), "no diagnostic with desired fix");
+ assert_eq!(n_fixes, 1, "Too many fixes suggested");
+}
+
/// Checks that there's a diagnostic *without* fix at `$0`.
pub(crate) fn check_no_fix(ra_fixture: &str) {
let (db, file_position) = RootDatabase::with_position(ra_fixture);
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 30bfe6ee9d..69ddc1e45e 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -7263,8 +7263,8 @@ impl Iterator for S {
file_id: FileId(
1,
),
- full_range: 6157..6365,
- focus_range: 6222..6228,
+ full_range: 6290..6498,
+ focus_range: 6355..6361,
name: "Future",
kind: Trait,
container_name: "future",
@@ -7277,8 +7277,8 @@ impl Iterator for S {
file_id: FileId(
1,
),
- full_range: 6995..7461,
- focus_range: 7039..7047,
+ full_range: 7128..7594,
+ focus_range: 7172..7180,
name: "Iterator",
kind: Trait,
container_name: "iterator",
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 2741b45222..ce7e3b3cd6 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -32,7 +32,7 @@ use oorandom::Rand32;
use profile::{Bytes, StopWatch};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rayon::prelude::*;
-use rustc_hash::FxHashSet;
+use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{AstNode, SyntaxNode};
use vfs::{AbsPathBuf, FileId, Vfs, VfsPath};
@@ -91,7 +91,7 @@ impl flags::AnalysisStats {
};
let (host, vfs, _proc_macro) =
- load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+ load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?;
let db = host.raw_database();
eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
eprint!(" (metadata {metadata_time}");
@@ -232,7 +232,11 @@ impl flags::AnalysisStats {
}
if self.run_all_ide_things {
- self.run_ide_things(host.analysis(), file_ids);
+ self.run_ide_things(host.analysis(), file_ids.clone());
+ }
+
+ if self.run_term_search {
+ self.run_term_search(&workspace, db, &vfs, file_ids, verbosity);
}
let total_span = analysis_sw.elapsed();
@@ -321,6 +325,212 @@ impl flags::AnalysisStats {
report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms");
}
+ fn run_term_search(
+ &self,
+ ws: &ProjectWorkspace,
+ db: &RootDatabase,
+ vfs: &Vfs,
+ mut file_ids: Vec<FileId>,
+ verbosity: Verbosity,
+ ) {
+ let cargo_config = CargoConfig {
+ sysroot: match self.no_sysroot {
+ true => None,
+ false => Some(RustLibSource::Discover),
+ },
+ ..Default::default()
+ };
+
+ let mut bar = match verbosity {
+ Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
+ _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
+ _ => ProgressReport::new(file_ids.len() as u64),
+ };
+
+ file_ids.sort();
+ file_ids.dedup();
+
+ #[derive(Debug, Default)]
+ struct Acc {
+ tail_expr_syntax_hits: u64,
+ tail_expr_no_term: u64,
+ total_tail_exprs: u64,
+ error_codes: FxHashMap<String, u32>,
+ syntax_errors: u32,
+ }
+
+ let mut acc: Acc = Default::default();
+ bar.tick();
+ let mut sw = self.stop_watch();
+
+ for &file_id in &file_ids {
+ let sema = hir::Semantics::new(db);
+ let _ = db.parse(file_id);
+
+ let parse = sema.parse(file_id);
+ let file_txt = db.file_text(file_id);
+ let path = vfs.file_path(file_id).as_path().unwrap().to_owned();
+
+ for node in parse.syntax().descendants() {
+ let expr = match syntax::ast::Expr::cast(node.clone()) {
+ Some(it) => it,
+ None => continue,
+ };
+ let block = match syntax::ast::BlockExpr::cast(expr.syntax().clone()) {
+ Some(it) => it,
+ None => continue,
+ };
+ let target_ty = match sema.type_of_expr(&expr) {
+ Some(it) => it.adjusted(),
+ None => continue, // Failed to infer type
+ };
+
+ let expected_tail = match block.tail_expr() {
+ Some(it) => it,
+ None => continue,
+ };
+
+ if expected_tail.is_block_like() {
+ continue;
+ }
+
+ let range = sema.original_range(expected_tail.syntax()).range;
+ let original_text: String = db
+ .file_text(file_id)
+ .chars()
+ .skip(usize::from(range.start()))
+ .take(usize::from(range.end()) - usize::from(range.start()))
+ .collect();
+
+ let scope = match sema.scope(expected_tail.syntax()) {
+ Some(it) => it,
+ None => continue,
+ };
+
+ let ctx = hir::term_search::TermSearchCtx {
+ sema: &sema,
+ scope: &scope,
+ goal: target_ty,
+ config: hir::term_search::TermSearchConfig {
+ enable_borrowcheck: true,
+ ..Default::default()
+ },
+ };
+ let found_terms = hir::term_search::term_search(&ctx);
+
+ if found_terms.is_empty() {
+ acc.tail_expr_no_term += 1;
+ acc.total_tail_exprs += 1;
+ // println!("\n{}\n", &original_text);
+ continue;
+ };
+
+ fn trim(s: &str) -> String {
+ s.chars().filter(|c| !c.is_whitespace()).collect()
+ }
+
+ let todo = syntax::ast::make::ext::expr_todo().to_string();
+ let mut formatter = |_: &hir::Type| todo.clone();
+ let mut syntax_hit_found = false;
+ for term in found_terms {
+ let generated =
+ term.gen_source_code(&scope, &mut formatter, false, true).unwrap();
+ syntax_hit_found |= trim(&original_text) == trim(&generated);
+
+ // Validate if type-checks
+ let mut txt = file_txt.to_string();
+
+ let edit = ide::TextEdit::replace(range, generated.clone());
+ edit.apply(&mut txt);
+
+ if self.validate_term_search {
+ std::fs::write(&path, txt).unwrap();
+
+ let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap();
+ if let Some(err) = res.error() {
+ if err.contains("error: could not compile") {
+ if let Some(mut err_idx) = err.find("error[E") {
+ err_idx += 7;
+ let err_code = &err[err_idx..err_idx + 4];
+ match err_code {
+ "0282" => continue, // Byproduct of testing method
+ "0277" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882
+ _ => (),
+ }
+ bar.println(err);
+ bar.println(generated);
+ acc.error_codes
+ .entry(err_code.to_owned())
+ .and_modify(|n| *n += 1)
+ .or_insert(1);
+ } else {
+ acc.syntax_errors += 1;
+ bar.println(format!("Syntax error: \n{}", err));
+ }
+ }
+ }
+ }
+ }
+
+ if syntax_hit_found {
+ acc.tail_expr_syntax_hits += 1;
+ }
+ acc.total_tail_exprs += 1;
+
+ let msg = move || {
+ format!(
+ "processing: {:<50}",
+ trim(&original_text).chars().take(50).collect::<String>()
+ )
+ };
+ if verbosity.is_spammy() {
+ bar.println(msg());
+ }
+ bar.set_message(msg);
+ }
+ // Revert file back to original state
+ if self.validate_term_search {
+ std::fs::write(&path, file_txt.to_string()).unwrap();
+ }
+
+ bar.inc(1);
+ }
+ let term_search_time = sw.elapsed();
+
+ bar.println(format!(
+ "Tail Expr syntactic hits: {}/{} ({}%)",
+ acc.tail_expr_syntax_hits,
+ acc.total_tail_exprs,
+ percentage(acc.tail_expr_syntax_hits, acc.total_tail_exprs)
+ ));
+ bar.println(format!(
+ "Tail Exprs found: {}/{} ({}%)",
+ acc.total_tail_exprs - acc.tail_expr_no_term,
+ acc.total_tail_exprs,
+ percentage(acc.total_tail_exprs - acc.tail_expr_no_term, acc.total_tail_exprs)
+ ));
+ if self.validate_term_search {
+ bar.println(format!(
+ "Tail Exprs total errors: {}, syntax errors: {}, error codes:",
+ acc.error_codes.values().sum::<u32>() + acc.syntax_errors,
+ acc.syntax_errors,
+ ));
+ for (err, count) in acc.error_codes {
+ bar.println(format!(
+ " E{err}: {count:>5} (https://doc.rust-lang.org/error_codes/E{err}.html)"
+ ));
+ }
+ }
+ bar.println(format!(
+ "Term search avg time: {}ms",
+ term_search_time.time.as_millis() as u64 / acc.total_tail_exprs.max(1)
+ ));
+ bar.println(format!("{:<20} {}", "Term search:", term_search_time));
+ report_metric("term search time", term_search_time.time.as_millis() as u64, "ms");
+
+ bar.finish_and_clear();
+ }
+
fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
let mut sw = self.stop_watch();
let mut all = 0;
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index 252b1e1a48..493e614dce 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -93,6 +93,11 @@ xflags::xflags! {
/// and annotations. This is useful for benchmarking the memory usage on a project that has
/// been worked on for a bit in a longer running session.
optional --run-all-ide-things
+ /// Run term search on all the tail expressions (of functions, block, if statements etc.)
+ optional --run-term-search
+ /// Validate term search by running `cargo check` on every response.
+ /// Note that this also temporarily modifies the files on disk, use with caution!
+ optional --validate-term-search
}
/// Run unit tests of the project using mir interpreter
@@ -218,6 +223,8 @@ pub struct AnalysisStats {
pub skip_data_layout: bool,
pub skip_const_eval: bool,
pub run_all_ide_things: bool,
+ pub run_term_search: bool,
+ pub validate_term_search: bool,
}
#[derive(Debug)]
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index bf3e71a6bd..1fda3d4c5c 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -286,6 +286,8 @@ config_data! {
"scope": "expr"
}
}"#,
+ /// Whether to enable term search based snippets like `Some(foo.bar().baz())`.
+ completion_termSearch_enable: bool = "false",
/// List of rust-analyzer diagnostics to disable.
diagnostics_disabled: FxHashSet<String> = "[]",
@@ -1535,6 +1537,7 @@ impl Config {
&& completion_item_edit_resolve(&self.caps),
enable_self_on_the_fly: self.data.completion_autoself_enable,
enable_private_editable: self.data.completion_privateEditable_enable,
+ enable_term_search: self.data.completion_termSearch_enable,
full_function_signatures: self.data.completion_fullFunctionSignatures_enable,
callable: match self.data.completion_callable_snippets {
CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index acc02d6447..f0eee77aff 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -132,6 +132,7 @@ fn integrated_completion_benchmark() {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: true,
+ enable_term_search: true,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
@@ -175,6 +176,7 @@ fn integrated_completion_benchmark() {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: true,
+ enable_term_search: true,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
@@ -216,6 +218,7 @@ fn integrated_completion_benchmark() {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: true,
+ enable_term_search: true,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 64f19f0b32..bc4666c122 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -123,6 +123,7 @@ pub(crate) fn completion_item_kind(
CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD,
CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET,
CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE,
+ CompletionItemKind::Expression => lsp_types::CompletionItemKind::SNIPPET,
CompletionItemKind::SymbolKind(symbol) => match symbol {
SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION,
SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 6e5e4127f4..1bc1ef8434 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -569,6 +569,26 @@ impl fmt::Display for NameOrNameRef {
}
}
+impl ast::AstNode for NameOrNameRef {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ SyntaxKind::NAME => NameOrNameRef::Name(ast::Name { syntax }),
+ SyntaxKind::NAME_REF => NameOrNameRef::NameRef(ast::NameRef { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ NameOrNameRef::NameRef(it) => it.syntax(),
+ NameOrNameRef::Name(it) => it.syntax(),
+ }
+ }
+}
+
impl NameOrNameRef {
pub fn text(&self) -> TokenText<'_> {
match self {
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index 23a3a7e0af..f125792d12 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -60,6 +60,8 @@
//! try: infallible
//! unpin: sized
//! unsize: sized
+//! todo: panic
+//! unimplemented: panic
#![rustc_coherence_is_core]
@@ -927,6 +929,10 @@ pub mod fmt {
use crate::mem::transmute;
unsafe { Argument { formatter: transmute(f), value: transmute(x) } }
}
+
+ pub fn new_display<'b, T: Display>(x: &'b T) -> Argument<'_> {
+ Self::new(x, Display::fmt)
+ }
}
#[lang = "format_alignment"]
@@ -1438,6 +1444,33 @@ mod macros {
// endregion:fmt
+ // region:todo
+ #[macro_export]
+ #[allow_internal_unstable(core_panic)]
+ macro_rules! todo {
+ () => {
+ $crate::panicking::panic("not yet implemented")
+ };
+ ($($arg:tt)+) => {
+ $crate::panic!("not yet implemented: {}", $crate::format_args!($($arg)+))
+ };
+ }
+ // endregion:todo
+
+ // region:unimplemented
+ #[macro_export]
+ #[allow_internal_unstable(core_panic)]
+ macro_rules! unimplemented {
+ () => {
+ $crate::panicking::panic("not implemented")
+ };
+ ($($arg:tt)+) => {
+ $crate::panic!("not implemented: {}", $crate::format_args!($($arg)+))
+ };
+ }
+ // endregion:unimplemented
+
+
// region:derive
pub(crate) mod builtin {
#[rustc_builtin_macro]
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index a86ef70941..5669c8fa6b 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -344,6 +344,11 @@ Default:
Custom completion snippets.
--
+[[rust-analyzer.completion.termSearch.enable]]rust-analyzer.completion.termSearch.enable (default: `false`)::
++
+--
+Whether to enable term search based snippets like `Some(foo.bar().baz())`.
+--
[[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`)::
+
--
diff --git a/editors/code/package.json b/editors/code/package.json
index b474471e5a..5bb87fa5b7 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -902,6 +902,11 @@
},
"type": "object"
},
+ "rust-analyzer.completion.termSearch.enable": {
+ "markdownDescription": "Whether to enable term search based snippets like `Some(foo.bar().baz())`.",
+ "default": false,
+ "type": "boolean"
+ },
"rust-analyzer.diagnostics.disabled": {
"markdownDescription": "List of rust-analyzer diagnostics to disable.",
"default": [],