Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/ide-completion/src/context/analysis.rs')
-rw-r--r--crates/ide-completion/src/context/analysis.rs129
1 file changed, 103 insertions, 26 deletions
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index d6d3978385..65bae5b66e 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -2,8 +2,10 @@
use std::iter;
use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
-use ide_db::{RootDatabase, active_parameter::ActiveParameter};
-use itertools::Either;
+use ide_db::{
+ RootDatabase, active_parameter::ActiveParameter, syntax_helpers::node_ext::find_loops,
+};
+use itertools::{Either, Itertools};
use stdx::always;
use syntax::{
AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
@@ -88,15 +90,9 @@ pub(super) fn expand_and_analyze<'db>(
let original_offset = expansion.original_offset + relative_offset;
let token = expansion.original_file.token_at_offset(original_offset).left_biased()?;
- hir::attach_db(sema.db, || analyze(sema, expansion, original_token, &token)).map(
- |(analysis, expected, qualifier_ctx)| AnalysisResult {
- analysis,
- expected,
- qualifier_ctx,
- token,
- original_offset,
- },
- )
+ analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
+ AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset }
+ })
}
fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> {
@@ -514,6 +510,21 @@ fn analyze<'db>(
colon_prefix,
extern_crate: p.ancestors().find_map(ast::ExternCrate::cast),
}
+ } else if p.kind() == SyntaxKind::TOKEN_TREE
+ && p.ancestors().any(|it| ast::Macro::can_cast(it.kind()))
+ {
+ if let Some([_ident, colon, _name, dollar]) = fake_ident_token
+ .siblings_with_tokens(Direction::Prev)
+ .filter(|it| !it.kind().is_trivia())
+ .take(4)
+ .collect_array()
+ && dollar.kind() == T![$]
+ && colon.kind() == T![:]
+ {
+ CompletionAnalysis::MacroSegment
+ } else {
+ return None;
+ }
} else {
return None;
}
@@ -600,10 +611,26 @@ fn expected_type_and_name<'db>(
Some(it) => it,
None => return ty,
};
- for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
+ let refs_level = top_syn
+ .ancestors()
+ .skip(1)
+ .map_while(Either::<ast::RefExpr, ast::PrefixExpr>::cast)
+ .take_while(|it| match it {
+ Either::Left(_) => true,
+ Either::Right(prefix) => prefix.op_kind() == Some(ast::UnaryOp::Deref),
+ })
+ .fold(0i32, |level, expr| match expr {
+ Either::Left(_) => level + 1,
+ Either::Right(_) => level - 1,
+ });
+ for _ in 0..refs_level {
cov_mark::hit!(expected_type_fn_param_ref);
ty = ty.strip_reference();
}
+ for _ in refs_level..0 {
+ cov_mark::hit!(expected_type_fn_param_deref);
+ ty = ty.add_reference(hir::Mutability::Shared);
+ }
ty
}
_ => ty,
@@ -657,7 +684,7 @@ fn expected_type_and_name<'db>(
cov_mark::hit!(expected_type_fn_param);
ActiveParameter::at_token(
sema,
- token.clone(),
+ token.clone(),
).map(|ap| {
let name = ap.ident().map(NameOrNameRef::Name);
(Some(ap.ty), name)
@@ -718,6 +745,23 @@ fn expected_type_and_name<'db>(
}.map(TypeInfo::original);
(ty, None)
},
+ ast::MatchArm(it) => {
+ let on_arrow = previous_non_trivia_token(token.clone()).is_some_and(|it| T![=>] == it.kind());
+ let in_body = it.expr().is_some_and(|it| it.syntax().text_range().contains_range(token.text_range()));
+ let match_expr = it.parent_match();
+
+ let ty = if on_arrow || in_body {
+ // match foo { ..., pat => $0 }
+ cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
+ cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
+ sema.type_of_expr(&match_expr.into())
+ } else {
+ // match foo { $0 }
+ cov_mark::hit!(expected_type_match_arm_without_leading_char);
+ match_expr.expr().and_then(|e| sema.type_of_expr(&e))
+ }.map(TypeInfo::original);
+ (ty, None)
+ },
ast::IfExpr(it) => {
let ty = if let Some(body) = it.then_branch()
&& token.text_range().end() > body.syntax().text_range().start()
@@ -752,6 +796,12 @@ fn expected_type_and_name<'db>(
});
(ty, None)
},
+ ast::BreakExpr(it) => {
+ let ty = it.break_token()
+ .and_then(|it| find_loops(sema, &it)?.next())
+ .and_then(|expr| sema.type_of_expr(&expr));
+ (ty.map(TypeInfo::original), None)
+ },
ast::ClosureExpr(it) => {
let ty = sema.type_of_expr(&it.into());
ty.and_then(|ty| ty.original.as_callable(sema.db))
@@ -1286,10 +1336,26 @@ fn classify_name_ref<'db>(
)
}
};
- let find_fn_self_param = |it| match it {
- ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
- ast::Item::MacroCall(_) => None,
- _ => Some(None),
+ let fn_self_param =
+ |fn_: ast::Fn| sema.to_def(&fn_).and_then(|it| it.self_param(sema.db));
+ let closure_this_param = |closure: ast::ClosureExpr| {
+ if closure.param_list()?.params().next()?.pat()?.syntax().text() != "this" {
+ return None;
+ }
+ sema.type_of_expr(&closure.into())
+ .and_then(|it| it.original.as_callable(sema.db))
+ .and_then(|it| it.params().into_iter().next())
+ };
+ let find_fn_self_param = |it: SyntaxNode| {
+ match_ast! {
+ match it {
+ ast::Fn(fn_) => Some(fn_self_param(fn_).map(Either::Left)),
+ ast::ClosureExpr(f) => closure_this_param(f).map(Either::Right).map(Some),
+ ast::MacroCall(_) => None,
+ ast::Item(_) => Some(None),
+ _ => None,
+ }
+ }
};
match find_node_in_file_compensated(sema, original_file, &expr) {
@@ -1302,7 +1368,6 @@ fn classify_name_ref<'db>(
let self_param = sema
.ancestors_with_macros(it.syntax().clone())
- .filter_map(ast::Item::cast)
.find_map(find_fn_self_param)
.flatten();
(innermost_ret_ty, self_param)
@@ -1321,9 +1386,8 @@ fn classify_name_ref<'db>(
let incomplete_expr_stmt =
it.parent().and_then(ast::ExprStmt::cast).map(|it| it.semicolon_token().is_none());
let before_else_kw = before_else_kw(it);
- let incomplete_let = it
- .parent()
- .and_then(ast::LetStmt::cast)
+ let incomplete_let = left_ancestors(it.parent())
+ .find_map(ast::LetStmt::cast)
.is_some_and(|it| it.semicolon_token().is_none())
|| after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw;
let in_value = is_in_value(it);
@@ -1628,6 +1692,7 @@ fn classify_name_ref<'db>(
}
}
qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
+ qualifier_ctx.abi_node = error_node.children().find_map(ast::Abi::cast);
}
if let PathKind::Item { .. } = path_ctx.kind
@@ -1635,7 +1700,7 @@ fn classify_name_ref<'db>(
&& let Some(t) = top.first_token()
&& let Some(prev) =
t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
- && ![T![;], T!['}'], T!['{']].contains(&prev.kind())
+ && ![T![;], T!['}'], T!['{'], T![']']].contains(&prev.kind())
{
// This was inferred to be an item position path, but it seems
// to be part of some other broken node which leaked into an item
@@ -1679,12 +1744,16 @@ fn pattern_context_for(
let mut param_ctx = None;
let mut missing_variants = vec![];
+ let is_pat_like = |kind| {
+ ast::Pat::can_cast(kind)
+ || ast::RecordPatField::can_cast(kind)
+ || ast::RecordPatFieldList::can_cast(kind)
+ };
- let (refutability, has_type_ascription) =
- pat
+ let (refutability, has_type_ascription) = pat
.syntax()
.ancestors()
- .find(|it| !ast::Pat::can_cast(it.kind()))
+ .find(|it| !is_pat_like(it.kind()))
.map_or((PatternRefutability::Irrefutable, false), |node| {
let refutability = match_ast! {
match node {
@@ -1877,6 +1946,13 @@ fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
Some((use_tree.path()?, true))
}
+fn left_ancestors(node: Option<SyntaxNode>) -> impl Iterator<Item = SyntaxNode> {
+ node.into_iter().flat_map(|node| {
+ let end = node.text_range().end();
+ node.ancestors().take_while(move |it| it.text_range().end() == end)
+ })
+}
+
fn is_in_token_of_for_loop(path: &ast::Path) -> bool {
// oh my ...
(|| {
@@ -2006,7 +2082,8 @@ fn prev_special_biased_token_at_trivia(mut token: SyntaxToken) -> SyntaxToken {
| T![|]
| T![return]
| T![break]
- | T![continue] = prev.kind()
+ | T![continue]
+ | T![lifetime_ident] = prev.kind()
{
token = prev
}