Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/ide-completion/src/context/analysis.rs')
| -rw-r--r-- | crates/ide-completion/src/context/analysis.rs | 281 |
1 file changed, 184 insertions, 97 deletions
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index b33a547dee..e761da7152 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -19,12 +19,15 @@ use syntax::{ match_ast, }; -use crate::context::{ - AttrCtx, BreakableKind, COMPLETION_MARKER, CompletionAnalysis, DotAccess, DotAccessExprCtx, - DotAccessKind, ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, - NameRefContext, NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx, PathKind, - PatternContext, PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, - TypeLocation, +use crate::{ + completions::postfix::is_in_condition, + context::{ + AttrCtx, BreakableKind, COMPLETION_MARKER, CompletionAnalysis, DotAccess, DotAccessExprCtx, + DotAccessKind, ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, + NameRefContext, NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx, + PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx, + TypeAscriptionTarget, TypeLocation, + }, }; #[derive(Debug)] @@ -85,9 +88,15 @@ pub(super) fn expand_and_analyze<'db>( let original_offset = expansion.original_offset + relative_offset; let token = expansion.original_file.token_at_offset(original_offset).left_biased()?; - analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| { - AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset } - }) + hir::attach_db(sema.db, || analyze(sema, expansion, original_token, &token)).map( + |(analysis, expected, qualifier_ctx)| AnalysisResult { + analysis, + expected, + qualifier_ctx, + token, + original_offset, + }, + ) } fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> { @@ -556,10 +565,10 @@ fn analyze<'db>( /// Calculate the expected type and name of the cursor position. 
fn expected_type_and_name<'db>( sema: &Semantics<'db, RootDatabase>, - token: &SyntaxToken, + self_token: &SyntaxToken, name_like: &ast::NameLike, ) -> (Option<Type<'db>>, Option<NameOrNameRef>) { - let token = prev_special_biased_token_at_trivia(token.clone()); + let token = prev_special_biased_token_at_trivia(self_token.clone()); let mut node = match token.parent() { Some(it) => it, None => return (None, None), @@ -637,6 +646,9 @@ fn expected_type_and_name<'db>( .or_else(|| it.rhs().and_then(|rhs| sema.type_of_expr(&rhs))) .map(TypeInfo::original); (ty, None) + } else if let Some(ast::BinaryOp::LogicOp(_)) = it.op_kind() { + let ty = sema.type_of_expr(&it.clone().into()).map(TypeInfo::original); + (ty, None) } else { (None, None) } @@ -645,7 +657,7 @@ fn expected_type_and_name<'db>( cov_mark::hit!(expected_type_fn_param); ActiveParameter::at_token( sema, - token.clone(), + token.clone(), ).map(|ap| { let name = ap.ident().map(NameOrNameRef::Name); (Some(ap.ty), name) @@ -707,9 +719,13 @@ fn expected_type_and_name<'db>( (ty, None) }, ast::IfExpr(it) => { - let ty = it.condition() - .and_then(|e| sema.type_of_expr(&e)) - .map(TypeInfo::original); + let ty = if let Some(body) = it.then_branch() + && token.text_range().end() > body.syntax().text_range().start() + { + sema.type_of_expr(&body.into()) + } else { + it.condition().and_then(|e| sema.type_of_expr(&e)) + }.map(TypeInfo::original); (ty, None) }, ast::IdentPat(it) => { @@ -742,7 +758,15 @@ fn expected_type_and_name<'db>( .map(|c| (Some(c.return_type()), None)) .unwrap_or((None, None)) }, - ast::ParamList(_) => (None, None), + ast::ParamList(it) => { + let closure = it.syntax().parent().and_then(ast::ClosureExpr::cast); + let ty = closure + .filter(|_| it.syntax().text_range().end() <= self_token.text_range().start()) + .and_then(|it| sema.type_of_expr(&it.into())); + ty.and_then(|ty| ty.original.as_callable(sema.db)) + .map(|c| (Some(c.return_type()), None)) + .unwrap_or((None, None)) + }, ast::Stmt(_) => 
(None, None), ast::Item(_) => (None, None), _ => { @@ -867,44 +891,53 @@ fn classify_name_ref<'db>( return Some(make_res(kind)); } + let field_expr_handle = |receiver, node| { + let receiver = find_opt_node_in_file(original_file, receiver); + let receiver_is_ambiguous_float_literal = match &receiver { + Some(ast::Expr::Literal(l)) => matches! { + l.kind(), + ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().is_some_and(|it| it.text().ends_with('.')) + }, + _ => false, + }; + + let receiver_is_part_of_indivisible_expression = match &receiver { + Some(ast::Expr::IfExpr(_)) => { + let next_token_kind = + next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind()); + next_token_kind == Some(SyntaxKind::ELSE_KW) + } + _ => false, + }; + if receiver_is_part_of_indivisible_expression { + return None; + } + + let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it)); + if receiver_is_ambiguous_float_literal { + // `123.|` is parsed as a float but should actually be an integer. + always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float())); + receiver_ty = + Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None }); + } + + let kind = NameRefKind::DotAccess(DotAccess { + receiver_ty, + kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal }, + receiver, + ctx: DotAccessExprCtx { + in_block_expr: is_in_block(node), + in_breakable: is_in_breakable(node).unzip().0, + }, + }); + Some(make_res(kind)) + }; + let segment = match_ast! { match parent { ast::PathSegment(segment) => segment, ast::FieldExpr(field) => { - let receiver = find_opt_node_in_file(original_file, field.expr()); - let receiver_is_ambiguous_float_literal = match &receiver { - Some(ast::Expr::Literal(l)) => matches! { - l.kind(), - ast::LiteralKind::FloatNumber { .. 
} if l.syntax().last_token().is_some_and(|it| it.text().ends_with('.')) - }, - _ => false, - }; - - let receiver_is_part_of_indivisible_expression = match &receiver { - Some(ast::Expr::IfExpr(_)) => { - let next_token_kind = next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind()); - next_token_kind == Some(SyntaxKind::ELSE_KW) - }, - _ => false - }; - if receiver_is_part_of_indivisible_expression { - return None; - } - - let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it)); - if receiver_is_ambiguous_float_literal { - // `123.|` is parsed as a float but should actually be an integer. - always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float())); - receiver_ty = Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None }); - } - - let kind = NameRefKind::DotAccess(DotAccess { - receiver_ty, - kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal }, - receiver, - ctx: DotAccessExprCtx { in_block_expr: is_in_block(field.syntax()), in_breakable: is_in_breakable(field.syntax()) } - }); - return Some(make_res(kind)); + return field_expr_handle(field.expr(), field.syntax()); }, ast::ExternCrate(_) => { let kind = NameRefKind::ExternCrate; @@ -913,11 +946,14 @@ fn classify_name_ref<'db>( ast::MethodCallExpr(method) => { let receiver = find_opt_node_in_file(original_file, method.receiver()); let has_parens = has_parens(&method); + if !has_parens && let Some(res) = field_expr_handle(method.receiver(), method.syntax()) { + return Some(res) + } let kind = NameRefKind::DotAccess(DotAccess { receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)), - kind: DotAccessKind::Method { has_parens }, + kind: DotAccessKind::Method, receiver, - ctx: DotAccessExprCtx { in_block_expr: is_in_block(method.syntax()), in_breakable: is_in_breakable(method.syntax()) } + ctx: DotAccessExprCtx { in_block_expr: is_in_block(method.syntax()), in_breakable: 
is_in_breakable(method.syntax()).unzip().0 } }); return Some(make_res(kind)); }, @@ -963,10 +999,6 @@ fn classify_name_ref<'db>( } } }; - let after_if_expr = |node: SyntaxNode| { - let prev_expr = prev_expr(node); - matches!(prev_expr, Some(ast::Expr::IfExpr(_))) - }; let after_incomplete_let = |node: SyntaxNode| { prev_expr(node).and_then(|it| it.syntax().parent()).and_then(ast::LetStmt::cast) }; @@ -980,6 +1012,25 @@ fn classify_name_ref<'db>( .and_then(|next| next.first_token()) .is_some_and(|token| token.kind() == SyntaxKind::ELSE_KW) }; + let is_in_value = |it: &SyntaxNode| { + let Some(node) = it.parent() else { return false }; + let kind = node.kind(); + ast::LetStmt::can_cast(kind) + || ast::ArgList::can_cast(kind) + || ast::ArrayExpr::can_cast(kind) + || ast::ParenExpr::can_cast(kind) + || ast::BreakExpr::can_cast(kind) + || ast::ReturnExpr::can_cast(kind) + || ast::PrefixExpr::can_cast(kind) + || ast::FormatArgsArg::can_cast(kind) + || ast::RecordExprField::can_cast(kind) + || ast::BinExpr::cast(node.clone()) + .and_then(|expr| expr.rhs()) + .is_some_and(|expr| expr.syntax() == it) + || ast::IndexExpr::cast(node) + .and_then(|expr| expr.index()) + .is_some_and(|expr| expr.syntax() == it) + }; // We do not want to generate path completions when we are sandwiched between an item decl signature and its body. // ex. trait Foo $0 {} @@ -1202,29 +1253,11 @@ fn classify_name_ref<'db>( Some(res) }; - fn is_in_condition(it: &ast::Expr) -> bool { - (|| { - let parent = it.syntax().parent()?; - if let Some(expr) = ast::WhileExpr::cast(parent.clone()) { - Some(expr.condition()? == *it) - } else if let Some(expr) = ast::IfExpr::cast(parent.clone()) { - Some(expr.condition()? 
== *it) - } else if let Some(expr) = ast::BinExpr::cast(parent) - && expr.op_token()?.kind() == T![&&] - { - Some(is_in_condition(&expr.into())) - } else { - None - } - })() - .unwrap_or(false) - } - let make_path_kind_expr = |expr: ast::Expr| { let it = expr.syntax(); let in_block_expr = is_in_block(it); - let in_loop_body = is_in_breakable(it); - let after_if_expr = after_if_expr(it.clone()); + let (in_loop_body, innermost_breakable) = is_in_breakable(it).unzip(); + let after_if_expr = is_after_if_expr(it.clone()); let ref_expr_parent = path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast); let after_amp = non_trivia_sibling(it.clone().into(), Direction::Prev) @@ -1277,18 +1310,25 @@ fn classify_name_ref<'db>( None => (None, None), } }; + let innermost_breakable_ty = innermost_breakable + .and_then(ast::Expr::cast) + .and_then(|expr| find_node_in_file_compensated(sema, original_file, &expr)) + .and_then(|expr| sema.type_of_expr(&expr)) + .map(|ty| if ty.original.is_never() { ty.adjusted() } else { ty.original() }); let is_func_update = func_update_record(it); let in_condition = is_in_condition(&expr); let after_incomplete_let = after_incomplete_let(it.clone()).is_some(); let incomplete_expr_stmt = it.parent().and_then(ast::ExprStmt::cast).map(|it| it.semicolon_token().is_none()); + let before_else_kw = before_else_kw(it); let incomplete_let = it .parent() .and_then(ast::LetStmt::cast) .is_some_and(|it| it.semicolon_token().is_none()) - || after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw(it); - let in_value = it.parent().and_then(Either::<ast::LetStmt, ast::ArgList>::cast).is_some(); - let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax()); + || after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw; + let in_value = is_in_value(it); + let impl_ = fetch_immediate_impl_or_trait(sema, original_file, expr.syntax()) + .and_then(Either::left); let in_match_guard = match 
it.parent().and_then(ast::MatchArm::cast) { Some(arm) => arm @@ -1302,11 +1342,13 @@ fn classify_name_ref<'db>( in_block_expr, in_breakable: in_loop_body, after_if_expr, + before_else_kw, in_condition, ref_expr_parent, after_amp, is_func_update, innermost_ret_ty, + innermost_breakable_ty, self_param, in_value, incomplete_let, @@ -1586,6 +1628,7 @@ fn classify_name_ref<'db>( } } qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast); + qualifier_ctx.abi_node = error_node.children().find_map(ast::Abi::cast); } if let PathKind::Item { .. } = path_ctx.kind @@ -1593,7 +1636,7 @@ fn classify_name_ref<'db>( && let Some(t) = top.first_token() && let Some(prev) = t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev)) - && ![T![;], T!['}'], T!['{']].contains(&prev.kind()) + && ![T![;], T!['}'], T!['{'], T![']']].contains(&prev.kind()) { // This was inferred to be an item position path, but it seems // to be part of some other broken node which leaked into an item @@ -1637,12 +1680,16 @@ fn pattern_context_for( let mut param_ctx = None; let mut missing_variants = vec![]; + let is_pat_like = |kind| { + ast::Pat::can_cast(kind) + || ast::RecordPatField::can_cast(kind) + || ast::RecordPatFieldList::can_cast(kind) + }; - let (refutability, has_type_ascription) = - pat + let (refutability, has_type_ascription) = pat .syntax() .ancestors() - .find(|it| !ast::Pat::can_cast(it.kind())) + .find(|it| !is_pat_like(it.kind())) .map_or((PatternRefutability::Irrefutable, false), |node| { let refutability = match_ast! 
{ match node { @@ -1736,31 +1783,34 @@ fn pattern_context_for( param_ctx, has_type_ascription, should_suggest_name, + after_if_expr: is_after_if_expr(pat.syntax().clone()), parent_pat: pat.syntax().parent().and_then(ast::Pat::cast), mut_token, ref_token, record_pat: None, - impl_: fetch_immediate_impl(sema, original_file, pat.syntax()), + impl_or_trait: fetch_immediate_impl_or_trait(sema, original_file, pat.syntax()), missing_variants, } } -fn fetch_immediate_impl( +fn fetch_immediate_impl_or_trait( sema: &Semantics<'_, RootDatabase>, original_file: &SyntaxNode, node: &SyntaxNode, -) -> Option<ast::Impl> { +) -> Option<Either<ast::Impl, ast::Trait>> { let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)? .filter_map(ast::Item::cast) .filter(|it| !matches!(it, ast::Item::MacroCall(_))); match ancestors.next()? { ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (), - ast::Item::Impl(it) => return Some(it), + ast::Item::Impl(it) => return Some(Either::Left(it)), + ast::Item::Trait(it) => return Some(Either::Right(it)), _ => return None, } match ancestors.next()? { - ast::Item::Impl(it) => Some(it), + ast::Item::Impl(it) => Some(Either::Left(it)), + ast::Item::Trait(it) => Some(Either::Right(it)), _ => None, } } @@ -1854,32 +1904,68 @@ fn is_in_token_of_for_loop(path: &ast::Path) -> bool { .unwrap_or(false) } -fn is_in_breakable(node: &SyntaxNode) -> BreakableKind { +fn is_in_breakable(node: &SyntaxNode) -> Option<(BreakableKind, SyntaxNode)> { node.ancestors() .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR) .find_map(|it| { let (breakable, loop_body) = match_ast! 
{ match it { - ast::ForExpr(it) => (BreakableKind::For, it.loop_body()), - ast::WhileExpr(it) => (BreakableKind::While, it.loop_body()), - ast::LoopExpr(it) => (BreakableKind::Loop, it.loop_body()), - ast::BlockExpr(it) => return it.label().map(|_| BreakableKind::Block), + ast::ForExpr(it) => (BreakableKind::For, it.loop_body()?), + ast::WhileExpr(it) => (BreakableKind::While, it.loop_body()?), + ast::LoopExpr(it) => (BreakableKind::Loop, it.loop_body()?), + ast::BlockExpr(it) => return it.label().map(|_| (BreakableKind::Block, it.syntax().clone())), _ => return None, } }; - loop_body - .filter(|it| it.syntax().text_range().contains_range(node.text_range())) - .map(|_| breakable) + loop_body.syntax().text_range().contains_range(node.text_range()) + .then_some((breakable, it)) }) - .unwrap_or(BreakableKind::None) } fn is_in_block(node: &SyntaxNode) -> bool { + if has_in_newline_expr_first(node) { + return true; + }; node.parent() .map(|node| ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())) .unwrap_or(false) } +/// Similar to `has_parens`, heuristic sensing incomplete statement before ambiguous `Expr` +/// +/// Heuristic: +/// +/// If the `PathExpr` is left part of the `Expr` and there is a newline after the `PathExpr`, +/// it is considered that the `PathExpr` is not part of the `Expr`. 
+fn has_in_newline_expr_first(node: &SyntaxNode) -> bool { + if ast::PathExpr::can_cast(node.kind()) + && let Some(NodeOrToken::Token(next)) = node.next_sibling_or_token() + && next.kind() == SyntaxKind::WHITESPACE + && next.text().contains('\n') + && let Some(stmt_like) = node + .ancestors() + .take_while(|it| it.text_range().start() == node.text_range().start()) + .filter_map(Either::<ast::ExprStmt, ast::Expr>::cast) + .last() + { + stmt_like.syntax().parent().and_then(ast::StmtList::cast).is_some() + } else { + false + } +} + +fn is_after_if_expr(node: SyntaxNode) -> bool { + let node = match node.parent().and_then(Either::<ast::ExprStmt, ast::MatchArm>::cast) { + Some(stmt) => stmt.syntax().clone(), + None => node, + }; + let prev_sibling = + non_trivia_sibling(node.into(), Direction::Prev).and_then(NodeOrToken::into_node); + iter::successors(prev_sibling, |it| it.last_child_or_token()?.into_node()) + .find_map(ast::IfExpr::cast) + .is_some() +} + fn next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> { let mut token = match e.into() { SyntaxElement::Node(n) => n.last_token()?, @@ -1922,6 +2008,7 @@ fn prev_special_biased_token_at_trivia(mut token: SyntaxToken) -> SyntaxToken { | T![|=] | T![&=] | T![^=] + | T![|] | T![return] | T![break] | T![continue] = prev.kind() |