Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/ide-completion/src/context/analysis.rs')
-rw-r--r--  crates/ide-completion/src/context/analysis.rs | 19
1 file changed, 12 insertions, 7 deletions
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 3c4d489c0f..f5a50ae819 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -5,7 +5,7 @@ use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
use ide_db::{active_parameter::ActiveParameter, RootDatabase};
use itertools::Either;
use syntax::{
- algo::{ancestors_at_offset, find_node_at_offset, non_trivia_sibling},
+ algo::{self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling},
ast::{
self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName,
NameOrNameRef,
@@ -85,6 +85,11 @@ pub(super) fn expand_and_analyze(
})
}
+fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> {
+ let token = file.token_at_offset(offset).left_biased()?;
+ algo::skip_whitespace_token(token, Direction::Prev)
+}
+
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
/// and speculative states stay in sync.
@@ -125,9 +130,7 @@ fn expand(
// Left biased since there may already be an identifier token there, and we appended to it.
if !sema.might_be_inside_macro_call(&fake_ident_token)
- && original_file
- .token_at_offset(original_offset + relative_offset)
- .left_biased()
+ && token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset)
.is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
{
// Recursion base case.
@@ -143,9 +146,11 @@ fn expand(
let parent_item =
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+ let original_node = token_at_offset_ignore_whitespace(&original_file, original_offset)
+ .and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
let ancestor_items = iter::successors(
Option::zip(
- find_node_at_offset::<ast::Item>(&original_file, original_offset),
+ original_node,
find_node_at_offset::<ast::Item>(
&speculative_file,
fake_ident_token.text_range().start(),
@@ -1590,11 +1595,11 @@ fn pattern_context_for(
}).map(|enum_| enum_.variants(sema.db))
})
}).map(|variants| variants.iter().filter_map(|variant| {
- let variant_name = variant.name(sema.db).unescaped().display(sema.db).to_string();
+ let variant_name = variant.name(sema.db);
let variant_already_present = match_arm_list.arms().any(|arm| {
arm.pat().and_then(|pat| {
- let pat_already_present = pat.syntax().to_string().contains(&variant_name);
+ let pat_already_present = pat.syntax().to_string().contains(variant_name.as_str());
pat_already_present.then_some(pat_already_present)
}).is_some()
});