rust-analyzer
Improve macro descension API
Lukas Wirth 2023-12-05
parent 986577f · commit 5b8e386
-rw-r--r--  crates/hir-expand/src/lib.rs                                              |   2
-rw-r--r--  crates/hir/src/lib.rs                                                     |   4
-rw-r--r--  crates/hir/src/semantics.rs                                               | 176
-rw-r--r--  crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs |   9
-rw-r--r--  crates/ide-assists/src/handlers/extract_function.rs                       |   8
-rw-r--r--  crates/ide-db/src/helpers.rs                                              |   4
-rw-r--r--  crates/ide-db/src/search.rs                                               |   8
-rw-r--r--  crates/ide/src/call_hierarchy.rs                                          |   4
-rw-r--r--  crates/ide/src/doc_links.rs                                               |   8
-rw-r--r--  crates/ide/src/expand_macro.rs                                            |   8
-rw-r--r--  crates/ide/src/extend_selection.rs                                        |  19
-rw-r--r--  crates/ide/src/goto_declaration.rs                                        |   4
-rw-r--r--  crates/ide/src/goto_definition.rs                                         |   4
-rw-r--r--  crates/ide/src/goto_implementation.rs                                     |   4
-rw-r--r--  crates/ide/src/goto_type_definition.rs                                    |   3
-rw-r--r--  crates/ide/src/highlight_related.rs                                       |   4
-rw-r--r--  crates/ide/src/hover.rs                                                   |  12
-rw-r--r--  crates/ide/src/moniker.rs                                                 |   4
-rw-r--r--  crates/ide/src/references.rs                                              |   4
-rw-r--r--  crates/ide/src/signature_help.rs                                          |   7
-rw-r--r--  crates/ide/src/syntax_highlighting.rs                                     |  18
21 files changed, 177 insertions(+), 137 deletions(-)
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 602babcc99..71c98b2770 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -629,8 +629,6 @@ impl ExpansionInfo {
pub fn map_range_down<'a>(
&'a self,
span: SpanData,
- // FIXME: use this for range mapping, so that we can resolve inline format args
- _relative_token_offset: Option<TextSize>,
) -> Option<impl Iterator<Item = InMacroFile<SyntaxToken>> + 'a> {
let tokens = self
.exp_map
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 22e14b6181..53e60c5862 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -92,7 +92,9 @@ pub use crate::{
attrs::{resolve_doc_path_on, HasAttrs},
diagnostics::*,
has_source::HasSource,
- semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
+ semantics::{
+ DescendPreference, PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits,
+ },
};
// Be careful with these re-exports.
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index ed3d3f1a3b..7d3c89ddb6 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -2,7 +2,11 @@
mod source_to_def;
-use std::{cell::RefCell, fmt, iter, mem, ops};
+use std::{
+ cell::RefCell,
+ fmt, iter, mem,
+ ops::{self, ControlFlow},
+};
use base_db::{FileId, FileRange};
use either::Either;
@@ -39,6 +43,12 @@ use crate::{
TypeAlias, TypeParam, VariantDef,
};
+pub enum DescendPreference {
+ SameText,
+ SameKind,
+ None,
+}
+
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PathResolution {
/// An item
@@ -397,6 +407,7 @@ impl<'db> SemanticsImpl<'db> {
// This might not be the correct way to do this, but it works for now
let mut res = smallvec![];
let tokens = (|| {
+ // FIXME: the trivia skipping should not be necessary
let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
Some((first, last))
@@ -407,18 +418,19 @@ impl<'db> SemanticsImpl<'db> {
};
if first == last {
+ // node is just the token, so descend the token
self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
if let Some(node) = value.parent_ancestors().find_map(N::cast) {
res.push(node)
}
- false
+ ControlFlow::Continue(())
});
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(first, 0.into(), &mut |token| {
scratch.push(token);
- false
+ ControlFlow::Continue(())
});
let mut scratch = scratch.into_iter();
@@ -441,7 +453,7 @@ impl<'db> SemanticsImpl<'db> {
}
}
}
- false
+ ControlFlow::Continue(())
},
);
}
@@ -453,32 +465,43 @@ impl<'db> SemanticsImpl<'db> {
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros(
&self,
+ mode: DescendPreference,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
- let mut res = smallvec![];
- self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
- res.push(value);
- false
- });
- res
- }
-
- /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
- ///
- /// Returns the original non descended token if none of the mapped counterparts have the same text.
- pub fn descend_into_macros_with_same_text(
- &self,
- token: SyntaxToken,
- offset: TextSize,
- ) -> SmallVec<[SyntaxToken; 1]> {
- let text = token.text();
+ enum Dp<'t> {
+ SameText(&'t str),
+ SameKind(SyntaxKind),
+ None,
+ }
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let mode = match mode {
+ DescendPreference::SameText => Dp::SameText(token.text()),
+ DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
+ DescendPreference::None => Dp::None,
+ };
let mut res = smallvec![];
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
- if value.text() == text {
+ let is_a_match = match mode {
+ Dp::SameText(text) => value.text() == text,
+ Dp::SameKind(preferred_kind) => {
+ let kind = fetch_kind(&value);
+ kind == preferred_kind
+ // special case for derive macros
+ || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
+ }
+ Dp::None => true,
+ };
+ if is_a_match {
res.push(value);
}
- false
+ ControlFlow::Continue(())
});
if res.is_empty() {
res.push(token);
@@ -486,45 +509,48 @@ impl<'db> SemanticsImpl<'db> {
res
}
- pub fn descend_into_macros_with_kind_preference(
+ pub fn descend_into_macros_single(
&self,
+ mode: DescendPreference,
token: SyntaxToken,
offset: TextSize,
) -> SyntaxToken {
+ enum Dp<'t> {
+ SameText(&'t str),
+ SameKind(SyntaxKind),
+ None,
+ }
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
- kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
- node.parent().map_or(kind, |it| it.kind())
- }
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
_ => token.kind(),
},
None => token.kind(),
};
- let preferred_kind = fetch_kind(&token);
- let mut res = None;
+ let mode = match mode {
+ DescendPreference::SameText => Dp::SameText(token.text()),
+ DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
+ DescendPreference::None => Dp::None,
+ };
+ let mut res = token.clone();
self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
- if fetch_kind(&value) == preferred_kind {
- res = Some(value);
- true
- } else {
- if let None = res {
- res = Some(value)
+ let is_a_match = match mode {
+ Dp::SameText(text) => value.text() == text,
+ Dp::SameKind(preferred_kind) => {
+ let kind = fetch_kind(&value);
+ kind == preferred_kind
+ // special case for derive macros
+ || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
}
- false
+ Dp::None => true,
+ };
+ if is_a_match {
+ res = value;
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
}
});
- res.unwrap_or(token)
- }
-
- /// Descend the token into its macro call if it is part of one, returning the token in the
- /// expansion that it is associated with. If `offset` points into the token's range, it will
- /// be considered for the mapping in case of inline format args.
- pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
- let mut res = token.clone();
- self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
- res = value;
- true
- });
res
}
@@ -535,7 +561,7 @@ impl<'db> SemanticsImpl<'db> {
// FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
// mapping, specifically for node downmapping
_offset: TextSize,
- f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
// FIXME: Clean this up
let _p = profile::span("descend_into_macros");
@@ -560,25 +586,24 @@ impl<'db> SemanticsImpl<'db> {
let def_map = sa.resolver.def_map();
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
- let mut process_expansion_for_token =
- |stack: &mut SmallVec<_>, macro_file, _token: InFile<&_>| {
- let expansion_info = cache
- .entry(macro_file)
- .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
+ let mut process_expansion_for_token = |stack: &mut SmallVec<_>, macro_file| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
- {
- let InFile { file_id, value } = expansion_info.expanded();
- self.cache(value, file_id);
- }
+ {
+ let InFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id);
+ }
- let mapped_tokens = expansion_info.map_range_down(span, None)?;
- let len = stack.len();
+ let mapped_tokens = expansion_info.map_range_down(span)?;
+ let len = stack.len();
- // requeue the tokens we got from mapping our current token down
- stack.extend(mapped_tokens.map(Into::into));
- // if the length changed we have found a mapping for the token
- (stack.len() != len).then_some(())
- };
+ // requeue the tokens we got from mapping our current token down
+ stack.extend(mapped_tokens.map(Into::into));
+ // if the length changed we have found a mapping for the token
+ (stack.len() != len).then_some(())
+ };
// Remap the next token in the queue into a macro call its in, if it is not being remapped
// either due to not being in a macro-call or because its unused push it into the result vec,
@@ -598,7 +623,7 @@ impl<'db> SemanticsImpl<'db> {
});
if let Some(call_id) = containing_attribute_macro_call {
let file_id = call_id.as_macro_file();
- return process_expansion_for_token(&mut stack, file_id, token.as_ref());
+ return process_expansion_for_token(&mut stack, file_id);
}
// Then check for token trees, that means we are either in a function-like macro or
@@ -624,7 +649,7 @@ impl<'db> SemanticsImpl<'db> {
it
}
};
- process_expansion_for_token(&mut stack, file_id, token.as_ref())
+ process_expansion_for_token(&mut stack, file_id)
} else if let Some(meta) = ast::Meta::cast(parent) {
// attribute we failed expansion for earlier, this might be a derive invocation
// or derive helper attribute
@@ -646,11 +671,7 @@ impl<'db> SemanticsImpl<'db> {
Some(call_id) => {
// resolved to a derive
let file_id = call_id.as_macro_file();
- return process_expansion_for_token(
- &mut stack,
- file_id,
- token.as_ref(),
- );
+ return process_expansion_for_token(&mut stack, file_id);
}
None => Some(adt),
}
@@ -682,11 +703,8 @@ impl<'db> SemanticsImpl<'db> {
def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
let mut res = None;
for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
- res = res.or(process_expansion_for_token(
- &mut stack,
- derive.as_macro_file(),
- token.as_ref(),
- ));
+ res =
+ res.or(process_expansion_for_token(&mut stack, derive.as_macro_file()));
}
res
} else {
@@ -695,7 +713,7 @@ impl<'db> SemanticsImpl<'db> {
})()
.is_none();
- if was_not_remapped && f(token) {
+ if was_not_remapped && f(token).is_break() {
break;
}
}
@@ -711,7 +729,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset)
- .map(move |token| self.descend_into_macros(token, offset))
+ .map(move |token| self.descend_into_macros(DescendPreference::None, token, offset))
.map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
})
diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
index 31a1ff496e..55f2fd9f6c 100644
--- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
+++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
@@ -1,4 +1,5 @@
use crate::{AssistContext, Assists};
+use hir::DescendPreference;
use ide_db::{
assists::{AssistId, AssistKind},
syntax_helpers::{
@@ -34,9 +35,11 @@ pub(crate) fn extract_expressions_from_format_string(
let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
- let expanded_t = ast::String::cast(
- ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()),
- )?;
+ let expanded_t = ast::String::cast(ctx.sema.descend_into_macros_single(
+ DescendPreference::SameKind,
+ fmt_string.syntax().clone(),
+ 0.into(),
+ ))?;
if !is_format_string(&expanded_t) {
return None;
}
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index 6b48d15881..9b892ac1e9 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -3,8 +3,8 @@ use std::iter;
use ast::make;
use either::Either;
use hir::{
- HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics,
- TypeInfo, TypeParam,
+ DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef,
+ PathResolution, Semantics, TypeInfo, TypeParam,
};
use ide_db::{
defs::{Definition, NameRefClass},
@@ -751,7 +751,9 @@ impl FunctionBody {
.descendants_with_tokens()
.filter_map(SyntaxElement::into_token)
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
- .flat_map(|t| sema.descend_into_macros(t, 0.into()))
+ .flat_map(|t| {
+ sema.descend_into_macros(DescendPreference::None, t, 0.into())
+ })
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
}
}
diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs
index 330af442f7..d4b031879d 100644
--- a/crates/ide-db/src/helpers.rs
+++ b/crates/ide-db/src/helpers.rs
@@ -3,7 +3,7 @@
use std::collections::VecDeque;
use base_db::{FileId, SourceDatabaseExt};
-use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics};
+use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics};
use syntax::{
ast::{self, make},
AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
@@ -117,7 +117,7 @@ pub fn get_definition(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
) -> Option<Definition> {
- for token in sema.descend_into_macros(token, 0.into()) {
+ for token in sema.descend_into_macros(DescendPreference::None, token, 0.into()) {
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() {
return Some(x);
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 2ce036c044..c1ed17503f 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -8,8 +8,8 @@ use std::mem;
use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{
- AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, InRealFile, ModuleSource,
- Semantics, Visibility,
+ AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile,
+ InRealFile, ModuleSource, Semantics, Visibility,
};
use memchr::memmem::Finder;
use nohash_hasher::IntMap;
@@ -467,7 +467,9 @@ impl<'a> FindUsages<'a> {
// every textual hit. That function is notoriously
// expensive even for things that do not get down mapped
// into macros.
- sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent())
+ sema.descend_into_macros(DescendPreference::None, token, offset)
+ .into_iter()
+ .filter_map(|it| it.parent())
})
};
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index 6f41f51f80..70391cd847 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -1,6 +1,6 @@
//! Entry point for call-hierarchy
-use hir::Semantics;
+use hir::{DescendPreference, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
@@ -87,7 +87,7 @@ pub(crate) fn outgoing_calls(
})?;
let mut calls = CallLocations::default();
- sema.descend_into_macros(token, offset)
+ sema.descend_into_macros(DescendPreference::None, token, offset)
.into_iter()
.filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
.filter_map(|item| match item {
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index ac15b6aba6..97fa7dee30 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -12,7 +12,9 @@ use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions
use stdx::format_to;
use url::Url;
-use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
+use hir::{
+ db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, HasAttrs,
+};
use ide_db::{
base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase},
defs::{Definition, NameClass, NameRefClass},
@@ -144,7 +146,7 @@ pub(crate) fn external_docs(
kind if kind.is_trivia() => 0,
_ => 1,
})?;
- let token = sema.descend_into_macros_single(token, offset);
+ let token = sema.descend_into_macros_single(DescendPreference::None, token, offset);
let node = token.parent()?;
let definition = match_ast! {
@@ -286,7 +288,7 @@ impl DocCommentToken {
let original_start = doc_token.text_range().start();
let relative_comment_offset = offset - original_start - prefix_len;
- sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| {
+ sema.descend_into_macros(DescendPreference::None,doc_token, offset).into_iter().find_map(|t| {
let (node, descended_prefix_len) = match_ast! {
match t {
ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index a70f335ada..cc878dc719 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -1,4 +1,4 @@
-use hir::{HirFileIdExt, InFile, Semantics};
+use hir::{DescendPreference, HirFileIdExt, InFile, Semantics};
use ide_db::{
base_db::FileId, helpers::pick_best_token,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
@@ -40,8 +40,10 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
// struct Bar;
// ```
- let derive =
- sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| {
+ let derive = sema
+ .descend_into_macros(DescendPreference::None, tok.clone(), 0.into())
+ .into_iter()
+ .find_map(|descended| {
let hir_file = sema.hir_file_for(&descended.parent()?);
if !hir_file.is_derive_attr_pseudo_expansion(db) {
return None;
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index 9b2ff070c7..1cdbf7840e 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -1,6 +1,6 @@
use std::iter::successors;
-use hir::Semantics;
+use hir::{DescendPreference, Semantics};
use ide_db::RootDatabase;
use syntax::{
algo::{self, skip_trivia_token},
@@ -140,10 +140,16 @@ fn extend_tokens_from_range(
// compute original mapped token range
let extended = {
- let fst_expanded =
- sema.descend_into_macros_single(first_token.clone(), original_range.start());
- let lst_expanded =
- sema.descend_into_macros_single(last_token.clone(), original_range.end());
+ let fst_expanded = sema.descend_into_macros_single(
+ DescendPreference::None,
+ first_token.clone(),
+ original_range.start(),
+ );
+ let lst_expanded = sema.descend_into_macros_single(
+ DescendPreference::None,
+ last_token.clone(),
+ original_range.end(),
+ );
let mut lca =
algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
lca = shallowest_node(&lca);
@@ -157,7 +163,8 @@ fn extend_tokens_from_range(
let validate = |offset: TextSize| {
let extended = &extended;
move |token: &SyntaxToken| -> bool {
- let expanded = sema.descend_into_macros_single(token.clone(), offset);
+ let expanded =
+ sema.descend_into_macros_single(DescendPreference::None, token.clone(), offset);
let parent = match expanded.parent() {
Some(it) => it,
None => return false,
diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs
index 7e0fab4260..ee94dff5fb 100644
--- a/crates/ide/src/goto_declaration.rs
+++ b/crates/ide/src/goto_declaration.rs
@@ -1,4 +1,4 @@
-use hir::{AsAssocItem, Semantics};
+use hir::{AsAssocItem, DescendPreference, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
RootDatabase,
@@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
.find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
let range = original_token.text_range();
let info: Vec<NavigationTarget> = sema
- .descend_into_macros(original_token, offset)
+ .descend_into_macros(DescendPreference::None, original_token, offset)
.iter()
.filter_map(|token| {
let parent = token.parent()?;
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index 816f1bebee..635f826862 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -4,7 +4,7 @@ use crate::{
doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
RangeInfo, TryToNav,
};
-use hir::{AsAssocItem, AssocItem, Semantics};
+use hir::{AsAssocItem, AssocItem, DescendPreference, Semantics};
use ide_db::{
base_db::{AnchoredPath, FileId, FileLoader},
defs::{Definition, IdentClass},
@@ -56,7 +56,7 @@ pub(crate) fn goto_definition(
});
}
let navs = sema
- .descend_into_macros(original_token.clone(), offset)
+ .descend_into_macros(DescendPreference::None, original_token.clone(), offset)
.into_iter()
.filter_map(|token| {
let parent = token.parent()?;
diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs
index 3593c5c7dd..6c1b9966a8 100644
--- a/crates/ide/src/goto_implementation.rs
+++ b/crates/ide/src/goto_implementation.rs
@@ -1,4 +1,4 @@
-use hir::{AsAssocItem, Impl, Semantics};
+use hir::{AsAssocItem, DescendPreference, Impl, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
@@ -34,7 +34,7 @@ pub(crate) fn goto_implementation(
})?;
let range = original_token.text_range();
let navs =
- sema.descend_into_macros(original_token, offset)
+ sema.descend_into_macros(DescendPreference::None, original_token, offset)
.into_iter()
.filter_map(|token| token.parent().and_then(ast::NameLike::cast))
.filter_map(|node| match &node {
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index 955923d769..4bd8bfdacb 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -1,3 +1,4 @@
+use hir::DescendPreference;
use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase};
use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T};
@@ -37,7 +38,7 @@ pub(crate) fn goto_type_definition(
}
};
let range = token.text_range();
- sema.descend_into_macros(token, offset)
+ sema.descend_into_macros(DescendPreference::None,token, offset)
.into_iter()
.filter_map(|token| {
let ty = sema
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index a7f5ae92a4..620e59a71b 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -1,4 +1,4 @@
-use hir::Semantics;
+use hir::{DescendPreference, Semantics};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
defs::{Definition, IdentClass},
@@ -461,7 +461,7 @@ fn find_defs(
token: SyntaxToken,
offset: TextSize,
) -> FxHashSet<Definition> {
- sema.descend_into_macros(token, offset)
+ sema.descend_into_macros(DescendPreference::None, token, offset)
.into_iter()
.filter_map(|token| IdentClass::classify_token(sema, &token))
.map(IdentClass::definitions_no_ops)
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index e0b64fe798..7f2783df3d 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -6,7 +6,7 @@ mod tests;
use std::iter;
use either::Either;
-use hir::{db::DefDatabase, HasSource, LangItem, Semantics};
+use hir::{db::DefDatabase, DescendPreference, HasSource, LangItem, Semantics};
use ide_db::{
base_db::FileRange,
defs::{Definition, IdentClass, NameRefClass, OperatorClass},
@@ -161,11 +161,11 @@ fn hover_simple(
// prefer descending the same token kind in attribute expansions, in normal macros text
// equivalency is more important
- let descended = if in_attr {
- [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into()
- } else {
- sema.descend_into_macros_with_same_text(original_token.clone(), offset)
- };
+ let descended = sema.descend_into_macros(
+ if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText },
+ original_token.clone(),
+ offset,
+ );
let descended = || descended.iter();
let result = descended()
diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs
index 2ca2b5b1d5..28d455f4e5 100644
--- a/crates/ide/src/moniker.rs
+++ b/crates/ide/src/moniker.rs
@@ -1,7 +1,7 @@
//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
//! for LSIF and LSP.
-use hir::{AsAssocItem, AssocItemContainer, Crate, Semantics};
+use hir::{AsAssocItem, AssocItemContainer, Crate, DescendPreference, Semantics};
use ide_db::{
base_db::{CrateOrigin, FilePosition, LangCrateOrigin},
defs::{Definition, IdentClass},
@@ -99,7 +99,7 @@ pub(crate) fn moniker(
});
}
let navs = sema
- .descend_into_macros(original_token.clone(), offset)
+ .descend_into_macros(DescendPreference::None, original_token.clone(), offset)
.into_iter()
.filter_map(|token| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index f387bbf6b0..2285ab1993 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -9,7 +9,7 @@
//! at the index that the match starts at and its tree parent is
//! resolved to the search element definition, we get a reference.
-use hir::{PathResolution, Semantics};
+use hir::{DescendPreference, PathResolution, Semantics};
use ide_db::{
base_db::FileId,
defs::{Definition, NameClass, NameRefClass},
@@ -126,7 +126,7 @@ pub(crate) fn find_defs<'a>(
)
});
token.map(|token| {
- sema.descend_into_macros_with_same_text(token, offset)
+ sema.descend_into_macros(DescendPreference::SameText, token, offset)
.into_iter()
.filter_map(|it| ast::NameLike::cast(it.parent()?))
.filter_map(move |name_like| {
diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs
index e020b52e17..c95f999461 100644
--- a/crates/ide/src/signature_help.rs
+++ b/crates/ide/src/signature_help.rs
@@ -4,7 +4,10 @@
use std::collections::BTreeSet;
use either::Either;
-use hir::{AssocItem, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait};
+use hir::{
+ AssocItem, DescendPreference, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics,
+ Trait,
+};
use ide_db::{
active_parameter::{callable_for_node, generic_def_for_node},
base_db::FilePosition,
@@ -79,7 +82,7 @@ pub(crate) fn signature_help(
// if the cursor is sandwiched between two space tokens and the call is unclosed
// this prevents us from leaving the CallExpression
.and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
- let token = sema.descend_into_macros_single(token, offset);
+ let token = sema.descend_into_macros_single(DescendPreference::None, token, offset);
for node in token.parent_ancestors() {
match_ast! {
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index dd72484b38..b2db6bb5c4 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -13,7 +13,7 @@ mod html;
#[cfg(test)]
mod tests;
-use hir::{Name, Semantics};
+use hir::{DescendPreference, Name, Semantics};
use ide_db::{FxHashMap, RootDatabase, SymbolKind};
use syntax::{
ast::{self, IsString},
@@ -393,14 +393,14 @@ fn traverse(
// Attempt to descend tokens into macro-calls.
let res = match element {
NodeOrToken::Token(token) if token.kind() != COMMENT => {
- let token = match attr_or_derive_item {
- Some(AttrOrDerive::Attr(_)) => {
- sema.descend_into_macros_with_kind_preference(token, 0.into())
- }
- Some(AttrOrDerive::Derive(_)) | None => {
- sema.descend_into_macros_single(token, 0.into())
- }
- };
+ let token = sema.descend_into_macros_single(
+ match attr_or_derive_item {
+ Some(AttrOrDerive::Attr(_)) => DescendPreference::SameKind,
+ Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None,
+ },
+ token,
+ 0.into(),
+ );
match token.parent().and_then(ast::NameLike::cast) {
// Remap the token into the wrapping single token nodes
Some(parent) => match (token.kind(), parent.syntax().kind()) {