Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/hir/src/semantics.rs')
-rw-r--r--crates/hir/src/semantics.rs106
1 file changed, 50 insertions, 56 deletions
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 7dc84a0353..a869029d09 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -20,12 +20,12 @@ use hir_def::{
};
use hir_expand::{
attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
- HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
+ InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
-use span::Span;
+use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
@@ -132,6 +132,7 @@ pub struct SemanticsImpl<'db> {
/// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+ // So we might want to move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
@@ -620,47 +621,47 @@ impl<'db> SemanticsImpl<'db> {
let parent = token.parent()?;
let file_id = self.find_file(&parent).file_id.file_id()?;
+ let mut cache = self.expansion_info_cache.borrow_mut();
+
// iterate related crates and find all include! invocations that include_file_id matches
for (invoc, _) in self
.db
.relevant_crates(file_id)
.iter()
.flat_map(|krate| self.db.include_macro_invoc(*krate))
- .filter(|(_, include_file_id)| *include_file_id == file_id)
+ .filter(|&(_, include_file_id)| include_file_id == file_id)
{
- // find file_id which original calls include!
- let Some(callnode) = invoc.as_file().original_call_node(self.db.upcast()) else {
- continue;
- };
-
- // call .parse to avoid panic in .find_file
- let _ = self.parse(callnode.file_id);
- let Some(sa) = self.analyze_no_infer(&callnode.value) else { continue };
-
- let expinfo = invoc.as_macro_file().expansion_info(self.db.upcast());
- {
- let InMacroFile { file_id, value } = expinfo.expanded();
- self.cache(value, file_id.into());
- }
+ let macro_file = invoc.as_macro_file();
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
- // map token to the corresponding span in include! macro file
- let Some((_, span)) =
- expinfo.exp_map.iter().find(|(_, x)| x.range == token.text_range())
+ // Create the source analyzer for the macro call scope
+ let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
else {
continue;
};
+ {
+ let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
+ self.cache(value, macro_file.into());
+ }
// get mapped token in the include! macro file
- let Some(InMacroFile { file_id: _, value: mapped_tokens }) =
- expinfo.map_range_down(span)
+ let span = span::SpanData {
+ range: token.text_range(),
+ anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+ ctx: SyntaxContextId::ROOT,
+ };
+ let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
+ expansion_info.map_range_down(span)
else {
continue;
};
// if we find one, then return
- if let Some(t) = mapped_tokens.into_iter().next() {
- return Some((sa, invoc.as_file(), t, span));
- };
+ if let Some(t) = mapped_tokens.next() {
+ return Some((sa, file_id.into(), t, span));
+ }
}
None
@@ -672,44 +673,37 @@ impl<'db> SemanticsImpl<'db> {
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros");
- let mut include_macro_file_id_and_span = None;
- let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
- Some(it) => it,
- None => {
- // if we cannot find a source analyzer for this token, then we try to find out whether this file is included from other file
- let Some((it, macro_file_id, mapped_token, s)) = self.is_from_include_file(token)
- else {
- return;
- };
-
- include_macro_file_id_and_span = Some((macro_file_id, s));
- token = mapped_token;
- it
- }
- };
-
- let span = if let Some((_, s)) = include_macro_file_id_and_span {
- s
- } else {
- match sa.file_id.file_id() {
- Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ let (sa, span, file_id) =
+ match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
+ Some(sa) => match sa.file_id.file_id() {
+ Some(file_id) => (
+ sa,
+ self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ file_id.into(),
+ ),
+ None => {
+ stdx::never!();
+ return;
+ }
+ },
None => {
- stdx::never!();
- return;
+ // if we cannot find a source analyzer for this token, then we try to find out
+ // whether this file is an included file and treat that as the include input
+ let Some((it, macro_file_id, mapped_token, s)) =
+ self.is_from_include_file(token)
+ else {
+ return;
+ };
+ token = mapped_token;
+ (it, s, macro_file_id)
}
- }
- };
+ };
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map();
- let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
- if let Some((macro_file_id, _)) = include_macro_file_id_and_span {
- vec![(macro_file_id, smallvec![token])]
- } else {
- vec![(sa.file_id, smallvec![token])]
- };
+ let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let expansion_info = cache