Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/hir/src/semantics.rs')
-rw-r--r--crates/hir/src/semantics.rs107
1 files changed, 90 insertions, 17 deletions
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index fdb94a6d5a..a869029d09 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -25,6 +25,7 @@ use hir_expand::{
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
+use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
@@ -131,6 +132,7 @@ pub struct SemanticsImpl<'db> {
/// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+ // So we might want to move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
@@ -607,29 +609,102 @@ impl<'db> SemanticsImpl<'db> {
res
}
- fn descend_into_macros_impl(
+ // Returns, if the token originates from an `include!`d file:
+ // - the `SourceAnalyzer` for the file containing the original `include!` call
+ // - the `HirFileId` of the `include!` macro expansion
+ // - the token inside the `include!` expansion that the input token maps to
+ // - the span computed for the mapped token
+ fn is_from_include_file(
&self,
token: SyntaxToken,
- f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
- ) {
- let _p = profile::span("descend_into_macros");
- let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
- Some(it) => it,
- None => return,
- };
+ ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> {
+ let parent = token.parent()?;
+ let file_id = self.find_file(&parent).file_id.file_id()?;
+
+ let mut cache = self.expansion_info_cache.borrow_mut();
+
+ // iterate over the relevant crates and find all include! invocations whose included file id matches this file
+ for (invoc, _) in self
+ .db
+ .relevant_crates(file_id)
+ .iter()
+ .flat_map(|krate| self.db.include_macro_invoc(*krate))
+ .filter(|&(_, include_file_id)| include_file_id == file_id)
+ {
+ let macro_file = invoc.as_macro_file();
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
- let span = match sa.file_id.file_id() {
- Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
- None => {
- stdx::never!();
- return;
+ // Create the source analyzer for the macro call scope
+ let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
+ else {
+ continue;
+ };
+ {
+ let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
+ self.cache(value, macro_file.into());
}
- };
+
+ // map the token's range down into the include! macro's expanded file
+ let span = span::SpanData {
+ range: token.text_range(),
+ anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+ ctx: SyntaxContextId::ROOT,
+ };
+ let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
+ expansion_info.map_range_down(span)
+ else {
+ continue;
+ };
+
+ // if we find one, then return
+ if let Some(t) = mapped_tokens.next() {
+ return Some((sa, file_id.into(), t, span));
+ }
+ }
+
+ None
+ }
+
+ fn descend_into_macros_impl(
+ &self,
+ mut token: SyntaxToken,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
+ ) {
+ let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros");
+ let (sa, span, file_id) =
+ match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
+ Some(sa) => match sa.file_id.file_id() {
+ Some(file_id) => (
+ sa,
+ self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ file_id.into(),
+ ),
+ None => {
+ stdx::never!();
+ return;
+ }
+ },
+ None => {
+ // if we cannot find a source analyzer for this token, then we try to find out
+ // whether this file is an included file and treat that as the include input
+ let Some((it, macro_file_id, mapped_token, s)) =
+ self.is_from_include_file(token)
+ else {
+ return;
+ };
+ token = mapped_token;
+ (it, s, macro_file_id)
+ }
+ };
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map();
+ let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
+
let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let expansion_info = cache
.entry(macro_file)
@@ -651,8 +726,6 @@ impl<'db> SemanticsImpl<'db> {
res
};
- let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
-
while let Some((file_id, mut tokens)) = stack.pop() {
while let Some(token) = tokens.pop() {
let was_not_remapped = (|| {
@@ -1222,7 +1295,7 @@ impl<'db> SemanticsImpl<'db> {
offset: Option<TextSize>,
infer_body: bool,
) -> Option<SourceAnalyzer> {
- let _p = profile::span("Semantics::analyze_impl");
+ let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl");
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?;