Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/hir/src/semantics.rs')
-rw-r--r--  crates/hir/src/semantics.rs  16
1 files changed, 9 insertions, 7 deletions
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index e792e159ac..6c70cc4baf 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -131,7 +131,7 @@ pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
/// Rootnode to HirFileId cache
- cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+ root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
// So we might wanna move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
@@ -294,7 +294,7 @@ impl<'db> SemanticsImpl<'db> {
SemanticsImpl {
db,
s2d_cache: Default::default(),
- cache: Default::default(),
+ root_to_file_cache: Default::default(),
expansion_info_cache: Default::default(),
macro_call_cache: Default::default(),
}
@@ -690,6 +690,7 @@ impl<'db> SemanticsImpl<'db> {
exp_info
});
+ // FIXME: uncached parse
// Create the source analyzer for the macro call scope
let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
else {
@@ -722,7 +723,7 @@ impl<'db> SemanticsImpl<'db> {
mut token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
- let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros").entered();
+ let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros_impl").entered();
let (sa, span, file_id) =
match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(sa) => match sa.file_id.file_id() {
@@ -1025,6 +1026,7 @@ impl<'db> SemanticsImpl<'db> {
None => {
let call_node = file_id.macro_file()?.call_node(db);
// cache the node
+ // FIXME: uncached parse
self.parse_or_expand(call_node.file_id);
Some(call_node)
}
@@ -1370,7 +1372,7 @@ impl<'db> SemanticsImpl<'db> {
offset: Option<TextSize>,
infer_body: bool,
) -> Option<SourceAnalyzer> {
- let _p = tracing::span!(tracing::Level::INFO, "Semantics::analyze_impl").entered();
+ let _p = tracing::span!(tracing::Level::INFO, "SemanticsImpl::analyze_impl").entered();
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
@@ -1397,7 +1399,7 @@ impl<'db> SemanticsImpl<'db> {
fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
assert!(root_node.parent().is_none());
- let mut cache = self.cache.borrow_mut();
+ let mut cache = self.root_to_file_cache.borrow_mut();
let prev = cache.insert(root_node, file_id);
assert!(prev.is_none() || prev == Some(file_id))
}
@@ -1407,7 +1409,7 @@ impl<'db> SemanticsImpl<'db> {
}
fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
- let cache = self.cache.borrow();
+ let cache = self.root_to_file_cache.borrow();
cache.get(root_node).copied()
}
@@ -1427,7 +1429,7 @@ impl<'db> SemanticsImpl<'db> {
known nodes: {}\n\n",
node,
root_node,
- self.cache
+ self.root_to_file_cache
.borrow()
.keys()
.map(|it| format!("{it:?}"))