Unnamed repository; edit this file 'description' to name the repository.
Auto merge of #16461 - Veykril:expansion-info, r=Veykril
internal: Remove unnecessary usages of ExpansionInfo, and some follow-up simplifications to https://github.com/rust-lang/rust-analyzer/pull/16439
bors 2024-01-31
parent e48bc04 · parent d252247 · commit 2661c27
-rw-r--r--crates/hir-expand/src/db.rs3
-rw-r--r--crates/hir-expand/src/files.rs44
-rw-r--r--crates/hir-expand/src/lib.rs75
-rw-r--r--crates/hir-expand/src/span_map.rs9
-rw-r--r--crates/hir/src/semantics.rs106
-rw-r--r--crates/ide-db/src/search.rs12
-rw-r--r--crates/ide/src/goto_definition.rs32
7 files changed, 175 insertions, 106 deletions
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 19dda651d7..f220284fae 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -80,6 +80,9 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::invoke(SpanMap::new)]
fn span_map(&self, file_id: HirFileId) -> SpanMap;
+ #[salsa::transparent]
+ #[salsa::invoke(crate::span_map::expansion_span_map)]
+ fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
#[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index d0a1bef11c..707daf0402 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -5,7 +5,7 @@ use either::Either;
use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId};
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
-use crate::{db, ExpansionInfo, MacroFileIdExt};
+use crate::{db, map_node_range_up, span_for_offset, MacroFileIdExt};
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
@@ -147,7 +147,7 @@ impl InFile<&SyntaxNode> {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
- ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -163,12 +163,15 @@ impl InFile<&SyntaxNode> {
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ pub fn original_file_range_with_macro_call_body(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
- ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -193,7 +196,7 @@ impl InFile<&SyntaxNode> {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
- ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range())
}
}
}
@@ -215,7 +218,7 @@ impl InFile<&SyntaxNode> {
}
let (FileRange { file_id, range }, ctx) =
- ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
+ map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -246,8 +249,11 @@ impl InFile<SyntaxToken> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
- let (range, ctxt) = ExpansionInfo::new(db, mac_file)
- .span_for_offset(db, self.value.text_range().start());
+ let (range, ctxt) = span_for_offset(
+ db,
+ &db.expansion_span_map(mac_file),
+ self.value.text_range().start(),
+ );
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -269,8 +275,11 @@ impl InFile<SyntaxToken> {
Some(FileRange { file_id, range: self.value.text_range() })
}
HirFileIdRepr::MacroFile(mac_file) => {
- let (range, ctxt) = ExpansionInfo::new(db, mac_file)
- .span_for_offset(db, self.value.text_range().start());
+ let (range, ctxt) = span_for_offset(
+ db,
+ &db.expansion_span_map(mac_file),
+ self.value.text_range().start(),
+ );
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -286,7 +295,7 @@ impl InFile<SyntaxToken> {
impl InMacroFile<TextSize> {
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
- ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
+ span_for_offset(db, &db.expansion_span_map(self.file_id), self.value)
}
}
@@ -300,7 +309,7 @@ impl InFile<TextRange> {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
}
HirFileIdRepr::MacroFile(mac_file) => {
- match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
@@ -315,7 +324,7 @@ impl InFile<TextRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
HirFileIdRepr::MacroFile(mac_file) => {
- match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some((it, SyntaxContextId::ROOT)) => it,
_ => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
@@ -335,7 +344,7 @@ impl InFile<TextRange> {
Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
- ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
+ map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
}
}
}
@@ -355,8 +364,11 @@ impl<N: AstNode> InFile<N> {
return None;
}
- let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
- .map_node_range_up(db, self.value.syntax().text_range())?;
+ let (FileRange { file_id, range }, ctx) = map_node_range_up(
+ db,
+ &db.expansion_span_map(file_id),
+ self.value.syntax().text_range(),
+ )?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 6cc654f033..bd25052490 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -677,6 +677,10 @@ impl ExpansionInfo {
Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
}
+ pub fn call_file(&self) -> HirFileId {
+ self.arg.file_id
+ }
+
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
pub fn map_range_down(
&self,
@@ -697,13 +701,7 @@ impl ExpansionInfo {
offset: TextSize,
) -> (FileRange, SyntaxContextId) {
debug_assert!(self.expanded.value.text_range().contains(offset));
- let span = self.exp_map.span_at(offset);
- let anchor_offset = db
- .ast_id_map(span.anchor.file_id.into())
- .get_erased(span.anchor.ast_id)
- .text_range()
- .start();
- (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+ span_for_offset(db, &self.exp_map, offset)
}
/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
@@ -713,27 +711,7 @@ impl ExpansionInfo {
range: TextRange,
) -> Option<(FileRange, SyntaxContextId)> {
debug_assert!(self.expanded.value.text_range().contains_range(range));
- let mut spans = self.exp_map.spans_for_range(range);
- let Span { range, anchor, ctx } = spans.next()?;
- let mut start = range.start();
- let mut end = range.end();
-
- for span in spans {
- if span.anchor != anchor || span.ctx != ctx {
- return None;
- }
- start = start.min(span.range.start());
- end = end.max(span.range.end());
- }
- let anchor_offset =
- db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
- Some((
- FileRange {
- file_id: anchor.file_id,
- range: TextRange::new(start, end) + anchor_offset,
- },
- ctx,
- ))
+ map_node_range_up(db, &self.exp_map, range)
}
/// Maps up the text range out of the expansion into its macro call.
@@ -822,6 +800,47 @@ impl ExpansionInfo {
}
}
+/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
+pub fn map_node_range_up(
+ db: &dyn ExpandDatabase,
+ exp_map: &ExpansionSpanMap,
+ range: TextRange,
+) -> Option<(FileRange, SyntaxContextId)> {
+ let mut spans = exp_map.spans_for_range(range);
+ let Span { range, anchor, ctx } = spans.next()?;
+ let mut start = range.start();
+ let mut end = range.end();
+
+ for span in spans {
+ if span.anchor != anchor || span.ctx != ctx {
+ return None;
+ }
+ start = start.min(span.range.start());
+ end = end.max(span.range.end());
+ }
+ let anchor_offset =
+ db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ Some((
+ FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset },
+ ctx,
+ ))
+}
+
+/// Looks up the span at the given offset.
+pub fn span_for_offset(
+ db: &dyn ExpandDatabase,
+ exp_map: &ExpansionSpanMap,
+ offset: TextSize,
+) -> (FileRange, SyntaxContextId) {
+ let span = exp_map.span_at(offset);
+ let anchor_offset = db
+ .ast_id_map(span.anchor.file_id.into())
+ .get_erased(span.anchor.ast_id)
+ .text_range()
+ .start();
+ (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+}
+
/// In Rust, macros expand token trees to token trees. When we want to turn a
/// token tree into an AST node, we need to figure out what kind of AST node we
/// want: something like `foo` can be a type, an expression, or a pattern.
diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs
index 8e624f5585..4a60a94856 100644
--- a/crates/hir-expand/src/span_map.rs
+++ b/crates/hir-expand/src/span_map.rs
@@ -1,5 +1,5 @@
//! Span maps for real files and macro expansions.
-use span::{FileId, HirFileId, HirFileIdRepr, Span};
+use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span};
use syntax::{AstNode, TextRange};
use triomphe::Arc;
@@ -94,3 +94,10 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<Rea
tree.syntax().text_range().end(),
))
}
+
+pub(crate) fn expansion_span_map(
+ db: &dyn ExpandDatabase,
+ file_id: MacroFileId,
+) -> Arc<ExpansionSpanMap> {
+ db.parse_macro_expansion(file_id).value.1
+}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 7dc84a0353..a869029d09 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -20,12 +20,12 @@ use hir_def::{
};
use hir_expand::{
attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
- HirFileIdExt, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
+ InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
-use span::Span;
+use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
@@ -132,6 +132,7 @@ pub struct SemanticsImpl<'db> {
/// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+ // So we might wanna move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
@@ -620,47 +621,47 @@ impl<'db> SemanticsImpl<'db> {
let parent = token.parent()?;
let file_id = self.find_file(&parent).file_id.file_id()?;
+ let mut cache = self.expansion_info_cache.borrow_mut();
+
// iterate related crates and find all include! invocations that include_file_id matches
for (invoc, _) in self
.db
.relevant_crates(file_id)
.iter()
.flat_map(|krate| self.db.include_macro_invoc(*krate))
- .filter(|(_, include_file_id)| *include_file_id == file_id)
+ .filter(|&(_, include_file_id)| include_file_id == file_id)
{
- // find file_id which original calls include!
- let Some(callnode) = invoc.as_file().original_call_node(self.db.upcast()) else {
- continue;
- };
-
- // call .parse to avoid panic in .find_file
- let _ = self.parse(callnode.file_id);
- let Some(sa) = self.analyze_no_infer(&callnode.value) else { continue };
-
- let expinfo = invoc.as_macro_file().expansion_info(self.db.upcast());
- {
- let InMacroFile { file_id, value } = expinfo.expanded();
- self.cache(value, file_id.into());
- }
+ let macro_file = invoc.as_macro_file();
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
- // map token to the corresponding span in include! macro file
- let Some((_, span)) =
- expinfo.exp_map.iter().find(|(_, x)| x.range == token.text_range())
+ // Create the source analyzer for the macro call scope
+ let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
else {
continue;
};
+ {
+ let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
+ self.cache(value, macro_file.into());
+ }
// get mapped token in the include! macro file
- let Some(InMacroFile { file_id: _, value: mapped_tokens }) =
- expinfo.map_range_down(span)
+ let span = span::SpanData {
+ range: token.text_range(),
+ anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+ ctx: SyntaxContextId::ROOT,
+ };
+ let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
+ expansion_info.map_range_down(span)
else {
continue;
};
// if we find one, then return
- if let Some(t) = mapped_tokens.into_iter().next() {
- return Some((sa, invoc.as_file(), t, span));
- };
+ if let Some(t) = mapped_tokens.next() {
+ return Some((sa, file_id.into(), t, span));
+ }
}
None
@@ -672,44 +673,37 @@ impl<'db> SemanticsImpl<'db> {
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros");
- let mut include_macro_file_id_and_span = None;
- let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
- Some(it) => it,
- None => {
- // if we cannot find a source analyzer for this token, then we try to find out whether this file is included from other file
- let Some((it, macro_file_id, mapped_token, s)) = self.is_from_include_file(token)
- else {
- return;
- };
-
- include_macro_file_id_and_span = Some((macro_file_id, s));
- token = mapped_token;
- it
- }
- };
-
- let span = if let Some((_, s)) = include_macro_file_id_and_span {
- s
- } else {
- match sa.file_id.file_id() {
- Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ let (sa, span, file_id) =
+ match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
+ Some(sa) => match sa.file_id.file_id() {
+ Some(file_id) => (
+ sa,
+ self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ file_id.into(),
+ ),
+ None => {
+ stdx::never!();
+ return;
+ }
+ },
None => {
- stdx::never!();
- return;
+ // if we cannot find a source analyzer for this token, then we try to find out
+ // whether this file is an included file and treat that as the include input
+ let Some((it, macro_file_id, mapped_token, s)) =
+ self.is_from_include_file(token)
+ else {
+ return;
+ };
+ token = mapped_token;
+ (it, s, macro_file_id)
}
- }
- };
+ };
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map();
- let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
- if let Some((macro_file_id, _)) = include_macro_file_id_and_span {
- vec![(macro_file_id, smallvec![token])]
- } else {
- vec![(sa.file_id, smallvec![token])]
- };
+ let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let expansion_info = cache
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index a521ab61a2..006d8882c1 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -304,14 +304,18 @@ impl Definition {
DefWithBody::InTypeConst(_) => return SearchScope::empty(),
};
return match def {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
+ Some(def) => SearchScope::file_range(
+ def.as_ref().original_file_range_with_macro_call_body(db),
+ ),
None => SearchScope::single_file(file_id),
};
}
if let Definition::SelfType(impl_) = self {
return match impl_.source(db).map(|src| src.syntax().cloned()) {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
+ Some(def) => SearchScope::file_range(
+ def.as_ref().original_file_range_with_macro_call_body(db),
+ ),
None => SearchScope::single_file(file_id),
};
}
@@ -328,7 +332,9 @@ impl Definition {
hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()),
};
return match def {
- Some(def) => SearchScope::file_range(def.as_ref().original_file_range_full(db)),
+ Some(def) => SearchScope::file_range(
+ def.as_ref().original_file_range_with_macro_call_body(db),
+ ),
None => SearchScope::single_file(file_id),
};
}
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index 073e003618..4fed1f9158 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -241,9 +241,8 @@ mod tests {
fn goto_def_in_included_file() {
check(
r#"
+//- minicore:include
//- /main.rs
-#[rustc_builtin_macro]
-macro_rules! include {}
include!("a.rs");
@@ -264,6 +263,35 @@ fn foo() {
}
#[test]
+ fn goto_def_in_included_file_nested() {
+ check(
+ r#"
+//- minicore:include
+//- /main.rs
+
+macro_rules! passthrough {
+ ($($tt:tt)*) => { $($tt)* }
+}
+
+passthrough!(include!("a.rs"));
+
+fn main() {
+ foo();
+}
+
+//- /a.rs
+fn func_in_include() {
+ //^^^^^^^^^^^^^^^
+}
+
+fn foo() {
+ func_in_include$0();
+}
+"#,
+ );
+ }
+
+ #[test]
fn goto_def_if_items_same_name() {
check(
r#"