Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r--crates/base-db/src/span.rs6
-rw-r--r--crates/hir-def/src/lib.rs7
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mbe.rs57
-rw-r--r--crates/hir-def/src/macro_expansion_tests/mod.rs30
-rw-r--r--crates/hir-def/src/macro_expansion_tests/proc_macros.rs59
-rw-r--r--crates/hir-def/src/nameres/collector.rs13
-rw-r--r--crates/hir-def/src/nameres/tests.rs4
-rw-r--r--crates/hir-def/src/test_db.rs4
-rw-r--r--crates/hir-expand/src/attrs.rs30
-rw-r--r--crates/hir-expand/src/db.rs8
-rw-r--r--crates/hir-ty/src/test_db.rs4
-rw-r--r--crates/hir-ty/src/tests/macros.rs12
-rw-r--r--crates/ide-db/src/lib.rs1
-rw-r--r--crates/mbe/src/syntax_bridge.rs12
-rw-r--r--crates/mbe/src/token_map.rs8
15 files changed, 197 insertions, 58 deletions
diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs
index f430a36ddf..600aa07933 100644
--- a/crates/base-db/src/span.rs
+++ b/crates/base-db/src/span.rs
@@ -16,6 +16,12 @@ pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
pub struct SyntaxContextId(InternId);
crate::impl_intern_key!(SyntaxContextId);
+impl fmt::Display for SyntaxContextId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.0.as_u32())
+ }
+}
+
impl SyntaxContext for SyntaxContextId {
const DUMMY: Self = Self::ROOT;
// veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 0da605819f..65f1dcc850 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -1322,6 +1322,7 @@ fn derive_macro_as_call_id(
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
+ call_site: SyntaxContextId,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@@ -1336,8 +1337,7 @@ fn derive_macro_as_call_id(
derive_index: derive_pos,
derive_attr_index,
},
- //FIXME
- SyntaxContextId::ROOT,
+ call_site,
);
Ok((macro_id, def_id, call_id))
}
@@ -1367,8 +1367,7 @@ fn attr_macro_as_call_id(
attr_args: Arc::new(arg),
invoc_attr_index: macro_attr.id,
},
- //FIXME
- SyntaxContextId::ROOT,
+ macro_attr.ctxt,
)
}
intern::impl_internable!(
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index af4b3e12b9..dcecec4e8e 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -23,10 +23,12 @@ macro_rules! f {
};
}
-// +tokenids
+// +spans
f!(struct MyTraitMap2);
"#,
- // FIXME: #SpanAnchor(FileId(0), 1)@91..92 why is there whitespace annotated with a span here?
+ // FIXME: #SpanAnchor(FileId(0), 1)@91..92\2# why is there whitespace annotated with a span
+ // here? Presumably because the leading `::` is getting two spans instead of one? Sounds
+ // like gluing might be failing here
expect![[r#"
macro_rules! f {
( struct $ident:ident ) => {
@@ -36,9 +38,9 @@ macro_rules! f {
};
}
-struct#SpanAnchor(FileId(0), 1)@58..64 MyTraitMap2#SpanAnchor(FileId(0), 2)@23..34 {#SpanAnchor(FileId(0), 1)@72..73
- map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 #SpanAnchor(FileId(0), 1)@91..92::#SpanAnchor(FileId(0), 1)@92..93std#SpanAnchor(FileId(0), 1)@93..96::#SpanAnchor(FileId(0), 1)@97..98collections#SpanAnchor(FileId(0), 1)@98..109::#SpanAnchor(FileId(0), 1)@110..111HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123
-}#SpanAnchor(FileId(0), 1)@132..133
+struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@20..31\0# {#FileId(0):1@72..73\2#
+ map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@91..92\2#::#FileId(0):1@92..93\2#std#FileId(0):1@93..96\2#::#FileId(0):1@97..98\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@110..111\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2#
+}#FileId(0):1@132..133\2#
"#]],
);
}
@@ -49,18 +51,19 @@ fn token_mapping_floats() {
// (and related issues)
check(
r#"
-// +tokenids
+// +spans
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
-// +tokenids
+// +spans
f! {
fn main() {
1;
1.0;
+ ((1,),).0.0;
let x = 1;
}
}
@@ -68,18 +71,19 @@ f! {
"#,
expect![[r#"
-// +tokenids
+// +spans
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
-fn#SpanAnchor(FileId(0), 2)@22..24 main#SpanAnchor(FileId(0), 2)@25..29(#SpanAnchor(FileId(0), 2)@29..30)#SpanAnchor(FileId(0), 2)@30..31 {#SpanAnchor(FileId(0), 2)@32..33
- 1#SpanAnchor(FileId(0), 2)@42..43;#SpanAnchor(FileId(0), 2)@43..44
- 1.0#SpanAnchor(FileId(0), 2)@53..56;#SpanAnchor(FileId(0), 2)@56..57
- let#SpanAnchor(FileId(0), 2)@66..69 x#SpanAnchor(FileId(0), 2)@70..71 =#SpanAnchor(FileId(0), 2)@72..73 1#SpanAnchor(FileId(0), 2)@74..75;#SpanAnchor(FileId(0), 2)@75..76
-}#SpanAnchor(FileId(0), 2)@81..82
+fn#FileId(0):2@19..21\0# main#FileId(0):2@22..26\0#(#FileId(0):2@26..27\0#)#FileId(0):2@27..28\0# {#FileId(0):2@29..30\0#
+ 1#FileId(0):2@39..40\0#;#FileId(0):2@40..41\0#
+ 1.0#FileId(0):2@50..53\0#;#FileId(0):2@53..54\0#
+ (#FileId(0):2@63..64\0#(#FileId(0):2@64..65\0#1#FileId(0):2@65..66\0#,#FileId(0):2@66..67\0# )#FileId(0):2@67..68\0#,#FileId(0):2@68..69\0# )#FileId(0):2@69..70\0#.#FileId(0):2@70..71\0#0#FileId(0):2@71..74\0#.#FileId(0):2@71..74\0#0#FileId(0):2@71..74\0#;#FileId(0):2@74..75\0#
+ let#FileId(0):2@84..87\0# x#FileId(0):2@88..89\0# =#FileId(0):2@90..91\0# 1#FileId(0):2@92..93\0#;#FileId(0):2@93..94\0#
+}#FileId(0):2@99..100\0#
"#]],
@@ -123,7 +127,7 @@ macro_rules! identity {
}
fn main(foo: ()) {
- format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+ format_args/*+spans*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
}
"#,
@@ -137,7 +141,7 @@ macro_rules! identity {
}
fn main(foo: ()) {
- builtin#SpanAnchor(FileId(0), 0)@0..0 ##SpanAnchor(FileId(0), 0)@0..0format_args#SpanAnchor(FileId(0), 0)@0..0 (#SpanAnchor(FileId(0), 6)@25..26"{} {} {}"#SpanAnchor(FileId(0), 6)@26..36,#SpanAnchor(FileId(0), 6)@36..37 format_args#SpanAnchor(FileId(0), 6)@38..49!#SpanAnchor(FileId(0), 6)@49..50(#SpanAnchor(FileId(0), 6)@50..51"{}"#SpanAnchor(FileId(0), 6)@51..55,#SpanAnchor(FileId(0), 6)@55..56 0#SpanAnchor(FileId(0), 6)@57..58)#SpanAnchor(FileId(0), 6)@58..59,#SpanAnchor(FileId(0), 6)@59..60 foo#SpanAnchor(FileId(0), 6)@61..64,#SpanAnchor(FileId(0), 6)@64..65 identity#SpanAnchor(FileId(0), 6)@66..74!#SpanAnchor(FileId(0), 6)@74..75(#SpanAnchor(FileId(0), 6)@75..7610#SpanAnchor(FileId(0), 6)@76..78)#SpanAnchor(FileId(0), 6)@78..79,#SpanAnchor(FileId(0), 6)@79..80 "bar"#SpanAnchor(FileId(0), 6)@81..86)#SpanAnchor(FileId(0), 6)@86..87
+ builtin#FileId(0):0@0..0\0# ##FileId(0):0@0..0\0#format_args#FileId(0):0@0..0\0# (#FileId(0):6@22..23\0#"{} {} {}"#FileId(0):6@23..33\0#,#FileId(0):6@33..34\0# format_args#FileId(0):6@35..46\0#!#FileId(0):6@46..47\0#(#FileId(0):6@47..48\0#"{}"#FileId(0):6@48..52\0#,#FileId(0):6@52..53\0# 0#FileId(0):6@54..55\0#)#FileId(0):6@55..56\0#,#FileId(0):6@56..57\0# foo#FileId(0):6@58..61\0#,#FileId(0):6@61..62\0# identity#FileId(0):6@63..71\0#!#FileId(0):6@71..72\0#(#FileId(0):6@72..73\0#10#FileId(0):6@73..75\0#)#FileId(0):6@75..76\0#,#FileId(0):6@76..77\0# "bar"#FileId(0):6@78..83\0#)#FileId(0):6@83..84\0#
}
"##]],
@@ -145,6 +149,29 @@ fn main(foo: ()) {
}
#[test]
+fn token_mapping_across_files() {
+ check(
+ r#"
+//- /lib.rs
+#[macro_use]
+mod foo;
+
+mk_struct/*+spans*/!(Foo with u32);
+//- /foo.rs
+macro_rules! mk_struct {
+ ($foo:ident with $ty:ty) => { struct $foo($ty); }
+}
+"#,
+ expect![[r#"
+#[macro_use]
+mod foo;
+
+struct#FileId(1):1@59..65\2# Foo#FileId(0):2@21..24\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@30..33\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2#
+"#]],
+ );
+}
+
+#[test]
fn float_field_access_macro_input() {
check(
r#"
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index d4902c52e7..f770d2832e 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -103,11 +103,11 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
for (call, exp) in expansions.into_iter().rev() {
let mut tree = false;
let mut expect_errors = false;
- let mut show_token_ids = false;
+ let mut show_spans = false;
for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
tree |= comment.to_string().contains("+tree");
expect_errors |= comment.to_string().contains("+errors");
- show_token_ids |= comment.to_string().contains("+tokenids");
+ show_spans |= comment.to_string().contains("+spans");
}
let mut expn_text = String::new();
@@ -128,10 +128,8 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
parse.syntax_node(),
);
}
- let pp = pretty_print_macro_expansion(
- parse.syntax_node(),
- show_token_ids.then_some(&*token_map),
- );
+ let pp =
+ pretty_print_macro_expansion(parse.syntax_node(), show_spans.then_some(&*token_map));
let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp);
format_to!(expn_text, "{}", pp);
@@ -166,9 +164,18 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
_ => None,
};
+
if let Some(src) = src {
if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
- let pp = pretty_print_macro_expansion(src.value, None);
+ let call = src.file_id.call_node(&db).expect("macro file");
+ let mut show_spans = false;
+ for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) {
+ show_spans |= comment.to_string().contains("+spans");
+ }
+ let pp = pretty_print_macro_expansion(
+ src.value,
+ show_spans.then_some(&db.span_map(src.file_id)),
+ );
format_to!(expanded_text, "\n{}", pp)
}
}
@@ -250,7 +257,14 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&SpanMap>) -> Stri
format_to!(res, "{}", token);
if let Some(map) = map {
if let Some(span) = map.span_for_range(token.text_range()) {
- format_to!(res, "#{:?}@{:?}", span.anchor, span.range);
+ format_to!(
+ res,
+ "#{:?}:{:?}@{:?}\\{}#",
+ span.anchor.file_id,
+ span.anchor.ast_id.into_raw(),
+ span.range,
+ span.ctx
+ );
}
}
}
diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 822bdcc122..29374945f6 100644
--- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -94,6 +94,41 @@ fn foo() {
}
#[test]
+fn macro_rules_in_attr() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
+ check(
+ r#"
+//- proc_macros: identity
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+id! {
+ #[proc_macros::identity]
+ impl Foo for WrapBj {
+ async fn foo(&self) {
+ self.id().await;
+ }
+ }
+}
+"#,
+ expect![[r#"
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+#[proc_macros::identity] impl Foo for WrapBj {
+ async fn foo(&self ) {
+ self .id().await ;
+ }
+}
+"#]],
+ );
+}
+
+#[test]
fn float_parsing_panic() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
check(
@@ -127,3 +162,27 @@ macro_rules! id {
"#]],
);
}
+
+#[test]
+fn float_attribute_mapping() {
+ check(
+ r#"
+//- proc_macros: identity
+//+spans
+#[proc_macros::identity]
+fn foo(&self) {
+ self.0. 1;
+}
+"#,
+ expect![[r#"
+//+spans
+#[proc_macros::identity]
+fn foo(&self) {
+ self.0. 1;
+}
+
+fn#FileId(0):1@34..36\0# foo#FileId(0):1@37..40\0#(#FileId(0):1@40..41\0#&#FileId(0):1@41..42\0#self#FileId(0):1@42..46\0# )#FileId(0):1@46..47\0# {#FileId(0):1@48..49\0#
+ self#FileId(0):1@54..58\0# .#FileId(0):1@58..59\0#0#FileId(0):1@59..61\0#.#FileId(0):1@59..61\0#1#FileId(0):1@62..63\0#;#FileId(0):1@63..64\0#
+}#FileId(0):1@65..66\0#"#]],
+ );
+}
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 360bf0f93e..fef1360422 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -219,6 +219,7 @@ enum MacroDirectiveKind {
ast_id: AstIdWithPath<ast::Adt>,
derive_attr: AttrId,
derive_pos: usize,
+ call_site: SyntaxContextId,
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
@@ -324,7 +325,7 @@ impl DefCollector<'_> {
.parse_path_comma_token_tree(self.db.upcast())
.into_iter()
.flatten()
- .filter_map(|feat| match feat.segments() {
+ .filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()),
_ => None,
});
@@ -1139,12 +1140,13 @@ impl DefCollector<'_> {
return false;
}
}
- MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
let id = derive_macro_as_call_id(
self.db,
ast_id,
*derive_attr,
*derive_pos as u32,
+ *call_site,
self.def_map.krate,
resolver,
);
@@ -1242,7 +1244,7 @@ impl DefCollector<'_> {
match attr.parse_path_comma_token_tree(self.db.upcast()) {
Some(derive_macros) => {
let mut len = 0;
- for (idx, path) in derive_macros.enumerate() {
+ for (idx, (path, call_site)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id,
@@ -1251,6 +1253,7 @@ impl DefCollector<'_> {
ast_id,
derive_attr: attr.id,
derive_pos: idx,
+ call_site,
},
container: directive.container,
});
@@ -1438,7 +1441,7 @@ impl DefCollector<'_> {
));
}
}
- MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Derive {
@@ -1828,7 +1831,7 @@ impl ModCollector<'_, '_> {
);
return;
};
- for path in paths {
+ for (path, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs
index e7cc44b04d..b2ffbbe4c5 100644
--- a/crates/hir-def/src/nameres/tests.rs
+++ b/crates/hir-def/src/nameres/tests.rs
@@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::{expect, Expect};
use triomphe::Arc;
-use crate::{db::DefDatabase, test_db::TestDB};
-
-use super::DefMap;
+use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
let db = TestDB::with_files(ra_fixture);
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index db64e7cd53..f4a6b61f7a 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -7,7 +7,7 @@ use base_db::{
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, SourceDatabase,
Upcast,
};
-use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData, InFile};
+use hir_expand::{db::ExpandDatabase, InFile};
use rustc_hash::FxHashSet;
use syntax::{algo, ast, AstNode};
use triomphe::Arc;
@@ -34,7 +34,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
- this.intern_syntax_context(SyntaxContextData::root());
+ this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 5ce12d2f6e..76c787721b 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -2,13 +2,16 @@
use std::{fmt, ops};
use ::tt::SpanAnchor as _;
-use base_db::{span::SpanAnchor, CrateId};
+use base_db::{
+ span::{SpanAnchor, SyntaxContextId},
+ CrateId,
+};
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
-use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
+use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc;
use crate::{
@@ -54,6 +57,9 @@ impl RawAttrs {
id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))),
+ ctxt: hygiene
+ .span_for_range(comment.syntax().text_range())
+ .map_or(SyntaxContextId::ROOT, |s| s.ctx),
}),
})
.collect::<Vec<_>>();
@@ -191,6 +197,7 @@ pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>,
+ pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -235,7 +242,14 @@ impl Attr {
} else {
None
};
- Some(Attr { id, path, input })
+ Some(Attr {
+ id,
+ path,
+ input,
+ ctxt: hygiene
+ .span_for_range(ast.syntax().text_range())
+ .map_or(SyntaxContextId::ROOT, |s| s.ctx),
+ })
}
fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
@@ -284,9 +298,8 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
- ) -> Option<impl Iterator<Item = ModPath> + 'a> {
+ ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
let args = self.token_tree_value()?;
- dbg!(args);
if args.delimiter.kind != DelimiterKind::Parenthesis {
return None;
@@ -298,6 +311,11 @@ impl Attr {
if tts.is_empty() {
return None;
}
+ // FIXME: Absolutely wrong
+ let call_site = match tts.first().unwrap() {
+ tt::TokenTree::Leaf(l) => l.span().ctx,
+ tt::TokenTree::Subtree(s) => s.delimiter.open.ctx,
+ };
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here.
let subtree = tt::Subtree {
@@ -313,7 +331,7 @@ impl Attr {
return None;
}
let path = meta.path()?;
- ModPath::from_src(db, path, &span_map)
+ Some((ModPath::from_src(db, path, &span_map)?, call_site))
});
Some(paths)
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 7dd69099a6..e176bef78b 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -103,7 +103,7 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)>;
- // TODO: transparent?
+ // FIXME: This always allocates one for non macro files which is wasteful.
#[salsa::transparent]
fn span_map(&self, file_id: HirFileId) -> Arc<SpanMap>;
@@ -117,6 +117,8 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::interned]
fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
#[salsa::transparent]
+ fn setup_syntax_context_root(&self) -> ();
+ #[salsa::transparent]
#[salsa::invoke(hygiene::apply_mark)]
fn apply_mark(
&self,
@@ -770,3 +772,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
Ok(())
}
}
+
+fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
+ db.intern_syntax_context(SyntaxContextData::root());
+}
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index a3383b2b5d..6f4aef22d2 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -7,7 +7,7 @@ use base_db::{
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir_def::{db::DefDatabase, ModuleId};
-use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData};
+use hir_expand::db::ExpandDatabase;
use nohash_hasher::IntMap;
use rustc_hash::FxHashSet;
use syntax::TextRange;
@@ -30,7 +30,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
- this.intern_syntax_context(SyntaxContextData::root());
+ this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs
index d16e0eb013..1e10a6feca 100644
--- a/crates/hir-ty/src/tests/macros.rs
+++ b/crates/hir-ty/src/tests/macros.rs
@@ -63,10 +63,10 @@ fn infer_macros_expanded() {
}
"#,
expect![[r#"
- !0..17 '{Foo(v...,2,])}': Foo
+ !0..21 '{Foo(v...2),])}': Foo
!1..4 'Foo': Foo({unknown}) -> Foo
- !1..16 'Foo(vec![1,2,])': Foo
- !5..15 'vec![1,2,]': {unknown}
+ !1..20 'Foo(ve...(2),])': Foo
+ !5..19 'vec![(1),(2),]': {unknown}
155..181 '{ ...,2); }': ()
165..166 'x': Foo
"#]],
@@ -96,10 +96,10 @@ fn infer_legacy_textual_scoped_macros_expanded() {
}
"#,
expect![[r#"
- !0..17 '{Foo(v...,2,])}': Foo
+ !0..21 '{Foo(v...2),])}': Foo
!1..4 'Foo': Foo({unknown}) -> Foo
- !1..16 'Foo(vec![1,2,])': Foo
- !5..15 'vec![1,2,]': {unknown}
+ !1..20 'Foo(ve...(2),])': Foo
+ !5..19 'vec![(1),(2),]': {unknown}
194..250 '{ ...,2); }': ()
204..205 'x': Foo
227..228 'y': {unknown}
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index 957c9ad26c..38c9a3538f 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -144,6 +144,7 @@ impl RootDatabase {
db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
db.update_parse_query_lru_capacity(lru_capacity);
+ db.setup_syntax_context_root();
db
}
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index b843db510e..f47123336b 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -717,24 +717,29 @@ where
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.
fn float_split(&mut self, has_pseudo_dot: bool) {
- // TODO: FIXME this breaks the hygiene map
- let (text, _span) = match self.cursor.token_tree() {
+ let (text, span) = match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => {
(lit.text.as_str(), lit.span)
}
_ => unreachable!(),
};
+ // FIXME: Span splitting
match text.split_once('.') {
Some((left, right)) => {
assert!(!left.is_empty());
+
self.inner.start_node(SyntaxKind::NAME_REF);
self.inner.token(SyntaxKind::INT_NUMBER, left);
self.inner.finish_node();
+ let range = TextRange::at(self.text_pos, TextSize::of(left));
+ self.token_map.insert(range, span);
// here we move the exit up, the original exit has been deleted in process
self.inner.finish_node();
self.inner.token(SyntaxKind::DOT, ".");
+ let range = TextRange::at(range.end(), TextSize::of("."));
+ self.token_map.insert(range, span);
if has_pseudo_dot {
assert!(right.is_empty(), "{left}.{right}");
@@ -742,11 +747,14 @@ where
assert!(!right.is_empty(), "{left}.{right}");
self.inner.start_node(SyntaxKind::NAME_REF);
self.inner.token(SyntaxKind::INT_NUMBER, right);
+ let range = TextRange::at(range.end(), TextSize::of(right));
+ self.token_map.insert(range, span);
self.inner.finish_node();
// the parser creates an unbalanced start node, we are required to close it here
self.inner.finish_node();
}
+ self.text_pos += TextSize::of(text);
}
None => unreachable!(),
}
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
index c825bd01bc..dfbf54410b 100644
--- a/crates/mbe/src/token_map.rs
+++ b/crates/mbe/src/token_map.rs
@@ -2,6 +2,7 @@
use std::hash::Hash;
+use stdx::never;
use syntax::TextRange;
use tt::Span;
@@ -59,11 +60,10 @@ impl<S: Span> TokenMap<S> {
.max_by_key(|(_, _, intersection)| intersection.len())
.map(|(_, &s, _)| s)
.or_else(|| {
- if self.real_file {
- None
- } else {
- panic!("no span for range {range:?} in {:#?}", self.span_map)
+ if !self.real_file {
+ never!("no span for range {:?} in {:#?}", range, self.span_map);
}
+ None
})
}