Re-enable proc-macros
Lukas Wirth 2023-11-28
parent 98cfdde · commit b98597f
-rw-r--r--  Cargo.lock                                            3
-rw-r--r--  crates/base-db/src/fixture.rs                        12
-rw-r--r--  crates/base-db/src/input.rs                           3
-rw-r--r--  crates/base-db/src/span.rs                            2
-rw-r--r--  crates/hir-def/src/macro_expansion_tests/mod.rs       5
-rw-r--r--  crates/hir-expand/src/db.rs                          55
-rw-r--r--  crates/hir-expand/src/fixup.rs                      273
-rw-r--r--  crates/hir-expand/src/proc_macro.rs                   8
-rw-r--r--  crates/load-cargo/src/lib.rs                         24
-rw-r--r--  crates/mbe/src/expander/transcriber.rs                2
-rw-r--r--  crates/mbe/src/lib.rs                                 3
-rw-r--r--  crates/mbe/src/syntax_bridge.rs                     105
-rw-r--r--  crates/proc-macro-api/Cargo.toml                      7
-rw-r--r--  crates/proc-macro-api/src/lib.rs                     39
-rw-r--r--  crates/proc-macro-api/src/msg.rs                    127
-rw-r--r--  crates/proc-macro-api/src/msg/flat.rs               381
-rw-r--r--  crates/proc-macro-srv-cli/src/main.rs                 6
-rw-r--r--  crates/proc-macro-srv/src/dylib.rs                   10
-rw-r--r--  crates/proc-macro-srv/src/lib.rs                     38
-rw-r--r--  crates/proc-macro-srv/src/proc_macros.rs             39
-rw-r--r--  crates/proc-macro-srv/src/server.rs                  47
-rw-r--r--  crates/proc-macro-srv/src/server/token_stream.rs     64
-rw-r--r--  crates/proc-macro-srv/src/tests/utils.rs             25
-rw-r--r--  crates/rust-analyzer/src/integrated_benchmarks.rs     2
24 files changed, 787 insertions, 493 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 775231f3ea..c71ca2f212 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1255,6 +1255,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
name = "proc-macro-api"
version = "0.0.0"
dependencies = [
+ "base-db",
+ "indexmap",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap2",
"object 0.32.0",
"paths",
diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs
index 7236b56f6d..cfba01a032 100644
--- a/crates/base-db/src/fixture.rs
+++ b/crates/base-db/src/fixture.rs
@@ -543,6 +543,9 @@ impl ProcMacroExpander for IdentityProcMacroExpander {
subtree: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
@@ -557,6 +560,9 @@ impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
_: &Subtree<SpanData>,
attrs: Option<&Subtree<SpanData>>,
_: &Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
attrs
.cloned()
@@ -572,6 +578,9 @@ impl ProcMacroExpander for MirrorProcMacroExpander {
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let mut token_trees = vec![];
@@ -599,6 +608,9 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
return Ok(traverse(input));
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 0b04a91f62..12b449932d 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -262,6 +262,9 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
}
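
Note on the widened trait above: the three new `SpanData` parameters correspond to the hygiene spans the `proc_macro` bridge exposes as `Span::def_site()`, `Span::call_site()` and `Span::mixed_site()`; the client now supplies them instead of the server inventing dummy spans. A minimal sketch of an implementor against the new signature, assuming the usual `base-db`/`tt` imports (the struct name is made up and not part of this commit):

    use base_db::{span::SpanData, Env, ProcMacroExpander, ProcMacroExpansionError};

    /// Hypothetical expander: echoes its input but restamps the delimiter
    /// with the call-site span, so the output resolves at the invocation.
    #[derive(Debug)]
    struct CallSiteEcho;

    impl ProcMacroExpander for CallSiteEcho {
        fn expand(
            &self,
            subtree: &tt::Subtree<SpanData>,
            _attrs: Option<&tt::Subtree<SpanData>>,
            _env: &Env,
            _def_site: SpanData,
            call_site: SpanData,
            _mixed_site: SpanData,
        ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
            let mut tt = subtree.clone();
            tt.delimiter.open = call_site;
            tt.delimiter.close = call_site;
            Ok(tt)
        }
    }
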
diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs
index 607b8027ca..d2b40ecdd2 100644
--- a/crates/base-db/src/span.rs
+++ b/crates/base-db/src/span.rs
@@ -38,6 +38,8 @@ impl SyntaxContextId {
// currently (which kind of makes sense but we need it here!)
pub const SELF_REF: Self =
SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
+    /// Used for syntax fixups
+ pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) });
pub fn is_root(self) -> bool {
self == Self::ROOT
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 355b82a5f4..bcbf4047ca 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -16,7 +16,7 @@ mod proc_macros;
use std::{iter, ops::Range, sync};
-use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
+use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId};
use stdx::format_to;
@@ -307,6 +307,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
subtree: &Subtree,
_: Option<&Subtree>,
_: &base_db::Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<Subtree, base_db::ProcMacroExpansionError> {
let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 601a754abb..1d55aaf170 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -233,7 +233,17 @@ pub fn expand_speculative(
let speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::UNSPECIFIED;
- expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
+ let call_site = loc.span(db);
+ expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &tt,
+ attr_arg.as_ref(),
+ call_site,
+ call_site,
+ call_site,
+ )
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
@@ -398,17 +408,23 @@ fn macro_arg(
MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
};
let censor = censor_for_macro_input(&loc, &syntax);
- // let mut fixups = fixup::fixup_syntax(&node);
- // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
- // &node,
- // fixups.token_map,
- // fixups.next_id,
- // fixups.replace,
- // fixups.append,
- // );
-
- let mut tt = mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor);
+ let mut tt = match loc.kind {
+ MacroCallKind::FnLike { .. } => {
+ mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor)
+ }
+ MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+ // let mut fixups = crate::fixup::fixup_syntax(&syntax);
+ // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+ // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ // &node,
+ // fixups.token_map,
+ // fixups.next_id,
+ // fixups.replace,
+ // fixups.append,
+ // );
+ mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor)
+ }
+ };
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
@@ -658,8 +674,19 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
_ => None,
};
- let ExpandResult { value: tt, err } =
- expander.expand(db, loc.def.krate, loc.krate, &macro_arg, attr_arg);
+ let call_site = loc.span(db);
+ let ExpandResult { value: tt, err } = expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &macro_arg,
+ attr_arg,
+ // FIXME
+ call_site,
+ call_site,
+ // FIXME
+ call_site,
+ );
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index e6e8d8c029..326ff1dec4 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -2,25 +2,30 @@
//! fix up syntax errors in the code we're passing to them.
use std::mem;
-use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
+use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+ FileId,
+};
+use la_arena::RawIdx;
+use mbe::TokenMap;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use syntax::{
ast::{self, AstNode, HasLoopBody},
- match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+ match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
-use tt::token_id::Subtree;
+use tt::Spacing;
+
+use crate::tt::{Ident, Leaf, Punct, Subtree};
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
- pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
+ pub(crate) replace: FxHashMap<SyntaxElement, Vec<()>>,
pub(crate) undo_info: SyntaxFixupUndoInfo,
- pub(crate) token_map: TokenMap,
- pub(crate) next_id: u32,
}
/// This is the information needed to reverse the fixups.
@@ -29,21 +34,25 @@ pub struct SyntaxFixupUndoInfo {
original: Box<[Subtree]>,
}
-const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+// censoring -> just don't convert the node
+// replacement -> censor + append
+// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
+// to remove later
pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut replace = FxHashMap::<SyntaxElement, _>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
- let mut token_map = TokenMap::default();
- let mut next_id = 0;
+ let dummy_range = TextRange::empty(TextSize::new(0));
+ let dummy_anchor =
+ SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) };
+ let fake_span =
+ SpanData { range: dummy_range, anchor: dummy_anchor, ctx: SyntaxContextId::FAKE };
while let Some(event) = preorder.next() {
- let node = match event {
- syntax::WalkEvent::Enter(node) => node,
- syntax::WalkEvent::Leave(_) => continue,
- };
+ let syntax::WalkEvent::Enter(node) = event else { continue };
+ /* TODO
if can_handle_error(&node) && has_error_to_handle(&node) {
// the node contains an error node, we have to completely replace it by something valid
let (original_tree, new_tmap, new_next_id) =
@@ -68,6 +77,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
preorder.skip_subtree();
continue;
}
+ */
// In some other situations, we can fix things by just appending some tokens.
let end_range = TextRange::empty(node.text_range().end());
match_ast! {
@@ -76,36 +86,32 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.name_ref().is_none() {
// incomplete field access: some_expr.|
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span
+ }),
]);
}
},
ast::ExprStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::SEMICOLON,
- text: ";".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ Leaf::Punct(Punct {
+ char: ';',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
]);
}
},
ast::LetStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::SEMICOLON,
- text: ";".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ Leaf::Punct(Punct {
+ char: ';',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
]);
}
},
@@ -117,28 +123,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(if_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span
+ }),
]);
}
if it.then_branch().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                        // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
]);
}
},
@@ -150,46 +153,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(while_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span
+ }),
]);
}
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                        // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
]);
}
},
ast::LoopExpr(it) => {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                        // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
]);
}
},
@@ -201,29 +200,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue
};
append.insert(match_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID
- },
+ span: fake_span
+ }),
]);
}
if it.match_arm_list().is_none() {
// No match arms
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                        // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
]);
}
},
@@ -234,10 +230,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
};
let [pat, in_token, iter] = [
- (SyntaxKind::UNDERSCORE, "_"),
- (SyntaxKind::IN_KW, "in"),
- (SyntaxKind::IDENT, "__ra_fixup")
- ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
+ "_",
+ "in",
+ "__ra_fixup"
+ ].map(|text|
+ Leaf::Ident(Ident {
+ text: text.into(),
+ span: fake_span
+ }),
+ );
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
append.insert(for_token.into(), vec![pat, in_token, iter]);
@@ -248,18 +249,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                        // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span
+ }),
]);
}
},
@@ -270,8 +270,6 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
SyntaxFixups {
append,
replace,
- token_map,
- next_id,
undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
}
}
@@ -288,40 +286,33 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
-pub(crate) fn reverse_fixups(
- tt: &mut Subtree,
- token_map: &TokenMap,
- undo_info: &SyntaxFixupUndoInfo,
-) {
+pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
let tts = std::mem::take(&mut tt.token_trees);
tt.token_trees = tts
.into_iter()
+ // delete all fake nodes
.filter(|tt| match tt {
- tt::TokenTree::Leaf(leaf) => {
- token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
- }
- tt::TokenTree::Subtree(st) => {
- token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
- }
- })
- .flat_map(|tt| match tt {
- tt::TokenTree::Subtree(mut tt) => {
- reverse_fixups(&mut tt, token_map, undo_info);
- SmallVec::from_const([tt.into()])
- }
- tt::TokenTree::Leaf(leaf) => {
- if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
- let original = undo_info.original[id.0 as usize].clone();
- if original.delimiter.kind == tt::DelimiterKind::Invisible {
- original.token_trees.into()
- } else {
- SmallVec::from_const([original.into()])
- }
- } else {
- SmallVec::from_const([leaf.into()])
- }
- }
+ tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE,
+ tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE,
})
+ // .flat_map(|tt| match tt { TODO
+ // tt::TokenTree::Subtree(mut tt) => {
+ // reverse_fixups(&mut tt, undo_info);
+ // SmallVec::from_const([tt.into()])
+ // }
+ // tt::TokenTree::Leaf(leaf) => {
+ // if let Some(id) = leaf.span().anchor {
+ // let original = undo_info.original[id.0 as usize].clone();
+ // if original.delimiter.kind == tt::DelimiterKind::Invisible {
+ // original.token_trees.into()
+ // } else {
+ // SmallVec::from_const([original.into()])
+ // }
+ // } else {
+ // SmallVec::from_const([leaf.into()])
+ // }
+ // }
+ // })
.collect();
}
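
With the old `TokenMap` bookkeeping gone, the fact that a token was synthesized by fixup now lives entirely in its span: every appended leaf carries `fake_span`, whose context is `SyntaxContextId::FAKE`, and `reverse_fixups` simply drops any token tree whose span still has that context. A rough sketch of that invariant from the outside, written as if it were a unit test inside `hir-expand` (the fixture text is made up; `append` and `fixup_syntax` are crate-private):

    // Every leaf produced by fixup_syntax is stamped with the FAKE context,
    // which is exactly what reverse_fixups keys its filtering on.
    let parse = syntax::SourceFile::parse("fn f() { some_expr. }");
    let fixups = fixup_syntax(&parse.syntax_node());
    assert!(fixups
        .append
        .values()
        .flatten()
        .all(|leaf| leaf.span().ctx == SyntaxContextId::FAKE));
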
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 41675c630d..04b5b7b0b6 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -1,6 +1,6 @@
//! Proc Macro Expander stub
-use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
+use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@@ -33,6 +33,9 @@ impl ProcMacroExpander {
calling_crate: CrateId,
tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>,
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id {
ProcMacroId(DUMMY_ID) => {
@@ -68,7 +71,8 @@ impl ProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
- match proc_macro.expander.expand(tt, attr_arg, env) {
+ match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
+ {
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
// Don't discard the item in case something unexpected happened while expanding attributes
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 4d13190949..ed4175c458 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -376,16 +376,16 @@ impl ProcMacroExpander for Expander {
subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
- let _ = (subtree, attrs, env);
-
- // let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
- // match self.0.expand(subtree, attrs, env) {
- // Ok(Ok(subtree)) => Ok(subtree),
- // Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
- // Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
- // }
- todo!()
+ let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+ match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) {
+ Ok(Ok(subtree)) => Ok(subtree),
+ Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+ Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+ }
}
}
@@ -399,6 +399,9 @@ impl ProcMacroExpander for IdentityExpander {
subtree: &tt::Subtree<SpanData>,
_: Option<&tt::Subtree<SpanData>>,
_: &Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
@@ -414,6 +417,9 @@ impl ProcMacroExpander for EmptyExpander {
_: &tt::Subtree<SpanData>,
_: Option<&tt::Subtree<SpanData>>,
_: &Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
Ok(tt::Subtree::empty())
}
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs
index c8b326fa6c..8aedd73140 100644
--- a/crates/mbe/src/expander/transcriber.rs
+++ b/crates/mbe/src/expander/transcriber.rs
@@ -51,7 +51,7 @@ impl<S: Span> Bindings<S> {
marker(&mut span);
let subtree = tt::Subtree {
delimiter: tt::Delimiter {
- // TODO split span
+ // FIXME split span
open: span,
close: span,
kind: delimiter.kind,
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 1b13b39f01..fdf97ad538 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -34,7 +34,8 @@ pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext};
pub use crate::{
syntax_bridge::{
- map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+ map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree,
+ parse_to_token_tree_static_span, syntax_node_to_token_tree,
syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SpanMapper,
},
token_map::TokenMap,
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 5d802ba86c..c61c526286 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -63,7 +63,7 @@ pub(crate) mod dummy_test_span_utils {
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume).
-/// TODO: Flesh out the doc comment more thoroughly
+/// FIXME: Flesh out the doc comment more thoroughly
pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>(
node: &SyntaxNode,
map: SpanMap,
@@ -179,6 +179,19 @@ where
Some(convert_tokens(&mut conv))
}
+/// Convert a string to a `TokenTree`
+pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
+where
+ S: Span,
+{
+ let lexed = parser::LexedStr::new(text);
+ if lexed.errors().next().is_some() {
+ return None;
+ }
+ let mut conv = StaticRawConverter { lexed, pos: 0, span };
+ Some(convert_tokens(&mut conv))
+}
+
/// Split token tree with separate expr: $($e:expr)SEP*
pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
if tt.token_trees.is_empty() {
@@ -213,12 +226,10 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
res
}
-fn convert_tokens<Anchor, Ctx, C>(conv: &mut C) -> tt::Subtree<SpanData<Anchor, Ctx>>
+fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
where
- C: TokenConverter<Anchor, Ctx>,
- Ctx: SyntaxContext,
- SpanData<Anchor, Ctx>: Span,
- Anchor: Copy,
+ C: TokenConverter<S>,
+ S: Span,
{
let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] };
let mut stack = NonEmptyVec::new(entry);
@@ -452,6 +463,12 @@ struct RawConverter<'a, Anchor> {
pos: usize,
anchor: Anchor,
}
+/// A raw token (straight from lexer) converter that gives every token the same span.
+struct StaticRawConverter<'a, S> {
+ lexed: parser::LexedStr<'a>,
+ pos: usize,
+ span: S,
+}
trait SrcToken<Ctx>: std::fmt::Debug {
fn kind(&self, ctx: &Ctx) -> SyntaxKind;
@@ -461,20 +478,16 @@ trait SrcToken<Ctx>: std::fmt::Debug {
fn to_text(&self, ctx: &Ctx) -> SmolStr;
}
-trait TokenConverter<Anchor, Ctx>: Sized {
+trait TokenConverter<S>: Sized {
type Token: SrcToken<Self>;
- fn convert_doc_comment(
- &self,
- token: &Self::Token,
- span: SpanData<Anchor, Ctx>,
- ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>>;
+ fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>;
fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
fn peek(&self) -> Option<Self::Token>;
- fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx>;
+ fn span_for(&self, range: TextRange) -> S;
}
impl<Anchor> SrcToken<RawConverter<'_, Anchor>> for usize {
@@ -491,7 +504,22 @@ impl<Anchor> SrcToken<RawConverter<'_, Anchor>> for usize {
}
}
-impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<Anchor, Ctx> for RawConverter<'_, Anchor>
+impl<S: Span> SrcToken<StaticRawConverter<'_, S>> for usize {
+ fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
+ ctx.lexed.kind(*self)
+ }
+
+ fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option<char> {
+ ctx.lexed.text(*self).chars().next()
+ }
+
+ fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr {
+ ctx.lexed.text(*self).into()
+ }
+}
+
+impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<SpanData<Anchor, Ctx>>
+ for RawConverter<'_, Anchor>
where
SpanData<Anchor, Ctx>: Span,
{
@@ -530,6 +558,41 @@ where
}
}
+impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
+where
+ S: Span,
+{
+ type Token = usize;
+
+ fn convert_doc_comment(&self, &token: &usize, span: S) -> Option<Vec<tt::TokenTree<S>>> {
+ let text = self.lexed.text(token);
+ convert_doc_comment(&doc_comment(text), span)
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ let token = self.pos;
+ self.pos += 1;
+ let range = self.lexed.text_range(token);
+ let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?);
+
+ Some((token, range))
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ Some(self.pos)
+ }
+
+ fn span_for(&self, _: TextRange) -> S {
+ self.span
+ }
+}
+
struct Converter<SpanMap> {
current: Option<SyntaxToken>,
preorder: PreorderWithTokens,
@@ -596,17 +659,13 @@ impl<SpanMap> SrcToken<Converter<SpanMap>> for SynToken {
}
}
-impl<Anchor: Copy, Ctx, SpanMap> TokenConverter<Anchor, Ctx> for Converter<SpanMap>
+impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap>
where
- SpanData<Anchor, Ctx>: Span,
- SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+ S: Span,
+ SpanMap: SpanMapper<S>,
{
type Token = SynToken;
- fn convert_doc_comment(
- &self,
- token: &Self::Token,
- span: SpanData<Anchor, Ctx>,
- ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
+ fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
convert_doc_comment(token.token(), span)
}
@@ -661,7 +720,7 @@ where
Some(token)
}
- fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> {
+ fn span_for(&self, range: TextRange) -> S {
self.map.span_for(range)
}
}
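
`parse_to_token_tree_static_span` is the fixed-span counterpart of `parse_to_token_tree`: it lexes the text and gives every resulting token the single span passed in, which is what the proc-macro server needs now that its spans are opaque `TokenId`s rather than real anchors. A small usage sketch, assuming a crate that can see both `mbe` and `proc_macro_api` (the choice of `TokenId(0)` is arbitrary):

    use proc_macro_api::msg::TokenId;

    // Every token in the result carries the same span we pass in.
    let tt: tt::Subtree<TokenId> =
        mbe::parse_to_token_tree_static_span(TokenId(0), "struct Foo;")
            .expect("input lexes without errors");
    assert_eq!(tt.token_trees.len(), 3); // `struct`, `Foo`, `;`
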
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index 4c87c89add..2cbbc9489a 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -25,6 +25,7 @@ tracing.workspace = true
triomphe.workspace = true
memmap2 = "0.5.4"
snap = "1.1.0"
+indexmap = "2.1.0"
# local deps
paths.workspace = true
@@ -32,5 +33,7 @@ tt.workspace = true
stdx.workspace = true
profile.workspace = true
text-size.workspace = true
-# Intentionally *not* depend on anything salsa-related
-# base-db.workspace = true
+# Ideally this crate would not depend on salsa things, but we need span information here which wraps
+# InternIds for the syntax context
+base-db.workspace = true
+la-arena.workspace = true
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 7a3580f814..9fc5a53d93 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -11,6 +11,8 @@ pub mod msg;
mod process;
mod version;
+use base_db::span::SpanData;
+use indexmap::IndexSet;
use paths::AbsPathBuf;
use std::{fmt, io, sync::Mutex};
use triomphe::Arc;
@@ -18,7 +20,7 @@ use triomphe::Arc;
use serde::{Deserialize, Serialize};
use crate::{
- msg::{flat::SerializableSpan, ExpandMacro, FlatTree, PanicMessage},
+ msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS},
process::ProcMacroProcessSrv,
};
@@ -132,32 +134,49 @@ impl ProcMacro {
self.kind
}
- pub fn expand<const L: usize, S: SerializableSpan<L>>(
+ pub fn expand(
&self,
- subtree: &tt::Subtree<S>,
- attr: Option<&tt::Subtree<S>>,
+ subtree: &tt::Subtree<SpanData>,
+ attr: Option<&tt::Subtree<SpanData>>,
env: Vec<(String, String)>,
- ) -> Result<Result<tt::Subtree<S>, PanicMessage>, ServerError> {
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
+ ) -> Result<Result<tt::Subtree<SpanData>, PanicMessage>, ServerError> {
let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
let current_dir = env
.iter()
.find(|(name, _)| name == "CARGO_MANIFEST_DIR")
.map(|(_, value)| value.clone());
+ let mut span_data_table = IndexSet::default();
+ let def_site = span_data_table.insert_full(def_site).0;
+ let call_site = span_data_table.insert_full(call_site).0;
+ let mixed_site = span_data_table.insert_full(mixed_site).0;
let task = ExpandMacro {
- macro_body: FlatTree::new(subtree, version),
+ macro_body: FlatTree::new(subtree, version, &mut span_data_table),
macro_name: self.name.to_string(),
- attributes: attr.map(|subtree| FlatTree::new(subtree, version)),
+ attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
lib: self.dylib_path.to_path_buf().into(),
env,
current_dir,
+ has_global_spans: ExpnGlobals {
+ serialize: version >= HAS_GLOBAL_SPANS,
+ def_site,
+ call_site,
+ mixed_site,
+ },
};
- let request = msg::Request::ExpandMacro(task);
- let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
+ let response = self
+ .process
+ .lock()
+ .unwrap_or_else(|e| e.into_inner())
+ .send_task(msg::Request::ExpandMacro(task))?;
+
match response {
msg::Response::ExpandMacro(it) => {
- Ok(it.map(|tree| FlatTree::to_subtree(tree, version)))
+ Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
}
msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
Err(ServerError { message: "unexpected response".to_string(), io: None })
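
Spans no longer travel over the wire as raw data at all: the client interns each `SpanData` it mentions into an `IndexSet` (the `SpanDataIndexMap`), sends only the resulting indices, and resolves whatever indices come back against the same table. The `indexmap` round trip underneath is just this, with `span` standing in for any `SpanData` value:

    use indexmap::IndexSet;

    let mut table: IndexSet<SpanData> = IndexSet::default();
    let idx = table.insert_full(span).0;            // index that goes on the wire
    // ... the server echoes indices back inside the returned FlatTree ...
    let resolved = *table.get_index(idx).unwrap();  // and this maps them back
    assert_eq!(resolved, span);
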
diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs
index 4e6984f61b..ddac514ff7 100644
--- a/crates/proc-macro-api/src/msg.rs
+++ b/crates/proc-macro-api/src/msg.rs
@@ -10,21 +10,15 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::ProcMacroKind;
-pub use crate::msg::flat::FlatTree;
+pub use crate::msg::flat::{FlatTree, TokenId};
// The versions of the server protocol
pub const NO_VERSION_CHECK_VERSION: u32 = 0;
pub const VERSION_CHECK_VERSION: u32 = 1;
pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
-/// This version changes how spans are encoded, kind of. Prior to this version,
-/// spans were represented as a single u32 which effectively forced spans to be
-/// token ids. Starting with this version, the span fields are still u32,
-/// but if the size of the span is greater than 1 then the span data is encoded in
-/// an additional vector where the span represents the offset into that vector.
-/// This allows encoding bigger spans while supporting the previous versions.
-pub const VARIABLE_SIZED_SPANS: u32 = 2;
+pub const HAS_GLOBAL_SPANS: u32 = 3;
-pub const CURRENT_API_VERSION: u32 = VARIABLE_SIZED_SPANS;
+pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS;
#[derive(Debug, Serialize, Deserialize)]
pub enum Request {
@@ -66,6 +60,24 @@ pub struct ExpandMacro {
pub env: Vec<(String, String)>,
pub current_dir: Option<String>,
+    /// Marker for whether to serialize the span globals below; see `ExpnGlobals::skip_serializing_if`.
+ #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
+ pub has_global_spans: ExpnGlobals,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpnGlobals {
+ #[serde(skip_serializing)]
+ pub serialize: bool,
+ pub def_site: usize,
+ pub call_site: usize,
+ pub mixed_site: usize,
+}
+
+impl ExpnGlobals {
+ fn skip_serializing_if(&self) -> bool {
+ !self.serialize
+ }
}
pub trait Message: Serialize + DeserializeOwned {
@@ -120,38 +132,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
Ok(())
}
-/*TODO
-
#[cfg(test)]
mod tests {
- use tt::{
- Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, SpanAnchor, Subtree,
- TokenId, TokenTree,
+ use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+ FileId,
};
+ use la_arena::RawIdx;
+ use text_size::{TextRange, TextSize};
+ use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};
use super::*;
- fn fixture_token_tree() -> Subtree<TokenId> {
- let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() };
- subtree
- .token_trees
- .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into()));
- subtree
- .token_trees
- .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into()));
+ fn fixture_token_tree() -> Subtree<SpanData> {
+ let anchor =
+ SpanAnchor { file_id: FileId(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) };
+ let mut subtree = Subtree {
+ delimiter: Delimiter {
+ open: SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ close: SpanData {
+ range: TextRange::empty(TextSize::new(13)),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ kind: DelimiterKind::Invisible,
+ },
+ token_trees: Vec::new(),
+ };
+ subtree.token_trees.push(TokenTree::Leaf(
+ Ident {
+ text: "struct".into(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .into(),
+ ));
+ subtree.token_trees.push(TokenTree::Leaf(
+ Ident {
+ text: "Foo".into(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(5), TextSize::of("Foo")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .into(),
+ ));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
text: "Foo".into(),
- span: TokenId::DUMMY,
+
+ span: SpanData {
+ range: TextRange::at(TextSize::new(8), TextSize::of("Foo")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
})));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
char: '@',
- span: TokenId::DUMMY,
+ span: SpanData {
+ range: TextRange::at(TextSize::new(11), TextSize::of('@')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
spacing: Spacing::Joint,
})));
subtree.token_trees.push(TokenTree::Subtree(Subtree {
delimiter: Delimiter {
- open: TokenId(2),
- close: TokenId::DUMMY,
+ open: SpanData {
+ range: TextRange::at(TextSize::new(12), TextSize::of('{')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ close: SpanData {
+ range: TextRange::at(TextSize::new(13), TextSize::of('}')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
kind: DelimiterKind::Brace,
},
token_trees: vec![],
@@ -162,20 +225,26 @@ mod tests {
#[test]
fn test_proc_macro_rpc_works() {
let tt = fixture_token_tree();
+ let mut span_data_table = Default::default();
let task = ExpandMacro {
- macro_body: FlatTree::new(&tt, CURRENT_API_VERSION),
+ macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
lib: std::env::current_dir().unwrap(),
env: Default::default(),
current_dir: Default::default(),
+ has_global_spans: ExpnGlobals {
+ serialize: true,
+ def_site: 0,
+ call_site: 0,
+ mixed_site: 0,
+ },
};
let json = serde_json::to_string(&task).unwrap();
// println!("{}", json);
let back: ExpandMacro = serde_json::from_str(&json).unwrap();
- assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION));
+ assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
}
}
-*/
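
Protocol version 3 (`HAS_GLOBAL_SPANS`) only adds the `has_global_spans` block; when the server reports an older version the field is skipped during serialization, so such servers never see the new key. A sketch of how a client would build the block, with the server version supplied by the caller:

    // Sketch only: decide whether the globals block is sent at all.
    fn globals_for(server_version: u32, def: usize, call: usize, mixed: usize) -> ExpnGlobals {
        ExpnGlobals {
            // `skip_serializing_if` drops the whole field when this is false.
            serialize: server_version >= HAS_GLOBAL_SPANS,
            def_site: def,
            call_site: call,
            mixed_site: mixed,
        }
    }
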
diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs
index f29aac5295..baf8bbad4b 100644
--- a/crates/proc-macro-api/src/msg/flat.rs
+++ b/crates/proc-macro-api/src/msg/flat.rs
@@ -37,40 +37,40 @@
use std::collections::{HashMap, VecDeque};
+use base_db::span::SpanData;
+use indexmap::IndexSet;
use serde::{Deserialize, Serialize};
-use text_size::TextRange;
-use tt::{Span, SyntaxContext};
-use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS};
+use crate::msg::ENCODE_CLOSE_SPAN_VERSION;
-pub trait SerializableSpan<const L: usize>: Span {
- fn into_u32(self) -> [u32; L];
- fn from_u32(input: [u32; L]) -> Self;
+pub type SpanDataIndexMap = IndexSet<SpanData>;
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(pub u32);
+
+impl std::fmt::Debug for TokenId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl TokenId {
+ pub const DEF_SITE: Self = TokenId(0);
+ pub const CALL_SITE: Self = TokenId(0);
+ pub const MIXED_SITE: Self = TokenId(0);
}
-// impl SerializableSpan<1> for tt::TokenId {
-// fn into_u32(self) -> [u32; 1] {
-// [self.0]
-// }
-// fn from_u32([input]: [u32; 1]) -> Self {
-// tt::TokenId(input)
-// }
-// }
-
-impl<Anchor, Ctx> SerializableSpan<3> for tt::SpanData<Anchor, Ctx>
-where
- Anchor: From<u32> + Into<u32>,
- Self: Span,
- Ctx: SyntaxContext,
-{
- fn into_u32(self) -> [u32; 3] {
- [self.anchor.into(), self.range.start().into(), self.range.end().into()]
+
+impl tt::Span for TokenId {
+ const DUMMY: Self = TokenId(!0);
+
+ type Anchor = ();
+
+ fn anchor(self) -> Self::Anchor {
+ ()
}
- fn from_u32([file_id, start, end]: [u32; 3]) -> Self {
- tt::SpanData {
- anchor: file_id.into(),
- range: TextRange::new(start.into(), end.into()),
- ctx: Ctx::DUMMY,
- }
+
+ fn mk(_: Self::Anchor, _: text_size::TextRange) -> Self {
+ Self::DUMMY
}
}
@@ -82,82 +82,41 @@ pub struct FlatTree {
ident: Vec<u32>,
token_tree: Vec<u32>,
text: Vec<String>,
- #[serde(skip_serializing_if = "SpanMap::do_serialize")]
- #[serde(default)]
- span_map: SpanMap,
-}
-
-#[derive(Serialize, Deserialize, Debug)]
-struct SpanMap {
- #[serde(skip_serializing)]
- serialize: bool,
- span_size: u32,
- spans: Vec<u32>,
-}
-
-impl Default for SpanMap {
- fn default() -> Self {
- Self { serialize: false, span_size: 1, spans: Default::default() }
- }
-}
-
-impl SpanMap {
- fn serialize_span<const L: usize, S: SerializableSpan<L>>(&mut self, span: S) -> u32 {
- let u32s = span.into_u32();
- if L == 1 {
- u32s[0]
- } else {
- let offset = self.spans.len() as u32;
- self.spans.extend(u32s);
- offset
- }
- }
- fn deserialize_span<const L: usize, S: SerializableSpan<L>>(&self, offset: u32) -> S {
- S::from_u32(if L == 1 {
- [offset].as_ref().try_into().unwrap()
- } else {
- self.spans[offset as usize..][..L].try_into().unwrap()
- })
- }
}
-impl SpanMap {
- fn do_serialize(&self) -> bool {
- self.serialize
- }
-}
-
-struct SubtreeRepr<const L: usize, S> {
- open: S,
- close: S,
+struct SubtreeRepr {
+ open: TokenId,
+ close: TokenId,
kind: tt::DelimiterKind,
tt: [u32; 2],
}
-struct LiteralRepr<const L: usize, S> {
- id: S,
+struct LiteralRepr {
+ id: TokenId,
text: u32,
}
-struct PunctRepr<const L: usize, S> {
- id: S,
+struct PunctRepr {
+ id: TokenId,
char: char,
spacing: tt::Spacing,
}
-struct IdentRepr<const L: usize, S> {
- id: S,
+struct IdentRepr {
+ id: TokenId,
text: u32,
}
impl FlatTree {
- pub fn new<const L: usize, S: SerializableSpan<L>>(
- subtree: &tt::Subtree<S>,
+ pub fn new(
+ subtree: &tt::Subtree<SpanData>,
version: u32,
+ span_data_table: &mut SpanDataIndexMap,
) -> FlatTree {
let mut w = Writer {
string_table: HashMap::new(),
work: VecDeque::new(),
+ span_data_table,
subtree: Vec::new(),
literal: Vec::new(),
@@ -167,78 +126,111 @@ impl FlatTree {
text: Vec::new(),
};
w.write(subtree);
- assert!(L == 1 || version >= VARIABLE_SIZED_SPANS);
- let mut span_map = SpanMap {
- serialize: version >= VARIABLE_SIZED_SPANS && L != 1,
- span_size: L as u32,
- spans: Vec::new(),
- };
- return FlatTree {
+
+ FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
- write_vec(&mut span_map, w.subtree, SubtreeRepr::write_with_close_span)
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
} else {
- write_vec(&mut span_map, w.subtree, SubtreeRepr::write)
+ write_vec(w.subtree, SubtreeRepr::write)
},
- literal: write_vec(&mut span_map, w.literal, LiteralRepr::write),
- punct: write_vec(&mut span_map, w.punct, PunctRepr::write),
- ident: write_vec(&mut span_map, w.ident, IdentRepr::write),
+ literal: write_vec(w.literal, LiteralRepr::write),
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: write_vec(w.ident, IdentRepr::write),
token_tree: w.token_tree,
text: w.text,
- span_map,
+ }
+ }
+
+ pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
+ let mut w = Writer {
+ string_table: HashMap::new(),
+ work: VecDeque::new(),
+ span_data_table: &mut (),
+
+ subtree: Vec::new(),
+ literal: Vec::new(),
+ punct: Vec::new(),
+ ident: Vec::new(),
+ token_tree: Vec::new(),
+ text: Vec::new(),
};
+ w.write(subtree);
- fn write_vec<T, F: Fn(T, &mut SpanMap) -> [u32; N], const N: usize>(
- map: &mut SpanMap,
- xs: Vec<T>,
- f: F,
- ) -> Vec<u32> {
- xs.into_iter().flat_map(|it| f(it, map)).collect()
+ FlatTree {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
+ literal: write_vec(w.literal, LiteralRepr::write),
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: write_vec(w.ident, IdentRepr::write),
+ token_tree: w.token_tree,
+ text: w.text,
}
}
- pub fn to_subtree<const L: usize, S: SerializableSpan<L>>(
+ pub fn to_subtree_resolved(
self,
version: u32,
- ) -> tt::Subtree<S> {
- assert!((version >= VARIABLE_SIZED_SPANS || L == 1) && L as u32 == self.span_map.span_size);
- return Reader {
+ span_data_table: &SpanDataIndexMap,
+ ) -> tt::Subtree<SpanData> {
+ Reader {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
- read_vec(&self.span_map, self.subtree, SubtreeRepr::read_with_close_span)
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
- read_vec(&self.span_map, self.subtree, SubtreeRepr::read)
+ read_vec(self.subtree, SubtreeRepr::read)
},
- literal: read_vec(&self.span_map, self.literal, LiteralRepr::read),
- punct: read_vec(&self.span_map, self.punct, PunctRepr::read),
- ident: read_vec(&self.span_map, self.ident, IdentRepr::read),
+ literal: read_vec(self.literal, LiteralRepr::read),
+ punct: read_vec(self.punct, PunctRepr::read),
+ ident: read_vec(self.ident, IdentRepr::read),
token_tree: self.token_tree,
text: self.text,
+ span_data_table,
}
- .read();
-
- fn read_vec<T, F: Fn([u32; N], &SpanMap) -> T, const N: usize>(
- map: &SpanMap,
- xs: Vec<u32>,
- f: F,
- ) -> Vec<T> {
- let mut chunks = xs.chunks_exact(N);
- let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap(), map)).collect();
- assert!(chunks.remainder().is_empty());
- res
+ .read()
+ }
+
+ pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
+ Reader {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
+ } else {
+ read_vec(self.subtree, SubtreeRepr::read)
+ },
+ literal: read_vec(self.literal, LiteralRepr::read),
+ punct: read_vec(self.punct, PunctRepr::read),
+ ident: read_vec(self.ident, IdentRepr::read),
+ token_tree: self.token_tree,
+ text: self.text,
+ span_data_table: &(),
}
+ .read()
}
}
-impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
- fn write(self, map: &mut SpanMap) -> [u32; 4] {
+fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
+ let mut chunks = xs.chunks_exact(N);
+ let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
+ assert!(chunks.remainder().is_empty());
+ res
+}
+
+fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
+ xs.into_iter().flat_map(f).collect()
+}
+
+impl SubtreeRepr {
+ fn write(self) -> [u32; 4] {
let kind = match self.kind {
tt::DelimiterKind::Invisible => 0,
tt::DelimiterKind::Parenthesis => 1,
tt::DelimiterKind::Brace => 2,
tt::DelimiterKind::Bracket => 3,
};
- [map.serialize_span(self.open), kind, self.tt[0], self.tt[1]]
+ [self.open.0, kind, self.tt[0], self.tt[1]]
}
- fn read([open, kind, lo, len]: [u32; 4], map: &SpanMap) -> Self {
+ fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
let kind = match kind {
0 => tt::DelimiterKind::Invisible,
1 => tt::DelimiterKind::Parenthesis,
@@ -246,24 +238,18 @@ impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
3 => tt::DelimiterKind::Bracket,
other => panic!("bad kind {other}"),
};
- SubtreeRepr { open: map.deserialize_span(open), close: S::DUMMY, kind, tt: [lo, len] }
+ SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
}
- fn write_with_close_span(self, map: &mut SpanMap) -> [u32; 5] {
+ fn write_with_close_span(self) -> [u32; 5] {
let kind = match self.kind {
tt::DelimiterKind::Invisible => 0,
tt::DelimiterKind::Parenthesis => 1,
tt::DelimiterKind::Brace => 2,
tt::DelimiterKind::Bracket => 3,
};
- [
- map.serialize_span(self.open),
- map.serialize_span(self.close),
- kind,
- self.tt[0],
- self.tt[1],
- ]
+ [self.open.0, self.close.0, kind, self.tt[0], self.tt[1]]
}
- fn read_with_close_span([open, close, kind, lo, len]: [u32; 5], map: &SpanMap) -> Self {
+ fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
let kind = match kind {
0 => tt::DelimiterKind::Invisible,
1 => tt::DelimiterKind::Parenthesis,
@@ -271,64 +257,86 @@ impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
3 => tt::DelimiterKind::Bracket,
other => panic!("bad kind {other}"),
};
- SubtreeRepr {
- open: map.deserialize_span(open),
- close: map.deserialize_span(close),
- kind,
- tt: [lo, len],
- }
+ SubtreeRepr { open: TokenId(open), close: TokenId(close), kind, tt: [lo, len] }
}
}
-impl<const L: usize, S: SerializableSpan<L>> LiteralRepr<L, S> {
- fn write(self, map: &mut SpanMap) -> [u32; 2] {
- [map.serialize_span(self.id), self.text]
+impl LiteralRepr {
+ fn write(self) -> [u32; 2] {
+ [self.id.0, self.text]
}
- fn read([id, text]: [u32; 2], map: &SpanMap) -> Self {
- LiteralRepr { id: map.deserialize_span(id), text }
+ fn read([id, text]: [u32; 2]) -> LiteralRepr {
+ LiteralRepr { id: TokenId(id), text }
}
}
-impl<const L: usize, S: SerializableSpan<L>> PunctRepr<L, S> {
- fn write(self, map: &mut SpanMap) -> [u32; 3] {
+impl PunctRepr {
+ fn write(self) -> [u32; 3] {
let spacing = match self.spacing {
tt::Spacing::Alone => 0,
tt::Spacing::Joint => 1,
};
- [map.serialize_span(self.id), self.char as u32, spacing]
+ [self.id.0, self.char as u32, spacing]
}
- fn read([id, char, spacing]: [u32; 3], map: &SpanMap) -> Self {
+ fn read([id, char, spacing]: [u32; 3]) -> PunctRepr {
let spacing = match spacing {
0 => tt::Spacing::Alone,
1 => tt::Spacing::Joint,
other => panic!("bad spacing {other}"),
};
- PunctRepr { id: map.deserialize_span(id), char: char.try_into().unwrap(), spacing }
+ PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing }
}
}
-impl<const L: usize, S: SerializableSpan<L>> IdentRepr<L, S> {
- fn write(self, map: &mut SpanMap) -> [u32; 2] {
- [map.serialize_span(self.id), self.text]
+impl IdentRepr {
+ fn write(self) -> [u32; 2] {
+ [self.id.0, self.text]
}
- fn read(data: [u32; 2], map: &SpanMap) -> Self {
- IdentRepr { id: map.deserialize_span(data[0]), text: data[1] }
+ fn read(data: [u32; 2]) -> IdentRepr {
+ IdentRepr { id: TokenId(data[0]), text: data[1] }
}
}
-struct Writer<'a, const L: usize, S> {
+trait Span: Copy {
+ type Table;
+ fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId;
+ fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self;
+}
+
+impl Span for TokenId {
+ type Table = ();
+ fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId {
+ token_id
+ }
+
+ fn span_for_token_id((): &Self::Table, id: TokenId) -> Self {
+ id
+ }
+}
+impl Span for SpanData {
+ type Table = IndexSet<SpanData>;
+ fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
+ TokenId(table.insert_full(span).0 as u32)
+ }
+ fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self {
+ *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0])
+ }
+}
+
+struct Writer<'a, 'span, S: Span> {
work: VecDeque<(usize, &'a tt::Subtree<S>)>,
string_table: HashMap<&'a str, u32>,
+ span_data_table: &'span mut S::Table,
- subtree: Vec<SubtreeRepr<L, S>>,
- literal: Vec<LiteralRepr<L, S>>,
- punct: Vec<PunctRepr<L, S>>,
- ident: Vec<IdentRepr<L, S>>,
+ subtree: Vec<SubtreeRepr>,
+ literal: Vec<LiteralRepr>,
+ punct: Vec<PunctRepr>,
+ ident: Vec<IdentRepr>,
token_tree: Vec<u32>,
text: Vec<String>,
}
-impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
+impl<'a, 'span, S: Span> Writer<'a, 'span, S> {
fn write(&mut self, root: &'a tt::Subtree<S>) {
self.enqueue(root);
while let Some((idx, subtree)) = self.work.pop_front() {
@@ -336,6 +344,10 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
}
}
+ fn token_id_of(&mut self, span: S) -> TokenId {
+ S::token_id_of(self.span_data_table, span)
+ }
+
fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) {
let mut first_tt = self.token_tree.len();
let n_tt = subtree.token_trees.len();
@@ -353,22 +365,21 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
tt::Leaf::Literal(lit) => {
let idx = self.literal.len() as u32;
let text = self.intern(&lit.text);
- self.literal.push(LiteralRepr { id: lit.span, text });
+ let id = self.token_id_of(lit.span);
+ self.literal.push(LiteralRepr { id, text });
idx << 2 | 0b01
}
tt::Leaf::Punct(punct) => {
let idx = self.punct.len() as u32;
- self.punct.push(PunctRepr {
- char: punct.char,
- spacing: punct.spacing,
- id: punct.span,
- });
+ let id = self.token_id_of(punct.span);
+ self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id });
idx << 2 | 0b10
}
tt::Leaf::Ident(ident) => {
let idx = self.ident.len() as u32;
let text = self.intern(&ident.text);
- self.ident.push(IdentRepr { id: ident.span, text });
+ let id = self.token_id_of(ident.span);
+ self.ident.push(IdentRepr { id, text });
idx << 2 | 0b11
}
},
@@ -380,8 +391,8 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 {
let idx = self.subtree.len();
- let open = subtree.delimiter.open;
- let close = subtree.delimiter.close;
+ let open = self.token_id_of(subtree.delimiter.open);
+ let close = self.token_id_of(subtree.delimiter.close);
let delimiter_kind = subtree.delimiter.kind;
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
self.work.push_back((idx, subtree));
@@ -398,23 +409,29 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
}
}
-struct Reader<const L: usize, S> {
- subtree: Vec<SubtreeRepr<L, S>>,
- literal: Vec<LiteralRepr<L, S>>,
- punct: Vec<PunctRepr<L, S>>,
- ident: Vec<IdentRepr<L, S>>,
+struct Reader<'span, S: Span> {
+ subtree: Vec<SubtreeRepr>,
+ literal: Vec<LiteralRepr>,
+ punct: Vec<PunctRepr>,
+ ident: Vec<IdentRepr>,
token_tree: Vec<u32>,
text: Vec<String>,
+ span_data_table: &'span S::Table,
}
-impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
+impl<'span, S: Span> Reader<'span, S> {
pub(crate) fn read(self) -> tt::Subtree<S> {
let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()];
+ let read_span = |id| S::span_for_token_id(self.span_data_table, id);
for i in (0..self.subtree.len()).rev() {
let repr = &self.subtree[i];
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
let s = tt::Subtree {
- delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind },
+ delimiter: tt::Delimiter {
+ open: read_span(repr.open),
+ close: read_span(repr.close),
+ kind: repr.kind,
+ },
token_trees: token_trees
.iter()
.copied()
@@ -429,7 +446,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
let repr = &self.literal[idx];
tt::Leaf::Literal(tt::Literal {
text: self.text[repr.text as usize].as_str().into(),
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
@@ -438,7 +455,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
tt::Leaf::Punct(tt::Punct {
char: repr.char,
spacing: repr.spacing,
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
@@ -446,7 +463,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
let repr = &self.ident[idx];
tt::Leaf::Ident(tt::Ident {
text: self.text[repr.text as usize].as_str().into(),
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
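
The private `Span` trait at the bottom of `flat.rs` is what lets a single `Writer`/`Reader` pair serve both ends of the connection: on the client `S = SpanData` and the table is the interning `IndexSet`, on the server `S = TokenId` and the table is `()`, so token ids pass through untouched. A minimal round trip on the raw (`TokenId`) side, which needs no span table at all:

    use proc_macro_api::msg::{FlatTree, TokenId, CURRENT_API_VERSION};

    // An empty, invisibly-delimited subtree is enough to exercise the encoding.
    let raw = tt::Subtree::<TokenId> {
        delimiter: tt::Delimiter {
            open: TokenId(0),
            close: TokenId(0),
            kind: tt::DelimiterKind::Invisible,
        },
        token_trees: vec![],
    };
    let flat = FlatTree::new_raw(&raw, CURRENT_API_VERSION);
    assert_eq!(raw, flat.to_subtree_unresolved(CURRENT_API_VERSION));
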
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index ea65c33604..50ce586fc4 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -18,14 +18,12 @@ fn main() -> std::io::Result<()> {
run()
}
-#[cfg(not(FALSE))]
-#[cfg(not(feature = "sysroot-abi"))]
+#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
fn run() -> io::Result<()> {
panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
}
-#[cfg(FALSE)]
-#[cfg(feature = "sysroot-abi")]
+#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
fn run() -> io::Result<()> {
use proc_macro_api::msg::{self, Message};
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index dd05e250c2..80bce3af1a 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -11,7 +11,7 @@ use libloading::Library;
use memmap2::Mmap;
use object::Object;
use paths::AbsPath;
-use proc_macro_api::{read_dylib_info, ProcMacroKind};
+use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind};
const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
@@ -152,8 +152,14 @@ impl Expander {
macro_name: &str,
macro_body: &crate::tt::Subtree,
attributes: Option<&crate::tt::Subtree>,
+ def_site: TokenId,
+ call_site: TokenId,
+ mixed_site: TokenId,
) -> Result<crate::tt::Subtree, String> {
- let result = self.inner.proc_macros.expand(macro_name, macro_body, attributes);
+ let result = self
+ .inner
+ .proc_macros
+ .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site);
result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
}
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index bd0d1b79fa..32a07a8477 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -10,7 +10,6 @@
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
-#![cfg(FALSE)] // TODO
#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
@@ -32,12 +31,25 @@ use std::{
time::SystemTime,
};
+use ::tt::Span;
use proc_macro_api::{
- msg::{self, CURRENT_API_VERSION},
+ msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION, HAS_GLOBAL_SPANS},
ProcMacroKind,
};
-use ::tt::token_id as tt;
+mod tt {
+ pub use proc_macro_api::msg::TokenId;
+
+ pub use ::tt::*;
+
+ pub type Subtree = ::tt::Subtree<TokenId>;
+ pub type TokenTree = ::tt::TokenTree<TokenId>;
+ pub type Delimiter = ::tt::Delimiter<TokenId>;
+ pub type Leaf = ::tt::Leaf<TokenId>;
+ pub type Literal = ::tt::Literal<TokenId>;
+ pub type Punct = ::tt::Punct<TokenId>;
+ pub type Ident = ::tt::Ident<TokenId>;
+}
// see `build.rs`
include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
@@ -71,16 +83,28 @@ impl ProcMacroSrv {
None => None,
};
- let macro_body = task.macro_body.to_subtree(CURRENT_API_VERSION);
- let attributes = task.attributes.map(|it| it.to_subtree(CURRENT_API_VERSION));
+ let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans;
+ let def_site = TokenId(def_site as u32);
+ let call_site = TokenId(call_site as u32);
+ let mixed_site = TokenId(mixed_site as u32);
+
+ let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
+ let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
let result = thread::scope(|s| {
let thread = thread::Builder::new()
.stack_size(EXPANDER_STACK_SIZE)
.name(task.macro_name.clone())
.spawn_scoped(s, || {
expander
- .expand(&task.macro_name, &macro_body, attributes.as_ref())
- .map(|it| msg::FlatTree::new(&it, CURRENT_API_VERSION))
+ .expand(
+ &task.macro_name,
+ &macro_body,
+ attributes.as_ref(),
+ def_site,
+ call_site,
+ mixed_site,
+ )
+ .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
});
let res = match thread {
Ok(handle) => handle.join(),
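
The lib.rs hunks convert the span offsets carried in `task.has_global_spans` into `TokenId`s, parse the body with `to_subtree_unresolved`, and run the expansion on a scoped worker thread that has its own stack size and the macro's name, so a panic inside the macro surfaces as a join error rather than unwinding through the server loop. A sketch of that threading pattern, with hypothetical `do_expand`/`expand_on_worker` helpers and an illustrative stack size:

// Sketch of the scoped-thread pattern in `ProcMacroSrv::expand` above.
// `do_expand` stands in for the real expander call; the stack size is an
// illustrative value, not the constant used by the crate.
use std::thread;

const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;

fn do_expand(input: &str) -> Result<String, String> {
    Ok(input.to_uppercase())
}

fn expand_on_worker(macro_name: &str, input: &str) -> Result<String, String> {
    thread::scope(|s| {
        let handle = thread::Builder::new()
            .stack_size(EXPANDER_STACK_SIZE)
            .name(macro_name.to_owned())
            .spawn_scoped(s, || do_expand(input))
            .map_err(|e| e.to_string())?;
        // A panic inside the macro shows up here as a join error instead of
        // taking down the whole server.
        handle.join().map_err(|_| "expander panicked".to_owned())?
    })
}

fn main() {
    assert_eq!(expand_on_worker("demo", "ok"), Ok("OK".to_owned()));
}
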
diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs
index 3c6f320331..4f87fa281b 100644
--- a/crates/proc-macro-srv/src/proc_macros.rs
+++ b/crates/proc-macro-srv/src/proc_macros.rs
@@ -1,7 +1,7 @@
//! Proc macro ABI
use libloading::Library;
-use proc_macro_api::{ProcMacroKind, RustCInfo};
+use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo};
use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt};
@@ -45,6 +45,9 @@ impl ProcMacros {
macro_name: &str,
macro_body: &tt::Subtree,
attributes: Option<&tt::Subtree>,
+ def_site: TokenId,
+ call_site: TokenId,
+ mixed_site: TokenId,
) -> Result<tt::Subtree, crate::PanicMessage> {
let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone());
@@ -59,34 +62,56 @@ impl ProcMacros {
} if *trait_name == macro_name => {
let res = client.run(
&proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_body,
true,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
proc_macro::bridge::client::ProcMacro::Bang { name, client }
if *name == macro_name =>
{
let res = client.run(
&proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_body,
true,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
proc_macro::bridge::client::ProcMacro::Attr { name, client }
if *name == macro_name =>
{
let res = client.run(
&proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_attributes,
parsed_body,
true,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
_ => continue,
}
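
In each of the three arms above, a successful run is folded back with `into_subtree(call_site)`, i.e. wrapped in an invisible delimiter that carries the call-site span. A sketch of that wrapping step with simplified stand-ins for the `tt` types:

// Sketch only: simplified stand-ins for `tt::Subtree`/`tt::Delimiter`, showing
// what `into_subtree(call_site)` produces after this change.
#[derive(Debug, Clone, Copy)]
struct TokenId(u32);

#[derive(Debug)]
enum DelimiterKind {
    Invisible,
}

#[derive(Debug)]
struct Delimiter {
    open: TokenId,
    close: TokenId,
    kind: DelimiterKind,
}

#[derive(Debug)]
struct Subtree {
    delimiter: Delimiter,
    token_trees: Vec<String>, // leaves elided for the sketch
}

fn into_subtree(token_trees: Vec<String>, call_site: TokenId) -> Subtree {
    // The result stream gets an *invisible* delimiter whose open/close spans
    // are the call-site id, instead of an unspecified delimiter.
    Subtree {
        delimiter: Delimiter { open: call_site, close: call_site, kind: DelimiterKind::Invisible },
        token_trees,
    }
}

fn main() {
    let out = into_subtree(vec!["expanded".to_owned()], TokenId(1));
    println!("{out:?}");
}
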
diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs
index fe18451d38..fc080eccc0 100644
--- a/crates/proc-macro-srv/src/server.rs
+++ b/crates/proc-macro-srv/src/server.rs
@@ -11,6 +11,7 @@
use proc_macro::bridge::{self, server};
mod token_stream;
+use proc_macro_api::msg::TokenId;
pub use token_stream::TokenStream;
use token_stream::TokenStreamBuilder;
@@ -43,6 +44,9 @@ pub struct FreeFunctions;
pub struct RustAnalyzer {
// FIXME: store span information here.
pub(crate) interner: SymbolInternerRef,
+ pub call_site: TokenId,
+ pub def_site: TokenId,
+ pub mixed_site: TokenId,
}
impl server::Types for RustAnalyzer {
@@ -69,7 +73,7 @@ impl server::FreeFunctions for RustAnalyzer {
kind: bridge::LitKind::Err,
symbol: Symbol::intern(self.interner, s),
suffix: None,
- span: tt::TokenId::unspecified(),
+ span: self.call_site,
})
}
@@ -83,7 +87,7 @@ impl server::TokenStream for RustAnalyzer {
stream.is_empty()
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
- src.parse().expect("cannot parse string")
+ Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string")
}
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
@@ -280,7 +284,7 @@ impl server::Span for RustAnalyzer {
}
fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
// FIXME stub
- tt::TokenId::unspecified()
+ self.call_site
}
/// Recent feature, not yet in the proc_macro
///
@@ -317,15 +321,15 @@ impl server::Span for RustAnalyzer {
}
fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
// FIXME handle span
- tt::TokenId::unspecified()
+ self.call_site
}
fn end(&mut self, _self_: Self::Span) -> Self::Span {
- tt::TokenId::unspecified()
+ self.call_site
}
fn start(&mut self, _self_: Self::Span) -> Self::Span {
- tt::TokenId::unspecified()
+ self.call_site
}
fn line(&mut self, _span: Self::Span) -> usize {
@@ -349,9 +353,9 @@ impl server::Symbol for RustAnalyzer {
impl server::Server for RustAnalyzer {
fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
bridge::ExpnGlobals {
- def_site: Span::unspecified(),
- call_site: Span::unspecified(),
- mixed_site: Span::unspecified(),
+ def_site: self.def_site,
+ call_site: self.call_site,
+ mixed_site: self.mixed_site,
}
}
@@ -422,6 +426,8 @@ impl LiteralFormatter {
#[cfg(test)]
mod tests {
+ use ::tt::Span;
+
use super::*;
#[test]
@@ -430,16 +436,16 @@ mod tests {
token_trees: vec![
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "struct".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId::DUMMY,
})),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "T".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId::DUMMY,
})),
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
+ open: tt::TokenId::DUMMY,
+ close: tt::TokenId::DUMMY,
kind: tt::DelimiterKind::Brace,
},
token_trees: vec![],
@@ -452,33 +458,32 @@ mod tests {
#[test]
fn test_ra_server_from_str() {
- use std::str::FromStr;
let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
+ open: tt::TokenId::DUMMY,
+ close: tt::TokenId::DUMMY,
kind: tt::DelimiterKind::Parenthesis,
},
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "a".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId::DUMMY,
}))],
});
- let t1 = TokenStream::from_str("(a)").unwrap();
+ let t1 = TokenStream::from_str("(a)", tt::TokenId::DUMMY).unwrap();
assert_eq!(t1.token_trees.len(), 1);
assert_eq!(t1.token_trees[0], subtree_paren_a);
- let t2 = TokenStream::from_str("(a);").unwrap();
+ let t2 = TokenStream::from_str("(a);", tt::TokenId::DUMMY).unwrap();
assert_eq!(t2.token_trees.len(), 2);
assert_eq!(t2.token_trees[0], subtree_paren_a);
- let underscore = TokenStream::from_str("_").unwrap();
+ let underscore = TokenStream::from_str("_", tt::TokenId::DUMMY).unwrap();
assert_eq!(
underscore.token_trees[0],
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "_".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId::DUMMY,
}))
);
}
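
On the server side, `globals()` now reports the spans stored on the `RustAnalyzer` value, and span operations that are still stubs return the call-site span instead of an unspecified id. A simplified stand-in (not the bridge's real `server::Server` trait) illustrating the pattern:

// Sketch only: the struct and methods are stand-ins for the server.rs hunks
// above, not the proc_macro bridge traits themselves.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

struct RustAnalyzer {
    def_site: TokenId,
    call_site: TokenId,
    mixed_site: TokenId,
}

impl RustAnalyzer {
    // Stand-in for the bridge's `globals()` callback.
    fn globals(&self) -> (TokenId, TokenId, TokenId) {
        (self.def_site, self.call_site, self.mixed_site)
    }

    // Stand-in for the still-stubbed span queries (`start`, `end`,
    // `resolved_at`, ...): they now fall back to the call-site span.
    fn span_fallback(&self) -> TokenId {
        self.call_site
    }
}

fn main() {
    let server =
        RustAnalyzer { def_site: TokenId(0), call_site: TokenId(1), mixed_site: TokenId(2) };
    assert_eq!(server.span_fallback(), TokenId(1));
    let _ = server.globals();
}
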
diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs
index 2589d8b64d..36be882503 100644
--- a/crates/proc-macro-srv/src/server/token_stream.rs
+++ b/crates/proc-macro-srv/src/server/token_stream.rs
@@ -1,5 +1,7 @@
//! TokenStream implementation used by sysroot ABI
+use proc_macro_api::msg::TokenId;
+
use crate::tt::{self, TokenTree};
#[derive(Debug, Default, Clone)]
@@ -20,8 +22,15 @@ impl TokenStream {
}
}
- pub(crate) fn into_subtree(self) -> tt::Subtree {
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
+ pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: tt::Delimiter {
+ open: call_site,
+ close: call_site,
+ kind: tt::DelimiterKind::Invisible,
+ },
+ token_trees: self.token_trees,
+ }
}
pub(super) fn is_empty(&self) -> bool {
@@ -84,7 +93,7 @@ pub(super) struct TokenStreamBuilder {
/// Public implementation details for the `TokenStream` type, such as iterators.
pub(super) mod token_stream {
- use std::str::FromStr;
+ use proc_macro_api::msg::TokenId;
use super::{tt, TokenStream, TokenTree};
@@ -109,14 +118,15 @@ pub(super) mod token_stream {
///
/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
/// change these errors into `LexError`s later.
- impl FromStr for TokenStream {
- type Err = LexError;
+ #[rustfmt::skip]
+ impl /*FromStr for*/ TokenStream {
+ // type Err = LexError;
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- let (subtree, _token_map) =
- mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+ pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result<TokenStream, LexError> {
+ let subtree =
+ mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
- let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ let subtree = subtree_replace_token_ids_with_call_site(subtree, call_site);
Ok(TokenStream::with_subtree(subtree))
}
}
@@ -127,43 +137,39 @@ pub(super) mod token_stream {
}
}
- fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ fn subtree_replace_token_ids_with_call_site(
+ subtree: tt::Subtree,
+ call_site: TokenId,
+ ) -> tt::Subtree {
tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
- ..subtree.delimiter
- },
+ delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter },
token_trees: subtree
.token_trees
.into_iter()
- .map(token_tree_replace_token_ids_with_unspecified)
+ .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site))
.collect(),
}
}
- fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ fn token_tree_replace_token_ids_with_call_site(
+ tt: tt::TokenTree,
+ call_site: TokenId,
+ ) -> tt::TokenTree {
match tt {
tt::TokenTree::Leaf(leaf) => {
- tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site))
}
tt::TokenTree::Subtree(subtree) => {
- tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site))
}
}
}
- fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf {
match leaf {
- tt::Leaf::Literal(lit) => {
- tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
- }
- tt::Leaf::Punct(punct) => {
- tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
- }
- tt::Leaf::Ident(ident) => {
- tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
- }
+ tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }),
+ tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }),
+ tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }),
}
}
}
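
Parsing a string carries no hygiene or location information of its own, so `from_str` now rewrites every span in the freshly parsed tree to the call-site id. A sketch of that recursive replacement over a deliberately tiny tree type (not the real `tt` types):

// Sketch of the span-replacement walk above on a minimal stand-in tree type:
// every leaf and delimiter span becomes the call-site id.
#[derive(Debug, Clone, Copy)]
struct TokenId(u32);

#[derive(Debug)]
enum TokenTree {
    Leaf { span: TokenId },
    Subtree { open: TokenId, close: TokenId, children: Vec<TokenTree> },
}

fn replace_with_call_site(tt: TokenTree, call_site: TokenId) -> TokenTree {
    match tt {
        TokenTree::Leaf { .. } => TokenTree::Leaf { span: call_site },
        TokenTree::Subtree { children, .. } => TokenTree::Subtree {
            open: call_site,
            close: call_site,
            children: children
                .into_iter()
                .map(|c| replace_with_call_site(c, call_site))
                .collect(),
        },
    }
}

fn main() {
    let tree = TokenTree::Subtree {
        open: TokenId(7),
        close: TokenId(8),
        children: vec![TokenTree::Leaf { span: TokenId(9) }],
    };
    println!("{:?}", replace_with_call_site(tree, TokenId(1)));
}
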
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 49b4d973b6..ccfefafb2c 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,18 +1,19 @@
//! utils used in proc-macro tests
use expect_test::Expect;
-use std::str::FromStr;
+use proc_macro_api::msg::TokenId;
+use tt::Span;
use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
-fn parse_string(code: &str) -> Option<crate::server::TokenStream> {
+fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> {
    // This is a bit strange. We need to parse a string into a token stream in
// order to create a tt::SubTree from it in fixtures. `into_subtree` is
// implemented by all the ABIs we have so we arbitrarily choose one ABI to
// write a `parse_string` function for and use that. The tests don't really
// care which ABI we're using as the `into_subtree` function isn't part of
// the ABI and shouldn't change between ABI versions.
- crate::server::TokenStream::from_str(code).ok()
+ crate::server::TokenStream::from_str(code, call_site).ok()
}
pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
@@ -24,12 +25,22 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, e
}
fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+ let call_site = TokenId::DUMMY;
let path = proc_macro_test_dylib_path();
let expander = dylib::Expander::new(&path).unwrap();
- let fixture = parse_string(input).unwrap();
- let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
-
- let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
+ let fixture = parse_string(input, call_site).unwrap();
+ let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
+
+ let res = expander
+ .expand(
+ macro_name,
+ &fixture.into_subtree(call_site),
+ attr.as_ref(),
+ TokenId::DUMMY,
+ TokenId::DUMMY,
+ TokenId::DUMMY,
+ )
+ .unwrap();
expect.assert_eq(&format!("{res:?}"));
}
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index a29e18811d..a865d9e4ab 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -56,7 +56,7 @@ fn integrated_highlighting_benchmark() {
analysis.highlight_as_html(file_id, false).unwrap();
}
- profile::init_from("*>1");
+ profile::init_from("*>100");
{
let _it = stdx::timeit("change");