Unnamed repository; edit this file 'description' to name the repository.
Merge pull request #21097 from Veykril/push-zpqupukpkrts
proc-macro-srv: Reimplement token trees via immutable trees
Lukas Wirth 5 months ago
parent 55ee7ec · parent 9b767f3 · commit cf4b1fa
-rw-r--r--Cargo.lock16
-rw-r--r--crates/hir-def/Cargo.toml1
-rw-r--r--crates/hir-ty/Cargo.toml2
-rw-r--r--crates/hir/Cargo.toml1
-rw-r--r--crates/ide-db/Cargo.toml1
-rw-r--r--crates/proc-macro-api/Cargo.toml4
-rw-r--r--crates/proc-macro-api/src/legacy_protocol.rs5
-rw-r--r--crates/proc-macro-api/src/legacy_protocol/msg.rs2
-rw-r--r--crates/proc-macro-api/src/legacy_protocol/msg/flat.rs375
-rw-r--r--crates/proc-macro-api/src/lib.rs7
-rw-r--r--crates/proc-macro-srv-cli/Cargo.toml2
-rw-r--r--crates/proc-macro-srv-cli/build.rs1
-rw-r--r--crates/proc-macro-srv-cli/src/main.rs6
-rw-r--r--crates/proc-macro-srv-cli/src/main_loop.rs23
-rw-r--r--crates/proc-macro-srv/Cargo.toml5
-rw-r--r--crates/proc-macro-srv/build.rs2
-rw-r--r--crates/proc-macro-srv/src/bridge.rs12
-rw-r--r--crates/proc-macro-srv/src/dylib.rs10
-rw-r--r--crates/proc-macro-srv/src/dylib/proc_macros.rs33
-rw-r--r--crates/proc-macro-srv/src/dylib/version.rs5
-rw-r--r--crates/proc-macro-srv/src/lib.rs41
-rw-r--r--crates/proc-macro-srv/src/server_impl.rs210
-rw-r--r--crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs164
-rw-r--r--crates/proc-macro-srv/src/server_impl/token_id.rs134
-rw-r--r--crates/proc-macro-srv/src/server_impl/token_stream.rs170
-rw-r--r--crates/proc-macro-srv/src/tests/mod.rs738
-rw-r--r--crates/proc-macro-srv/src/tests/utils.rs53
-rw-r--r--crates/proc-macro-srv/src/token_stream.rs745
-rw-r--r--crates/rust-analyzer/Cargo.toml2
-rw-r--r--crates/rust-analyzer/build.rs1
-rw-r--r--crates/tt/src/lib.rs52
-rw-r--r--xtask/src/install.rs13
-rw-r--r--xtask/src/tidy.rs1
33 files changed, 1647 insertions, 1190 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 891deadeeb..4de8d09dca 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -772,7 +772,6 @@ dependencies = [
"hir-def",
"hir-expand",
"hir-ty",
- "indexmap",
"intern",
"itertools 0.14.0",
"ra-ap-rustc_type_ir",
@@ -824,7 +823,6 @@ dependencies = [
"syntax-bridge",
"test-fixture",
"test-utils",
- "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"thin-vec",
"tracing",
"triomphe",
@@ -864,7 +862,6 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
- "bitflags 2.9.4",
"cov-mark",
"either",
"ena",
@@ -890,7 +887,6 @@ dependencies = [
"rustc_apfloat",
"salsa",
"salsa-macros",
- "scoped-tls",
"smallvec",
"span",
"stdx",
@@ -1085,7 +1081,6 @@ dependencies = [
"expect-test",
"fst",
"hir",
- "indexmap",
"itertools 0.14.0",
"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"macros",
@@ -1825,6 +1820,7 @@ dependencies = [
"indexmap",
"intern",
"paths",
+ "proc-macro-srv",
"rustc-hash 2.1.1",
"serde",
"serde_derive",
@@ -1849,9 +1845,7 @@ dependencies = [
"proc-macro-test",
"ra-ap-rustc_lexer",
"span",
- "syntax-bridge",
"temp-dir",
- "tt",
]
[[package]]
@@ -2291,14 +2285,12 @@ dependencies = [
"ide-db",
"ide-ssr",
"indexmap",
- "intern",
"itertools 0.14.0",
"load-cargo",
"lsp-server 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types",
"memchr",
"mimalloc",
- "nohash-hasher",
"num_cpus",
"oorandom",
"parking_lot",
@@ -2492,12 +2484,6 @@ dependencies = [
]
[[package]]
-name = "scoped-tls"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
-
-[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index 4add777b6b..e174ca5a3b 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -27,7 +27,6 @@ tracing = { workspace = true, features = ["attributes"] }
smallvec.workspace = true
triomphe.workspace = true
rustc_apfloat = "0.2.3"
-text-size.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
query-group.workspace = true
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index 902fcd9c2f..8adf95bd04 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -16,14 +16,12 @@ doctest = false
cov-mark = "2.0.0"
itertools.workspace = true
arrayvec.workspace = true
-bitflags.workspace = true
smallvec.workspace = true
ena = "0.14.3"
either.workspace = true
oorandom = "11.1.5"
tracing = { workspace = true, features = ["attributes"] }
rustc-hash.workspace = true
-scoped-tls = "1.0.1"
la-arena.workspace = true
triomphe.workspace = true
typed-arena = "2.0.2"
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index a83997522d..24b2bd9150 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -20,7 +20,6 @@ itertools.workspace = true
smallvec.workspace = true
tracing = { workspace = true, features = ["attributes"] }
triomphe.workspace = true
-indexmap.workspace = true
ra-ap-rustc_type_ir.workspace = true
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index ac28e323c6..f1f9d85cf9 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -22,7 +22,6 @@ rustc-hash.workspace = true
either.workspace = true
itertools.workspace = true
arrayvec.workspace = true
-indexmap.workspace = true
memchr = "2.7.5"
salsa.workspace = true
salsa-macros.workspace = true
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index dac8e09435..63745b9f74 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -24,10 +24,14 @@ indexmap.workspace = true
paths = { workspace = true, features = ["serde1"] }
tt.workspace = true
stdx.workspace = true
+proc-macro-srv = {workspace = true, optional = true}
# span = {workspace = true, default-features = false} does not work
span = { path = "../span", version = "0.0.0", default-features = false}
intern.workspace = true
+[features]
+sysroot-abi = ["proc-macro-srv", "proc-macro-srv/sysroot-abi"]
+
[lints]
workspace = true
diff --git a/crates/proc-macro-api/src/legacy_protocol.rs b/crates/proc-macro-api/src/legacy_protocol.rs
index ee96b899fe..0a72052cc5 100644
--- a/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/crates/proc-macro-api/src/legacy_protocol.rs
@@ -95,9 +95,10 @@ pub(crate) fn expand(
let mixed_site = span_data_table.insert_full(mixed_site).0;
let task = ExpandMacro {
data: ExpandMacroData {
- macro_body: FlatTree::new(subtree, version, &mut span_data_table),
+ macro_body: FlatTree::from_subtree(subtree, version, &mut span_data_table),
macro_name: proc_macro.name.to_string(),
- attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
+ attributes: attr
+ .map(|subtree| FlatTree::from_subtree(subtree, version, &mut span_data_table)),
has_global_spans: ExpnGlobals {
serialize: version >= version::HAS_GLOBAL_SPANS,
def_site,
diff --git a/crates/proc-macro-api/src/legacy_protocol/msg.rs b/crates/proc-macro-api/src/legacy_protocol/msg.rs
index b795c45589..487f50b145 100644
--- a/crates/proc-macro-api/src/legacy_protocol/msg.rs
+++ b/crates/proc-macro-api/src/legacy_protocol/msg.rs
@@ -297,7 +297,7 @@ mod tests {
let mut span_data_table = Default::default();
let task = ExpandMacro {
data: ExpandMacroData {
- macro_body: FlatTree::new(tt.view(), v, &mut span_data_table),
+ macro_body: FlatTree::from_subtree(tt.view(), v, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
has_global_spans: ExpnGlobals {
diff --git a/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs b/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
index fb3542d24f..7f19506048 100644
--- a/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
+++ b/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
@@ -8,8 +8,7 @@
//! about performance here a bit.
//!
//! So what this module does is dumping a `tt::TopSubtree` into a bunch of flat
-//! array of numbers. See the test in the parent module to get an example
-//! output.
+//! array of numbers.
//!
//! ```json
//! {
@@ -35,6 +34,9 @@
//! as we don't have bincode in Cargo.toml yet, lets stick with serde_json for
//! the time being.
+#[cfg(feature = "sysroot-abi")]
+use proc_macro_srv::TokenStream;
+
use std::collections::VecDeque;
use intern::Symbol;
@@ -120,12 +122,12 @@ struct IdentRepr {
}
impl FlatTree {
- pub fn new(
+ pub fn from_subtree(
subtree: tt::SubtreeView<'_, Span>,
version: u32,
span_data_table: &mut SpanDataIndexMap,
) -> FlatTree {
- let mut w = Writer::<Span> {
+ let mut w = Writer::<Span, _> {
string_table: FxHashMap::default(),
work: VecDeque::new(),
span_data_table,
@@ -138,7 +140,7 @@ impl FlatTree {
text: Vec::new(),
version,
};
- w.write(subtree);
+ w.write_subtree(subtree);
FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
@@ -162,11 +164,97 @@ impl FlatTree {
}
}
- pub fn new_raw<T: SpanTransformer<Table = ()>>(
- subtree: tt::SubtreeView<'_, T::Span>,
+ pub fn to_subtree_resolved(
+ self,
version: u32,
+ span_data_table: &SpanDataIndexMap,
+ ) -> tt::TopSubtree<Span> {
+ Reader::<Span> {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
+ } else {
+ read_vec(self.subtree, SubtreeRepr::read)
+ },
+ literal: if version >= EXTENDED_LEAF_DATA {
+ read_vec(self.literal, LiteralRepr::read_with_kind)
+ } else {
+ read_vec(self.literal, LiteralRepr::read)
+ },
+ punct: read_vec(self.punct, PunctRepr::read),
+ ident: if version >= EXTENDED_LEAF_DATA {
+ read_vec(self.ident, IdentRepr::read_with_rawness)
+ } else {
+ read_vec(self.ident, IdentRepr::read)
+ },
+ token_tree: self.token_tree,
+ text: self.text,
+ span_data_table,
+ version,
+ }
+ .read_subtree()
+ }
+}
+
+#[cfg(feature = "sysroot-abi")]
+impl FlatTree {
+ pub fn from_tokenstream(
+ tokenstream: proc_macro_srv::TokenStream<Span>,
+ version: u32,
+ call_site: Span,
+ span_data_table: &mut SpanDataIndexMap,
) -> FlatTree {
- let mut w = Writer::<T> {
+ let mut w = Writer::<Span, _> {
+ string_table: FxHashMap::default(),
+ work: VecDeque::new(),
+ span_data_table,
+
+ subtree: Vec::new(),
+ literal: Vec::new(),
+ punct: Vec::new(),
+ ident: Vec::new(),
+ token_tree: Vec::new(),
+ text: Vec::new(),
+ version,
+ };
+ let group = proc_macro_srv::Group {
+ delimiter: proc_macro_srv::Delimiter::None,
+ stream: Some(tokenstream),
+ span: proc_macro_srv::DelimSpan {
+ open: call_site,
+ close: call_site,
+ entire: call_site,
+ },
+ };
+ w.write_tokenstream(&group);
+
+ FlatTree {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
+ literal: if version >= EXTENDED_LEAF_DATA {
+ write_vec(w.literal, LiteralRepr::write_with_kind)
+ } else {
+ write_vec(w.literal, LiteralRepr::write)
+ },
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: if version >= EXTENDED_LEAF_DATA {
+ write_vec(w.ident, IdentRepr::write_with_rawness)
+ } else {
+ write_vec(w.ident, IdentRepr::write)
+ },
+ token_tree: w.token_tree,
+ text: w.text,
+ }
+ }
+
+ pub fn from_tokenstream_raw<T: SpanTransformer<Table = ()>>(
+ tokenstream: proc_macro_srv::TokenStream<T::Span>,
+ call_site: T::Span,
+ version: u32,
+ ) -> FlatTree {
+ let mut w = Writer::<T, _> {
string_table: FxHashMap::default(),
work: VecDeque::new(),
span_data_table: &mut (),
@@ -179,7 +267,16 @@ impl FlatTree {
text: Vec::new(),
version,
};
- w.write(subtree);
+ let group = proc_macro_srv::Group {
+ delimiter: proc_macro_srv::Delimiter::None,
+ stream: Some(tokenstream),
+ span: proc_macro_srv::DelimSpan {
+ open: call_site,
+ close: call_site,
+ entire: call_site,
+ },
+ };
+ w.write_tokenstream(&group);
FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
@@ -203,12 +300,11 @@ impl FlatTree {
}
}
- pub fn to_subtree_resolved(
+ pub fn to_tokenstream_unresolved<T: SpanTransformer<Table = ()>>(
self,
version: u32,
- span_data_table: &SpanDataIndexMap,
- ) -> tt::TopSubtree<Span> {
- Reader::<Span> {
+ ) -> proc_macro_srv::TokenStream<T::Span> {
+ Reader::<T> {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
@@ -227,17 +323,18 @@ impl FlatTree {
},
token_tree: self.token_tree,
text: self.text,
- span_data_table,
+ span_data_table: &(),
version,
}
- .read()
+ .read_tokenstream()
}
- pub fn to_subtree_unresolved<T: SpanTransformer<Table = ()>>(
+ pub fn to_tokenstream_resolved(
self,
version: u32,
- ) -> tt::TopSubtree<T::Span> {
- Reader::<T> {
+ span_data_table: &SpanDataIndexMap,
+ ) -> proc_macro_srv::TokenStream<Span> {
+ Reader::<Span> {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
@@ -256,10 +353,10 @@ impl FlatTree {
},
token_tree: self.token_tree,
text: self.text,
- span_data_table: &(),
+ span_data_table,
version,
}
- .read()
+ .read_tokenstream()
}
}
@@ -391,8 +488,8 @@ impl SpanTransformer for Span {
}
}
-struct Writer<'a, 'span, S: SpanTransformer> {
- work: VecDeque<(usize, tt::iter::TtIter<'a, S::Span>)>,
+struct Writer<'a, 'span, S: SpanTransformer, W> {
+ work: VecDeque<(usize, W)>,
string_table: FxHashMap<std::borrow::Cow<'a, str>, u32>,
span_data_table: &'span mut S::Table,
version: u32,
@@ -405,8 +502,8 @@ struct Writer<'a, 'span, S: SpanTransformer> {
text: Vec<String>,
}
-impl<'a, T: SpanTransformer> Writer<'a, '_, T> {
- fn write(&mut self, root: tt::SubtreeView<'a, T::Span>) {
+impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
+ fn write_subtree(&mut self, root: tt::SubtreeView<'a, T::Span>) {
let subtree = root.top_subtree();
self.enqueue(subtree, root.iter());
while let Some((idx, subtree)) = self.work.pop_front() {
@@ -414,10 +511,6 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T> {
}
}
- fn token_id_of(&mut self, span: T::Span) -> SpanId {
- T::token_id_of(self.span_data_table, span)
- }
-
fn subtree(&mut self, idx: usize, subtree: tt::iter::TtIter<'a, T::Span>) {
let mut first_tt = self.token_tree.len();
let n_tt = subtree.clone().count(); // FIXME: `count()` walks over the entire iterator.
@@ -502,6 +595,12 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T> {
self.work.push_back((idx, contents));
idx as u32
}
+}
+
+impl<'a, T: SpanTransformer, U> Writer<'a, '_, T, U> {
+ fn token_id_of(&mut self, span: T::Span) -> SpanId {
+ T::token_id_of(self.span_data_table, span)
+ }
pub(crate) fn intern(&mut self, text: &'a str) -> u32 {
let table = &mut self.text;
@@ -522,6 +621,105 @@ impl<'a, T: SpanTransformer> Writer<'a, '_, T> {
}
}
+#[cfg(feature = "sysroot-abi")]
+impl<'a, T: SpanTransformer> Writer<'a, '_, T, &'a proc_macro_srv::Group<T::Span>> {
+ fn write_tokenstream(&mut self, root: &'a proc_macro_srv::Group<T::Span>) {
+ self.enqueue_group(root);
+
+ while let Some((idx, group)) = self.work.pop_front() {
+ self.group(idx, group);
+ }
+ }
+
+ fn group(&mut self, idx: usize, group: &'a proc_macro_srv::Group<T::Span>) {
+ let mut first_tt = self.token_tree.len();
+ let n_tt = group.stream.as_ref().map_or(0, |it| it.len());
+ self.token_tree.resize(first_tt + n_tt, !0);
+
+ self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];
+
+ for tt in group.stream.iter().flat_map(|it| it.iter()) {
+ let idx_tag = match tt {
+ proc_macro_srv::TokenTree::Group(group) => {
+ let idx = self.enqueue_group(group);
+ idx << 2
+ }
+ proc_macro_srv::TokenTree::Literal(lit) => {
+ let idx = self.literal.len() as u32;
+ let id = self.token_id_of(lit.span);
+ let (text, suffix) = if self.version >= EXTENDED_LEAF_DATA {
+ (
+ self.intern(lit.symbol.as_str()),
+ lit.suffix.as_ref().map(|s| self.intern(s.as_str())).unwrap_or(!0),
+ )
+ } else {
+ (self.intern_owned(proc_macro_srv::literal_to_string(lit)), !0)
+ };
+ self.literal.push(LiteralRepr {
+ id,
+ text,
+ kind: u16::from_le_bytes(match lit.kind {
+ proc_macro_srv::LitKind::ErrWithGuar => [0, 0],
+ proc_macro_srv::LitKind::Byte => [1, 0],
+ proc_macro_srv::LitKind::Char => [2, 0],
+ proc_macro_srv::LitKind::Integer => [3, 0],
+ proc_macro_srv::LitKind::Float => [4, 0],
+ proc_macro_srv::LitKind::Str => [5, 0],
+ proc_macro_srv::LitKind::StrRaw(r) => [6, r],
+ proc_macro_srv::LitKind::ByteStr => [7, 0],
+ proc_macro_srv::LitKind::ByteStrRaw(r) => [8, r],
+ proc_macro_srv::LitKind::CStr => [9, 0],
+ proc_macro_srv::LitKind::CStrRaw(r) => [10, r],
+ }),
+ suffix,
+ });
+ (idx << 2) | 0b01
+ }
+ proc_macro_srv::TokenTree::Punct(punct) => {
+ let idx = self.punct.len() as u32;
+ let id = self.token_id_of(punct.span);
+ self.punct.push(PunctRepr {
+ char: punct.ch as char,
+ spacing: if punct.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
+ id,
+ });
+ (idx << 2) | 0b10
+ }
+ proc_macro_srv::TokenTree::Ident(ident) => {
+ let idx = self.ident.len() as u32;
+ let id = self.token_id_of(ident.span);
+ let text = if self.version >= EXTENDED_LEAF_DATA {
+ self.intern(ident.sym.as_str())
+ } else if ident.is_raw {
+ self.intern_owned(format!("r#{}", ident.sym.as_str(),))
+ } else {
+ self.intern(ident.sym.as_str())
+ };
+ self.ident.push(IdentRepr { id, text, is_raw: ident.is_raw });
+ (idx << 2) | 0b11
+ }
+ };
+ self.token_tree[first_tt] = idx_tag;
+ first_tt += 1;
+ }
+ }
+
+ fn enqueue_group(&mut self, group: &'a proc_macro_srv::Group<T::Span>) -> u32 {
+ let idx = self.subtree.len();
+ let open = self.token_id_of(group.span.open);
+ let close = self.token_id_of(group.span.close);
+ let delimiter_kind = match group.delimiter {
+ proc_macro_srv::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ proc_macro_srv::Delimiter::Brace => tt::DelimiterKind::Brace,
+ proc_macro_srv::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ proc_macro_srv::Delimiter::None => tt::DelimiterKind::Invisible,
+ };
+ self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
+ self.work.push_back((idx, group));
+ idx as u32
+ }
+}
+
struct Reader<'span, S: SpanTransformer> {
version: u32,
subtree: Vec<SubtreeRepr>,
@@ -534,7 +732,7 @@ struct Reader<'span, S: SpanTransformer> {
}
impl<T: SpanTransformer> Reader<'_, T> {
- pub(crate) fn read(self) -> tt::TopSubtree<T::Span> {
+ pub(crate) fn read_subtree(self) -> tt::TopSubtree<T::Span> {
let mut res: Vec<Option<(tt::Delimiter<T::Span>, Vec<tt::TokenTree<T::Span>>)>> =
vec![None; self.subtree.len()];
let read_span = |id| T::span_for_token_id(self.span_data_table, id);
@@ -641,3 +839,122 @@ impl<T: SpanTransformer> Reader<'_, T> {
tt::TopSubtree(res.into_boxed_slice())
}
}
+
+#[cfg(feature = "sysroot-abi")]
+impl<T: SpanTransformer> Reader<'_, T> {
+ pub(crate) fn read_tokenstream(self) -> proc_macro_srv::TokenStream<T::Span> {
+ let mut res: Vec<Option<proc_macro_srv::Group<T::Span>>> = vec![None; self.subtree.len()];
+ let read_span = |id| T::span_for_token_id(self.span_data_table, id);
+ for i in (0..self.subtree.len()).rev() {
+ let repr = &self.subtree[i];
+ let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
+
+ let stream = token_trees
+ .iter()
+ .copied()
+ .map(|idx_tag| {
+ let tag = idx_tag & 0b11;
+ let idx = (idx_tag >> 2) as usize;
+ match tag {
+ // XXX: we iterate subtrees in reverse to guarantee
+ // that this unwrap doesn't fire.
+ 0b00 => proc_macro_srv::TokenTree::Group(res[idx].take().unwrap()),
+ 0b01 => {
+ let repr = &self.literal[idx];
+ let text = self.text[repr.text as usize].as_str();
+ let span = read_span(repr.id);
+ proc_macro_srv::TokenTree::Literal(
+ if self.version >= EXTENDED_LEAF_DATA {
+ proc_macro_srv::Literal {
+ symbol: Symbol::intern(text),
+ span,
+ kind: match u16::to_le_bytes(repr.kind) {
+ [0, _] => proc_macro_srv::LitKind::ErrWithGuar,
+ [1, _] => proc_macro_srv::LitKind::Byte,
+ [2, _] => proc_macro_srv::LitKind::Char,
+ [3, _] => proc_macro_srv::LitKind::Integer,
+ [4, _] => proc_macro_srv::LitKind::Float,
+ [5, _] => proc_macro_srv::LitKind::Str,
+ [6, r] => proc_macro_srv::LitKind::StrRaw(r),
+ [7, _] => proc_macro_srv::LitKind::ByteStr,
+ [8, r] => proc_macro_srv::LitKind::ByteStrRaw(r),
+ [9, _] => proc_macro_srv::LitKind::CStr,
+ [10, r] => proc_macro_srv::LitKind::CStrRaw(r),
+ _ => unreachable!(),
+ },
+ suffix: if repr.suffix != !0 {
+ Some(Symbol::intern(
+ self.text[repr.suffix as usize].as_str(),
+ ))
+ } else {
+ None
+ },
+ }
+ } else {
+ proc_macro_srv::literal_from_str(text, span).unwrap_or_else(
+ |_| proc_macro_srv::Literal {
+ symbol: Symbol::intern("internal error"),
+ span,
+ kind: proc_macro_srv::LitKind::ErrWithGuar,
+ suffix: None,
+ },
+ )
+ },
+ )
+ }
+ 0b10 => {
+ let repr = &self.punct[idx];
+ proc_macro_srv::TokenTree::Punct(proc_macro_srv::Punct {
+ ch: repr.char as u8,
+ joint: repr.spacing == tt::Spacing::Joint,
+ span: read_span(repr.id),
+ })
+ }
+ 0b11 => {
+ let repr = &self.ident[idx];
+ let text = self.text[repr.text as usize].as_str();
+ let (is_raw, text) = if self.version >= EXTENDED_LEAF_DATA {
+ (
+ if repr.is_raw {
+ tt::IdentIsRaw::Yes
+ } else {
+ tt::IdentIsRaw::No
+ },
+ text,
+ )
+ } else {
+ tt::IdentIsRaw::split_from_symbol(text)
+ };
+ proc_macro_srv::TokenTree::Ident(proc_macro_srv::Ident {
+ sym: Symbol::intern(text),
+ span: read_span(repr.id),
+ is_raw: is_raw.yes(),
+ })
+ }
+ other => panic!("bad tag: {other}"),
+ }
+ })
+ .collect::<Vec<_>>();
+ let g = proc_macro_srv::Group {
+ delimiter: match repr.kind {
+ tt::DelimiterKind::Parenthesis => proc_macro_srv::Delimiter::Parenthesis,
+ tt::DelimiterKind::Brace => proc_macro_srv::Delimiter::Brace,
+ tt::DelimiterKind::Bracket => proc_macro_srv::Delimiter::Bracket,
+ tt::DelimiterKind::Invisible => proc_macro_srv::Delimiter::None,
+ },
+ stream: if stream.is_empty() { None } else { Some(TokenStream::new(stream)) },
+ span: proc_macro_srv::DelimSpan {
+ open: read_span(repr.open),
+ close: read_span(repr.close),
+ // FIXME
+ entire: read_span(repr.close),
+ },
+ };
+ res[i] = Some(g);
+ }
+ // FIXME: double check this
+ proc_macro_srv::TokenStream::new(vec![proc_macro_srv::TokenTree::Group(
+ res[0].take().unwrap(),
+ )])
+ }
+}
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 97919b85b5..870d81f976 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -5,6 +5,13 @@
//! is used to provide basic infrastructure for communication between two
//! processes: Client (RA itself), Server (the external program)
+#![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))]
+#![cfg_attr(
+ feature = "sysroot-abi",
+ feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)
+)]
+#![allow(internal_features)]
+
pub mod legacy_protocol;
mod process;
diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml
index 91e9e62b08..dd31e74915 100644
--- a/crates/proc-macro-srv-cli/Cargo.toml
+++ b/crates/proc-macro-srv-cli/Cargo.toml
@@ -19,7 +19,7 @@ postcard = { version = "1.1.3", optional = true }
[features]
default = ["postcard"]
-sysroot-abi = ["proc-macro-srv/sysroot-abi"]
+sysroot-abi = ["proc-macro-srv/sysroot-abi", "proc-macro-api/sysroot-abi"]
in-rust-tree = ["proc-macro-srv/in-rust-tree", "sysroot-abi"]
postcard = ["dep:postcard"]
diff --git a/crates/proc-macro-srv-cli/build.rs b/crates/proc-macro-srv-cli/build.rs
index 12e7c8b05b..bcf639fefc 100644
--- a/crates/proc-macro-srv-cli/build.rs
+++ b/crates/proc-macro-srv-cli/build.rs
@@ -5,7 +5,6 @@ use std::{env, path::PathBuf, process::Command};
fn main() {
set_rerun();
set_commit_info();
- println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");
}
fn set_rerun() {
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index 662d34865e..9d74fa637a 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -9,10 +9,10 @@ extern crate rustc_driver as _;
mod version;
-#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
+#[cfg(feature = "sysroot-abi")]
mod main_loop;
use clap::{Command, ValueEnum};
-#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
+#[cfg(feature = "sysroot-abi")]
use main_loop::run;
fn main() -> std::io::Result<()> {
@@ -77,7 +77,7 @@ impl ValueEnum for ProtocolFormat {
}
}
-#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
+#[cfg(not(feature = "sysroot-abi"))]
fn run(_: ProtocolFormat) -> std::io::Result<()> {
Err(std::io::Error::new(
std::io::ErrorKind::Unsupported,
diff --git a/crates/proc-macro-srv-cli/src/main_loop.rs b/crates/proc-macro-srv-cli/src/main_loop.rs
index 703bc965db..5533107570 100644
--- a/crates/proc-macro-srv-cli/src/main_loop.rs
+++ b/crates/proc-macro-srv-cli/src/main_loop.rs
@@ -91,9 +91,10 @@ fn run_json() -> io::Result<()> {
let mixed_site = SpanId(mixed_site as u32);
let macro_body =
- macro_body.to_subtree_unresolved::<SpanTrans>(CURRENT_API_VERSION);
- let attributes = attributes
- .map(|it| it.to_subtree_unresolved::<SpanTrans>(CURRENT_API_VERSION));
+ macro_body.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION);
+ let attributes = attributes.map(|it| {
+ it.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION)
+ });
srv.expand(
lib,
@@ -107,8 +108,9 @@ fn run_json() -> io::Result<()> {
mixed_site,
)
.map(|it| {
- msg::FlatTree::new_raw::<SpanTrans>(
- tt::SubtreeView::new(&it),
+ msg::FlatTree::from_tokenstream_raw::<SpanTrans>(
+ it,
+ call_site,
CURRENT_API_VERSION,
)
})
@@ -122,10 +124,10 @@ fn run_json() -> io::Result<()> {
let call_site = span_data_table[call_site];
let mixed_site = span_data_table[mixed_site];
- let macro_body =
- macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table);
+ let macro_body = macro_body
+ .to_tokenstream_resolved(CURRENT_API_VERSION, &span_data_table);
let attributes = attributes.map(|it| {
- it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)
+ it.to_tokenstream_resolved(CURRENT_API_VERSION, &span_data_table)
});
srv.expand(
lib,
@@ -140,9 +142,10 @@ fn run_json() -> io::Result<()> {
)
.map(|it| {
(
- msg::FlatTree::new(
- tt::SubtreeView::new(&it),
+ msg::FlatTree::from_tokenstream(
+ it,
CURRENT_API_VERSION,
+ call_site,
&mut span_data_table,
),
serialize_span_data_index_map(&span_data_table),
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index d037e715e7..3610171784 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -18,8 +18,6 @@ libloading.workspace = true
memmap2.workspace = true
temp-dir.workspace = true
-tt.workspace = true
-syntax-bridge.workspace = true
paths.workspace = true
# span = {workspace = true, default-features = false} does not work
span = { path = "../span", version = "0.0.0", default-features = false}
@@ -38,8 +36,9 @@ expect-test.workspace = true
proc-macro-test.path = "./proc-macro-test"
[features]
+default = []
sysroot-abi = []
-in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"]
+in-rust-tree = ["sysroot-abi"]
[lints]
workspace = true
diff --git a/crates/proc-macro-srv/build.rs b/crates/proc-macro-srv/build.rs
index 97c0c4bda7..4cf1820e7b 100644
--- a/crates/proc-macro-srv/build.rs
+++ b/crates/proc-macro-srv/build.rs
@@ -4,8 +4,6 @@
use std::{env, process::Command};
fn main() {
- println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");
-
let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set");
#[allow(clippy::disallowed_methods)]
let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run");
diff --git a/crates/proc-macro-srv/src/bridge.rs b/crates/proc-macro-srv/src/bridge.rs
new file mode 100644
index 0000000000..fc063a07b5
--- /dev/null
+++ b/crates/proc-macro-srv/src/bridge.rs
@@ -0,0 +1,12 @@
+//! `proc_macro::bridge` newtypes.
+
+use proc_macro::bridge as pm_bridge;
+
+pub use pm_bridge::{DelimSpan, Diagnostic, ExpnGlobals, LitKind};
+
+pub type TokenTree<S> =
+ pm_bridge::TokenTree<crate::token_stream::TokenStream<S>, S, intern::Symbol>;
+pub type Literal<S> = pm_bridge::Literal<S, intern::Symbol>;
+pub type Group<S> = pm_bridge::Group<crate::token_stream::TokenStream<S>, S>;
+pub type Punct<S> = pm_bridge::Punct<S>;
+pub type Ident<S> = pm_bridge::Ident<S, intern::Symbol>;
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index c8513a1067..03433197b7 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -13,7 +13,7 @@ use paths::{Utf8Path, Utf8PathBuf};
use crate::{
PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros,
- server_impl::TopSubtree,
+ token_stream::TokenStream,
};
pub(crate) struct Expander {
@@ -40,18 +40,18 @@ impl Expander {
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
- macro_body: TopSubtree<S>,
- attributes: Option<TopSubtree<S>>,
+ macro_body: TokenStream<S>,
+ attribute: Option<TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<TopSubtree<S>, PanicMessage>
+ ) -> Result<TokenStream<S>, PanicMessage>
where
<S::Server as bridge::server::Types>::TokenStream: Default,
{
self.inner
.proc_macros
- .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site)
+ .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site)
}
pub(crate) fn list_macros(&self) -> impl Iterator<Item = (&str, ProcMacroKind)> {
diff --git a/crates/proc-macro-srv/src/dylib/proc_macros.rs b/crates/proc-macro-srv/src/dylib/proc_macros.rs
index 9b5721e370..c879c7609d 100644
--- a/crates/proc-macro-srv/src/dylib/proc_macros.rs
+++ b/crates/proc-macro-srv/src/dylib/proc_macros.rs
@@ -2,7 +2,7 @@
use proc_macro::bridge;
-use crate::{ProcMacroKind, ProcMacroSrvSpan, server_impl::TopSubtree};
+use crate::{ProcMacroKind, ProcMacroSrvSpan, token_stream::TokenStream};
#[repr(transparent)]
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
@@ -17,18 +17,13 @@ impl ProcMacros {
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
- macro_body: TopSubtree<S>,
- attributes: Option<TopSubtree<S>>,
+ macro_body: TokenStream<S>,
+ attribute: Option<TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<TopSubtree<S>, crate::PanicMessage> {
- let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);
-
- let parsed_attributes = attributes
- .map_or_else(crate::server_impl::TokenStream::default, |attr| {
- crate::server_impl::TokenStream::with_subtree(attr)
- });
+ ) -> Result<TokenStream<S>, crate::PanicMessage> {
+ let parsed_attributes = attribute.unwrap_or_default();
for proc_macro in &self.0 {
match proc_macro {
@@ -38,35 +33,29 @@ impl ProcMacros {
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
parsed_attributes,
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
_ => continue,
}
diff --git a/crates/proc-macro-srv/src/dylib/version.rs b/crates/proc-macro-srv/src/dylib/version.rs
index 3b2551f08c..209693b4da 100644
--- a/crates/proc-macro-srv/src/dylib/version.rs
+++ b/crates/proc-macro-srv/src/dylib/version.rs
@@ -5,11 +5,14 @@ use std::io::{self, Read};
use object::read::{Object, ObjectSection};
#[derive(Debug)]
-#[allow(dead_code)]
pub struct RustCInfo {
+ #[allow(dead_code)]
pub version: (usize, usize, usize),
+ #[allow(dead_code)]
pub channel: String,
+ #[allow(dead_code)]
pub commit: Option<String>,
+ #[allow(dead_code)]
pub date: Option<String>,
// something like "rustc 1.58.1 (db9d1b20b 2022-01-20)"
pub version_string: String,
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index cb97882c58..aff4dc5037 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -10,11 +10,16 @@
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
-#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
-#![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))]
+#![cfg(feature = "sysroot-abi")]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
-#![allow(unreachable_pub, internal_features, clippy::disallowed_types, clippy::print_stderr)]
+#![allow(
+ unreachable_pub,
+ internal_features,
+ clippy::disallowed_types,
+ clippy::print_stderr,
+ unused_crate_dependencies
+)]
#![deny(deprecated_safe, clippy::undocumented_unsafe_blocks)]
extern crate proc_macro;
@@ -26,8 +31,10 @@ extern crate ra_ap_rustc_lexer as rustc_lexer;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer;
+mod bridge;
mod dylib;
mod server_impl;
+mod token_stream;
use std::{
collections::{HashMap, hash_map::Entry},
@@ -43,10 +50,14 @@ use paths::{Utf8Path, Utf8PathBuf};
use span::Span;
use temp_dir::TempDir;
-use crate::server_impl::TokenStream;
-
pub use crate::server_impl::token_id::SpanId;
+pub use proc_macro::Delimiter;
+
+pub use crate::bridge::*;
+pub use crate::server_impl::literal_from_str;
+pub use crate::token_stream::{TokenStream, literal_to_string};
+
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum ProcMacroKind {
CustomDerive,
@@ -81,12 +92,12 @@ impl ProcMacroSrv<'_> {
env: &[(String, String)],
current_dir: Option<impl AsRef<Path>>,
macro_name: &str,
- macro_body: tt::TopSubtree<S>,
- attribute: Option<tt::TopSubtree<S>>,
+ macro_body: token_stream::TokenStream<S>,
+ attribute: Option<token_stream::TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<Vec<tt::TokenTree<S>>, PanicMessage> {
+ ) -> Result<token_stream::TokenStream<S>, PanicMessage> {
let snapped_env = self.env;
let expander = self.expander(lib.as_ref()).map_err(|err| PanicMessage {
message: Some(format!("failed to load macro: {err}")),
@@ -102,15 +113,7 @@ impl ProcMacroSrv<'_> {
.name(macro_name.to_owned())
.spawn_scoped(s, move || {
expander
- .expand(
- macro_name,
- server_impl::TopSubtree(macro_body.0.into_vec()),
- attribute.map(|it| server_impl::TopSubtree(it.0.into_vec())),
- def_site,
- call_site,
- mixed_site,
- )
- .map(|tt| tt.0)
+ .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site)
});
match thread.unwrap().join() {
Ok(res) => res,
@@ -157,8 +160,8 @@ impl ProcMacroSrv<'_> {
}
}
-pub trait ProcMacroSrvSpan: Copy + Send {
- type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
+pub trait ProcMacroSrvSpan: Copy + Send + Sync {
+ type Server: proc_macro::bridge::server::Server<TokenStream = crate::token_stream::TokenStream<Self>>;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index 32ad32731b..bacead1a88 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -5,122 +5,15 @@
//! we could provide any TokenStream implementation.
//! The original idea from fedochet is using proc-macro2 as backend,
//! we use tt instead for better integration with RA.
-//!
-//! FIXME: No span and source file information is implemented yet
-
-use std::fmt;
-
-use intern::Symbol;
-use proc_macro::bridge;
-
-mod token_stream;
-pub use token_stream::TokenStream;
-
-pub mod rust_analyzer_span;
-pub mod token_id;
-
-use tt::Spacing;
-
-#[derive(Clone)]
-pub(crate) struct TopSubtree<S>(pub(crate) Vec<tt::TokenTree<S>>);
-
-impl<S: Copy + fmt::Debug> fmt::Debug for TopSubtree<S> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Debug::fmt(&tt::TokenTreesView::new(&self.0), f)
- }
-}
-
-impl<S: Copy> TopSubtree<S> {
- pub(crate) fn top_subtree(&self) -> &tt::Subtree<S> {
- let tt::TokenTree::Subtree(subtree) = &self.0[0] else {
- unreachable!("the first token tree is always the top subtree");
- };
- subtree
- }
- pub(crate) fn from_bridge(group: bridge::Group<TokenStream<S>, S>) -> Self {
- let delimiter = delim_to_internal(group.delimiter, group.span);
- let mut tts =
- group.stream.map(|it| it.token_trees).unwrap_or_else(|| Vec::with_capacity(1));
- tts.insert(0, tt::TokenTree::Subtree(tt::Subtree { delimiter, len: tts.len() as u32 }));
- TopSubtree(tts)
- }
-}
-
-fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> {
- let kind = match d {
- proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
- proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
- proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
- proc_macro::Delimiter::None => tt::DelimiterKind::Invisible,
- };
- tt::Delimiter { open: span.open, close: span.close, kind }
-}
-
-fn delim_to_external<S>(d: tt::Delimiter<S>) -> proc_macro::Delimiter {
- match d.kind {
- tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis,
- tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace,
- tt::DelimiterKind::Bracket => proc_macro::Delimiter::Bracket,
- tt::DelimiterKind::Invisible => proc_macro::Delimiter::None,
- }
-}
-
-#[allow(unused)]
-fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing {
- match spacing {
- proc_macro::Spacing::Alone => Spacing::Alone,
- proc_macro::Spacing::Joint => Spacing::Joint,
- }
-}
+pub(crate) mod rust_analyzer_span;
+pub(crate) mod token_id;
-#[allow(unused)]
-fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
- match spacing {
- Spacing::Alone | Spacing::JointHidden => proc_macro::Spacing::Alone,
- Spacing::Joint => proc_macro::Spacing::Joint,
- }
-}
-
-fn literal_kind_to_external(kind: tt::LitKind) -> bridge::LitKind {
- match kind {
- tt::LitKind::Byte => bridge::LitKind::Byte,
- tt::LitKind::Char => bridge::LitKind::Char,
- tt::LitKind::Integer => bridge::LitKind::Integer,
- tt::LitKind::Float => bridge::LitKind::Float,
- tt::LitKind::Str => bridge::LitKind::Str,
- tt::LitKind::StrRaw(r) => bridge::LitKind::StrRaw(r),
- tt::LitKind::ByteStr => bridge::LitKind::ByteStr,
- tt::LitKind::ByteStrRaw(r) => bridge::LitKind::ByteStrRaw(r),
- tt::LitKind::CStr => bridge::LitKind::CStr,
- tt::LitKind::CStrRaw(r) => bridge::LitKind::CStrRaw(r),
- tt::LitKind::Err(_) => bridge::LitKind::ErrWithGuar,
- }
-}
-
-fn literal_kind_to_internal(kind: bridge::LitKind) -> tt::LitKind {
- match kind {
- bridge::LitKind::Byte => tt::LitKind::Byte,
- bridge::LitKind::Char => tt::LitKind::Char,
- bridge::LitKind::Str => tt::LitKind::Str,
- bridge::LitKind::StrRaw(r) => tt::LitKind::StrRaw(r),
- bridge::LitKind::ByteStr => tt::LitKind::ByteStr,
- bridge::LitKind::ByteStrRaw(r) => tt::LitKind::ByteStrRaw(r),
- bridge::LitKind::CStr => tt::LitKind::CStr,
- bridge::LitKind::CStrRaw(r) => tt::LitKind::CStrRaw(r),
- bridge::LitKind::Integer => tt::LitKind::Integer,
- bridge::LitKind::Float => tt::LitKind::Float,
- bridge::LitKind::ErrWithGuar => tt::LitKind::Err(()),
- }
-}
-
-pub(super) fn literal_from_str<Span: Copy>(
+pub fn literal_from_str<Span: Copy>(
s: &str,
span: Span,
-) -> Result<bridge::Literal<Span, Symbol>, ()> {
- use proc_macro::bridge::LitKind;
+) -> Result<crate::bridge::Literal<Span>, ()> {
use rustc_lexer::{LiteralKind, Token, TokenKind};
-
let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
@@ -142,98 +35,5 @@ pub(super) fn literal_from_str<Span: Copy>(
}
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
- let (kind, start_offset, end_offset) = match kind {
- LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
- LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
- LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
- LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
- LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
- LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
- LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
- LiteralKind::RawStr { n_hashes } => (
- LitKind::StrRaw(n_hashes.unwrap_or_default()),
- 2 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawByteStr { n_hashes } => (
- LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawCStr { n_hashes } => (
- LitKind::CStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- };
-
- let (lit, suffix) = s.split_at(suffix_start as usize);
- let lit = &lit[start_offset..lit.len() - end_offset];
- let suffix = match suffix {
- "" | "_" => None,
- suffix => Some(Symbol::intern(suffix)),
- };
-
- Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span })
-}
-
-pub(super) fn from_token_tree<Span: Copy>(
- tree: bridge::TokenTree<TokenStream<Span>, Span, Symbol>,
-) -> TokenStream<Span> {
- match tree {
- bridge::TokenTree::Group(group) => {
- let group = TopSubtree::from_bridge(group);
- TokenStream { token_trees: group.0 }
- }
-
- bridge::TokenTree::Ident(ident) => {
- let text = ident.sym;
- let ident: tt::Ident<Span> = tt::Ident {
- sym: text,
- span: ident.span,
- is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
- };
- let leaf = tt::Leaf::from(ident);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
-
- bridge::TokenTree::Literal(literal) => {
- let mut token_trees = Vec::new();
- let mut symbol = literal.symbol;
- if matches!(
- literal.kind,
- proc_macro::bridge::LitKind::Integer | proc_macro::bridge::LitKind::Float
- ) && symbol.as_str().starts_with('-')
- {
- token_trees.push(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
- spacing: tt::Spacing::Alone,
- span: literal.span,
- char: '-',
- })));
- symbol = Symbol::intern(&symbol.as_str()[1..]);
- }
- let literal = tt::Literal {
- symbol,
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
- let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- token_trees.push(tree);
- TokenStream { token_trees }
- }
-
- bridge::TokenTree::Punct(p) => {
- let punct = tt::Punct {
- char: p.ch as char,
- spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
- span: p.span,
- };
- let leaf = tt::Leaf::from(punct);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
- }
+ Ok(crate::token_stream::literal_from_lexer(s, span, kind, suffix_start))
}
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index a1863efafb..7c685c2da7 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -10,13 +10,13 @@ use std::{
};
use intern::Symbol;
-use proc_macro::bridge::{self, server};
-use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
-use tt::{TextRange, TextSize};
+use proc_macro::bridge::server;
+use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span, TextRange, TextSize};
-use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
-
-type TokenStream = crate::server_impl::TokenStream<Span>;
+use crate::{
+ bridge::{Diagnostic, ExpnGlobals, Literal, TokenTree},
+ server_impl::literal_from_str,
+};
pub struct FreeFunctions;
@@ -32,7 +32,7 @@ pub struct RaSpanServer {
impl server::Types for RaSpanServer {
type FreeFunctions = FreeFunctions;
- type TokenStream = TokenStream;
+ type TokenStream = crate::token_stream::TokenStream<Span>;
type Span = Span;
type Symbol = Symbol;
}
@@ -49,14 +49,11 @@ impl server::FreeFunctions for RaSpanServer {
self.tracked_paths.insert(path.into());
}
- fn literal_from_str(
- &mut self,
- s: &str,
- ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
literal_from_str(s, self.call_site)
}
- fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
+ fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {
// FIXME handle diagnostic
}
}
@@ -77,11 +74,9 @@ impl server::TokenStream for RaSpanServer {
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
}
- fn from_token_tree(
- &mut self,
- tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
- ) -> Self::TokenStream {
- from_token_tree(tree)
+
+ fn from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
+ Self::TokenStream::new(vec![tree])
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -96,16 +91,17 @@ impl server::TokenStream for RaSpanServer {
fn concat_trees(
&mut self,
base: Option<Self::TokenStream>,
- trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ trees: Vec<TokenTree<Self::Span>>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
- }
- for tree in trees {
- builder.push(self.from_token_tree(tree));
+ match base {
+ Some(mut base) => {
+ for tt in trees {
+ base.push_tree(tt);
+ }
+ base
+ }
+ None => Self::TokenStream::new(trees),
}
- builder.build()
}
fn concat_streams(
@@ -113,23 +109,15 @@ impl server::TokenStream for RaSpanServer {
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
- }
- for stream in streams {
- builder.push(stream);
+ let mut stream = base.unwrap_or_default();
+ for s in streams {
+ stream.push_stream(s);
}
- builder.build()
+ stream
}
- fn into_trees(
- &mut self,
- stream: Self::TokenStream,
- ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- stream.into_bridge(&mut |first, second| {
- server::Span::join(self, first, second).unwrap_or(first)
- })
+ fn into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
+ (*stream.0).clone()
}
}
@@ -289,8 +277,8 @@ impl server::Symbol for RaSpanServer {
}
impl server::Server for RaSpanServer {
- fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
- bridge::ExpnGlobals {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ ExpnGlobals {
def_site: self.def_site,
call_site: self.call_site,
mixed_site: self.mixed_site,
@@ -305,97 +293,3 @@ impl server::Server for RaSpanServer {
f(symbol.as_str())
}
}
-
-#[cfg(test)]
-mod tests {
- use span::{EditionedFileId, FileId, SyntaxContext};
-
- use super::*;
-
- #[test]
- fn test_ra_server_to_string() {
- let span = Span {
- range: TextRange::empty(TextSize::new(0)),
- anchor: span::SpanAnchor {
- file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
- ast_id: span::ROOT_ERASED_FILE_AST_ID,
- },
- ctx: SyntaxContext::root(span::Edition::CURRENT),
- };
- let s = TokenStream {
- token_trees: vec![
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("struct"),
- span,
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("T"),
- span,
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: span,
- close: span,
- kind: tt::DelimiterKind::Brace,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
- kind: tt::LitKind::Str,
- symbol: Symbol::intern("string"),
- suffix: None,
- span,
- })),
- ],
- };
-
- assert_eq!(s.to_string(), "struct T {\"string\"}");
- }
-
- #[test]
- fn test_ra_server_from_str() {
- let span = Span {
- range: TextRange::empty(TextSize::new(0)),
- anchor: span::SpanAnchor {
- file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
- ast_id: span::ROOT_ERASED_FILE_AST_ID,
- },
- ctx: SyntaxContext::root(span::Edition::CURRENT),
- };
- let subtree_paren_a = vec![
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: span,
- close: span,
- kind: tt::DelimiterKind::Parenthesis,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- is_raw: tt::IdentIsRaw::No,
- sym: Symbol::intern("a"),
- span,
- })),
- ];
-
- let t1 = TokenStream::from_str("(a)", span).unwrap();
- assert_eq!(t1.token_trees.len(), 2);
- assert!(t1.token_trees == subtree_paren_a);
-
- let t2 = TokenStream::from_str("(a);", span).unwrap();
- assert_eq!(t2.token_trees.len(), 3);
- assert!(t2.token_trees[0..2] == subtree_paren_a);
-
- let underscore = TokenStream::from_str("_", span).unwrap();
- assert!(
- underscore.token_trees[0]
- == tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("_"),
- span,
- is_raw: tt::IdentIsRaw::No,
- }))
- );
- }
-}
diff --git a/crates/proc-macro-srv/src/server_impl/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs
index 91e70ea243..3814320cbe 100644
--- a/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -3,9 +3,12 @@
use std::ops::{Bound, Range};
use intern::Symbol;
-use proc_macro::bridge::{self, server};
+use proc_macro::bridge::server;
-use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
+use crate::{
+ bridge::{Diagnostic, ExpnGlobals, Literal, TokenTree},
+ server_impl::literal_from_str,
+};
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SpanId(pub u32);
@@ -17,7 +20,6 @@ impl std::fmt::Debug for SpanId {
}
type Span = SpanId;
-type TokenStream = crate::server_impl::TokenStream<Span>;
pub struct FreeFunctions;
@@ -29,7 +31,7 @@ pub struct SpanIdServer {
impl server::Types for SpanIdServer {
type FreeFunctions = FreeFunctions;
- type TokenStream = TokenStream;
+ type TokenStream = crate::token_stream::TokenStream<Span>;
type Span = Span;
type Symbol = Symbol;
}
@@ -40,14 +42,11 @@ impl server::FreeFunctions for SpanIdServer {
}
fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {}
fn track_path(&mut self, _path: &str) {}
- fn literal_from_str(
- &mut self,
- s: &str,
- ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
literal_from_str(s, self.call_site)
}
- fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {}
+ fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {}
}
impl server::TokenStream for SpanIdServer {
@@ -66,11 +65,8 @@ impl server::TokenStream for SpanIdServer {
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
}
- fn from_token_tree(
- &mut self,
- tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
- ) -> Self::TokenStream {
- from_token_tree(tree)
+ fn from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
+ Self::TokenStream::new(vec![tree])
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -80,16 +76,17 @@ impl server::TokenStream for SpanIdServer {
fn concat_trees(
&mut self,
base: Option<Self::TokenStream>,
- trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ trees: Vec<TokenTree<Self::Span>>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
- }
- for tree in trees {
- builder.push(self.from_token_tree(tree));
+ match base {
+ Some(mut base) => {
+ for tt in trees {
+ base.push_tree(tt);
+ }
+ base
+ }
+ None => Self::TokenStream::new(trees),
}
- builder.build()
}
fn concat_streams(
@@ -97,22 +94,15 @@ impl server::TokenStream for SpanIdServer {
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ let mut stream = base.unwrap_or_default();
+ for s in streams {
+ stream.push_stream(s);
}
- for stream in streams {
- builder.push(stream);
- }
- builder.build()
+ stream
}
- fn into_trees(
- &mut self,
- stream: Self::TokenStream,
- ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- // Can't join with `SpanId`.
- stream.into_bridge(&mut |first, _second| first)
+ fn into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
+ (*stream.0).clone()
}
}
@@ -191,8 +181,8 @@ impl server::Symbol for SpanIdServer {
}
impl server::Server for SpanIdServer {
- fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
- bridge::ExpnGlobals {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ ExpnGlobals {
def_site: self.def_site,
call_site: self.call_site,
mixed_site: self.mixed_site,
@@ -207,73 +197,3 @@ impl server::Server for SpanIdServer {
f(symbol.as_str())
}
}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_ra_server_to_string() {
- let s = TokenStream {
- token_trees: vec![
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("struct"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("T"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: SpanId(0),
- close: SpanId(0),
- kind: tt::DelimiterKind::Brace,
- },
- len: 0,
- }),
- ],
- };
-
- assert_eq!(s.to_string(), "struct T {}");
- }
-
- #[test]
- fn test_ra_server_from_str() {
- let subtree_paren_a = vec![
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: SpanId(0),
- close: SpanId(0),
- kind: tt::DelimiterKind::Parenthesis,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- is_raw: tt::IdentIsRaw::No,
- sym: Symbol::intern("a"),
- span: SpanId(0),
- })),
- ];
-
- let t1 = TokenStream::from_str("(a)", SpanId(0)).unwrap();
- assert_eq!(t1.token_trees.len(), 2);
- assert!(t1.token_trees[0..2] == subtree_paren_a);
-
- let t2 = TokenStream::from_str("(a);", SpanId(0)).unwrap();
- assert_eq!(t2.token_trees.len(), 3);
- assert!(t2.token_trees[0..2] == subtree_paren_a);
-
- let underscore = TokenStream::from_str("_", SpanId(0)).unwrap();
- assert!(
- underscore.token_trees[0]
- == tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("_"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- }))
- );
- }
-}
diff --git a/crates/proc-macro-srv/src/server_impl/token_stream.rs b/crates/proc-macro-srv/src/server_impl/token_stream.rs
deleted file mode 100644
index c5019a5917..0000000000
--- a/crates/proc-macro-srv/src/server_impl/token_stream.rs
+++ /dev/null
@@ -1,170 +0,0 @@
-//! TokenStream implementation used by sysroot ABI
-
-use proc_macro::bridge;
-
-use crate::server_impl::{TopSubtree, delim_to_external, literal_kind_to_external};
-
-#[derive(Clone)]
-pub struct TokenStream<S> {
- pub(super) token_trees: Vec<tt::TokenTree<S>>,
-}
-
-// #[derive(Default)] would mean that `S: Default`.
-impl<S> Default for TokenStream<S> {
- fn default() -> Self {
- Self { token_trees: Default::default() }
- }
-}
-
-impl<S: std::fmt::Debug + Copy> std::fmt::Debug for TokenStream<S> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.debug_struct("TokenStream")
- .field("token_trees", &tt::TokenTreesView::new(&self.token_trees))
- .finish()
- }
-}
-
-impl<S: Copy> TokenStream<S> {
- pub(crate) fn with_subtree(subtree: TopSubtree<S>) -> Self {
- let delimiter_kind = subtree.top_subtree().delimiter.kind;
- let mut token_trees = subtree.0;
- if delimiter_kind == tt::DelimiterKind::Invisible {
- token_trees.remove(0);
- }
- TokenStream { token_trees }
- }
-
- pub(crate) fn into_subtree(mut self, call_site: S) -> TopSubtree<S>
- where
- S: Copy,
- {
- self.token_trees.insert(
- 0,
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: call_site,
- close: call_site,
- kind: tt::DelimiterKind::Invisible,
- },
- len: self.token_trees.len() as u32,
- }),
- );
- TopSubtree(self.token_trees)
- }
-
- pub(super) fn is_empty(&self) -> bool {
- self.token_trees.is_empty()
- }
-
- pub(crate) fn into_bridge(
- self,
- join_spans: &mut dyn FnMut(S, S) -> S,
- ) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
- let mut result = Vec::new();
- let mut iter = self.token_trees.into_iter();
- while let Some(tree) = iter.next() {
- match tree {
- tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
- result.push(bridge::TokenTree::Ident(bridge::Ident {
- sym: ident.sym,
- is_raw: ident.is_raw.yes(),
- span: ident.span,
- }))
- }
- // Note, we do not have to assemble our `-` punct and literal split into a single
- // negative bridge literal here. As the proc-macro docs state
- // > Literals created from negative numbers might not survive round-trips through
- // > TokenStream or strings and may be broken into two tokens (- and positive
- // > literal).
- tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
- result.push(bridge::TokenTree::Literal(bridge::Literal {
- span: lit.span,
- kind: literal_kind_to_external(lit.kind),
- symbol: lit.symbol,
- suffix: lit.suffix,
- }))
- }
- tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
- result.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: punct.char as u8,
- joint: punct.spacing == tt::Spacing::Joint,
- span: punct.span,
- }))
- }
- tt::TokenTree::Subtree(subtree) => {
- result.push(bridge::TokenTree::Group(bridge::Group {
- delimiter: delim_to_external(subtree.delimiter),
- stream: if subtree.len == 0 {
- None
- } else {
- Some(TokenStream {
- token_trees: iter.by_ref().take(subtree.usize_len()).collect(),
- })
- },
- span: bridge::DelimSpan {
- open: subtree.delimiter.open,
- close: subtree.delimiter.close,
- entire: join_spans(subtree.delimiter.open, subtree.delimiter.close),
- },
- }))
- }
- }
- }
- result
- }
-}
-
-pub(super) struct TokenStreamBuilder<S> {
- acc: TokenStream<S>,
-}
-
-/// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
-pub(super) mod token_stream_impls {
-
- use core::fmt;
-
- use super::{TokenStream, TopSubtree};
-
- /// Attempts to break the string into tokens and parse those tokens into a token stream.
- /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
- /// or characters not existing in the language.
- /// All tokens in the parsed stream get `Span::call_site()` spans.
- ///
- /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
- /// change these errors into `LexError`s later.
- impl<S: Copy + fmt::Debug> TokenStream<S> {
- pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
- let subtree = syntax_bridge::parse_to_token_tree_static_span(
- span::Edition::CURRENT_FIXME,
- call_site,
- src,
- )
- .ok_or_else(|| format!("lexing error: {src}"))?;
-
- Ok(TokenStream::with_subtree(TopSubtree(subtree.0.into_vec())))
- }
- }
-
- #[allow(clippy::to_string_trait_impl)]
- impl<S> ToString for TokenStream<S> {
- fn to_string(&self) -> String {
- ::tt::pretty(&self.token_trees)
- }
- }
-}
-
-impl<S: Copy> TokenStreamBuilder<S> {
- pub(super) fn push(&mut self, stream: TokenStream<S>) {
- self.acc.token_trees.extend(stream.token_trees)
- }
-
- pub(super) fn build(self) -> TokenStream<S> {
- self.acc
- }
-}
-
-impl<S: Copy> Default for TokenStreamBuilder<S> {
- fn default() -> Self {
- Self { acc: TokenStream::default() }
- }
-}
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index d4f9976c92..1e2e8da60c 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -10,25 +10,45 @@ use expect_test::expect;
fn test_derive_empty() {
assert_expand(
"DeriveEmpty",
- r#"struct S;"#,
+ r#"struct S { field: &'r#lt fn(u32) -> &'a r#u32 }"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT struct 1
- IDENT S 1
- PUNCH ; [alone] 1
-
-
-
- SUBTREE $$ 1 1"#]],
+ IDENT 1 struct
+ IDENT 1 S
+ GROUP {} 1 1 1
+ IDENT 1 field
+ PUNCT 1 : [alone]
+ PUNCT 1 & [joint]
+ PUNCT 1 ' [joint]
+ IDENT 1 r#lt
+ IDENT 1 fn
+ GROUP () 1 1 1
+ IDENT 1 u32
+ PUNCT 1 - [joint]
+ PUNCT 1 > [alone]
+ PUNCT 1 & [joint]
+ PUNCT 1 ' [joint]
+ IDENT 1 a
+ IDENT 1 r#u32
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT struct 42:Root[0000, 0]@0..6#ROOT2024
- IDENT S 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@8..9#ROOT2024
-
-
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..6#ROOT2024 struct
+ IDENT 42:Root[0000, 0]@7..8#ROOT2024 S
+ GROUP {} 42:Root[0000, 0]@9..10#ROOT2024 42:Root[0000, 0]@46..47#ROOT2024 42:Root[0000, 0]@9..47#ROOT2024
+ IDENT 42:Root[0000, 0]@11..16#ROOT2024 field
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 : [alone]
+ PUNCT 42:Root[0000, 0]@18..19#ROOT2024 & [joint]
+ PUNCT 42:Root[0000, 0]@22..23#ROOT2024 ' [joint]
+ IDENT 42:Root[0000, 0]@22..24#ROOT2024 r#lt
+ IDENT 42:Root[0000, 0]@25..27#ROOT2024 fn
+ GROUP () 42:Root[0000, 0]@27..28#ROOT2024 42:Root[0000, 0]@31..32#ROOT2024 42:Root[0000, 0]@27..32#ROOT2024
+ IDENT 42:Root[0000, 0]@28..31#ROOT2024 u32
+ PUNCT 42:Root[0000, 0]@33..34#ROOT2024 - [joint]
+ PUNCT 42:Root[0000, 0]@34..35#ROOT2024 > [alone]
+ PUNCT 42:Root[0000, 0]@36..37#ROOT2024 & [joint]
+ PUNCT 42:Root[0000, 0]@38..39#ROOT2024 ' [joint]
+ IDENT 42:Root[0000, 0]@38..39#ROOT2024 a
+ IDENT 42:Root[0000, 0]@42..45#ROOT2024 r#u32
+ "#]],
);
}
@@ -36,35 +56,37 @@ fn test_derive_empty() {
fn test_derive_error() {
assert_expand(
"DeriveError",
- r#"struct S;"#,
+ r#"struct S { field: u32 }"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT struct 1
- IDENT S 1
- PUNCH ; [alone] 1
-
-
-
- SUBTREE $$ 1 1
- IDENT compile_error 1
- PUNCH ! [alone] 1
- SUBTREE () 1 1
- LITERAL Str #[derive(DeriveError)] struct S ; 1
- PUNCH ; [alone] 1"#]],
+ IDENT 1 struct
+ IDENT 1 S
+ GROUP {} 1 1 1
+ IDENT 1 field
+ PUNCT 1 : [alone]
+ IDENT 1 u32
+
+
+ IDENT 1 compile_error
+ PUNCT 1 ! [joint]
+ GROUP () 1 1 1
+ LITER 1 Str #[derive(DeriveError)] struct S {field 58 u32 }
+ PUNCT 1 ; [alone]
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT struct 42:Root[0000, 0]@0..6#ROOT2024
- IDENT S 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@8..9#ROOT2024
-
-
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT compile_error 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ! [alone] 42:Root[0000, 0]@0..100#ROOT2024
- SUBTREE () 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str #[derive(DeriveError)] struct S ; 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..6#ROOT2024 struct
+ IDENT 42:Root[0000, 0]@7..8#ROOT2024 S
+ GROUP {} 42:Root[0000, 0]@9..10#ROOT2024 42:Root[0000, 0]@22..23#ROOT2024 42:Root[0000, 0]@9..23#ROOT2024
+ IDENT 42:Root[0000, 0]@11..16#ROOT2024 field
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 : [alone]
+ IDENT 42:Root[0000, 0]@18..21#ROOT2024 u32
+
+
+ IDENT 42:Root[0000, 0]@0..13#ROOT2024 compile_error
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 ! [joint]
+ GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@64..65#ROOT2024 42:Root[0000, 0]@14..65#ROOT2024
+ LITER 42:Root[0000, 0]@15..64#ROOT2024 Str #[derive(DeriveError)] struct S {field 58 u32 }
+ PUNCT 42:Root[0000, 0]@65..66#ROOT2024 ; [alone]
+ "#]],
);
}
@@ -74,45 +96,41 @@ fn test_fn_like_macro_noop() {
"fn_like_noop",
r#"ident, 0, 1, []"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- LITERAL Integer 0 1
- PUNCH , [alone] 1
- LITERAL Integer 1 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1
-
-
-
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- LITERAL Integer 0 1
- PUNCH , [alone] 1
- LITERAL Integer 1 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1"#]],
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ LITER 1 Integer 0
+ PUNCT 1 , [alone]
+ LITER 1 Integer 1
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
+
+
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ LITER 1 Integer 0
+ PUNCT 1 , [alone]
+ LITER 1 Integer 1
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- LITERAL Integer 0 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@8..9#ROOT2024
- LITERAL Integer 1 42:Root[0000, 0]@10..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024
-
-
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- LITERAL Integer 0 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@8..9#ROOT2024
- LITERAL Integer 1 42:Root[0000, 0]@10..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@7..8#ROOT2024 Integer 0
+ PUNCT 42:Root[0000, 0]@8..9#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@10..11#ROOT2024 Integer 1
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@13..15#ROOT2024
+
+
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@7..8#ROOT2024 Integer 0
+ PUNCT 42:Root[0000, 0]@8..9#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@10..11#ROOT2024 Integer 1
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@13..15#ROOT2024
+ "#]],
);
}
@@ -122,29 +140,25 @@ fn test_fn_like_macro_clone_ident_subtree() {
"fn_like_clone_tokens",
r#"ident, []"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
-
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1"#]],
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@8..9#ROOT2024
-
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@8..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024
+ "#]],
);
}
@@ -154,21 +168,17 @@ fn test_fn_like_macro_clone_raw_ident() {
"fn_like_clone_tokens",
"r#async",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT r#async 1
-
+ IDENT 1 r#async
- SUBTREE $$ 1 1
- IDENT r#async 1"#]],
+ IDENT 1 r#async
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#async 42:Root[0000, 0]@0..7#ROOT2024
+ IDENT 42:Root[0000, 0]@2..7#ROOT2024 r#async
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#async 42:Root[0000, 0]@0..7#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@2..7#ROOT2024 r#async
+ "#]],
);
}
@@ -178,23 +188,19 @@ fn test_fn_like_fn_like_span_join() {
"fn_like_span_join",
"foo bar",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT foo 1
- IDENT bar 1
-
+ IDENT 1 foo
+ IDENT 1 bar
- SUBTREE $$ 1 1
- IDENT r#joined 1"#]],
+ IDENT 1 r#joined
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT foo 42:Root[0000, 0]@0..3#ROOT2024
- IDENT bar 42:Root[0000, 0]@8..11#ROOT2024
-
+ IDENT 42:Root[0000, 0]@0..3#ROOT2024 foo
+ IDENT 42:Root[0000, 0]@8..11#ROOT2024 bar
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#joined 42:Root[0000, 0]@0..11#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..11#ROOT2024 r#joined
+ "#]],
);
}
@@ -204,29 +210,25 @@ fn test_fn_like_fn_like_span_ops() {
"fn_like_span_ops",
"set_def_site resolved_at_def_site start_span",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT set_def_site 1
- IDENT resolved_at_def_site 1
- IDENT start_span 1
+ IDENT 1 set_def_site
+ IDENT 1 resolved_at_def_site
+ IDENT 1 start_span
-
- SUBTREE $$ 1 1
- IDENT set_def_site 0
- IDENT resolved_at_def_site 1
- IDENT start_span 1"#]],
+ IDENT 0 set_def_site
+ IDENT 1 resolved_at_def_site
+ IDENT 1 start_span
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT set_def_site 42:Root[0000, 0]@0..12#ROOT2024
- IDENT resolved_at_def_site 42:Root[0000, 0]@13..33#ROOT2024
- IDENT start_span 42:Root[0000, 0]@34..44#ROOT2024
-
+ IDENT 42:Root[0000, 0]@0..12#ROOT2024 set_def_site
+ IDENT 42:Root[0000, 0]@13..33#ROOT2024 resolved_at_def_site
+ IDENT 42:Root[0000, 0]@34..44#ROOT2024 start_span
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT set_def_site 41:Root[0000, 0]@0..150#ROOT2024
- IDENT resolved_at_def_site 42:Root[0000, 0]@13..33#ROOT2024
- IDENT start_span 42:Root[0000, 0]@34..34#ROOT2024"#]],
+ IDENT 41:Root[0000, 0]@0..150#ROOT2024 set_def_site
+ IDENT 42:Root[0000, 0]@13..33#ROOT2024 resolved_at_def_site
+ IDENT 42:Root[0000, 0]@34..34#ROOT2024 start_span
+ "#]],
);
}
@@ -236,51 +238,39 @@ fn test_fn_like_mk_literals() {
"fn_like_mk_literals",
r#""#,
expect![[r#"
- SUBTREE $$ 1 1
-
-
-
- SUBTREE $$ 1 1
- LITERAL ByteStr byte_string 1
- LITERAL Char c 1
- LITERAL Str string 1
- LITERAL Str -string 1
- LITERAL CStr cstring 1
- LITERAL Float 3.14f64 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f64 1
- LITERAL Float 3.14 1
- PUNCH - [alone] 1
- LITERAL Float 3.14 1
- LITERAL Integer 123i64 1
- PUNCH - [alone] 1
- LITERAL Integer 123i64 1
- LITERAL Integer 123 1
- PUNCH - [alone] 1
- LITERAL Integer 123 1"#]],
+
+
+ LITER 1 ByteStr byte_string
+ LITER 1 Char c
+ LITER 1 Str string
+ LITER 1 Str -string
+ LITER 1 CStr cstring
+ LITER 1 Float 3.14f64
+ LITER 1 Float -3.14f64
+ LITER 1 Float 3.14
+ LITER 1 Float -3.14
+ LITER 1 Integer 123i64
+ LITER 1 Integer -123i64
+ LITER 1 Integer 123
+ LITER 1 Integer -123
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
-
-
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL ByteStr byte_string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Char c 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str -string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL CStr cstring 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14f64 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14f64 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123i64 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123i64 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123 42:Root[0000, 0]@0..100#ROOT2024"#]],
+
+
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 ByteStr byte_string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Char c
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Str string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Str -string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 CStr cstring
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float 3.14f64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float -3.14f64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float 3.14
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float -3.14
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 123i64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer -123i64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 123
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer -123
+ "#]],
);
}
@@ -290,21 +280,17 @@ fn test_fn_like_mk_idents() {
"fn_like_mk_idents",
r#""#,
expect![[r#"
- SUBTREE $$ 1 1
-
- SUBTREE $$ 1 1
- IDENT standard 1
- IDENT r#raw 1"#]],
+ IDENT 1 standard
+ IDENT 1 r#raw
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT standard 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#raw 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..100#ROOT2024 standard
+ IDENT 42:Root[0000, 0]@0..100#ROOT2024 r#raw
+ "#]],
);
}
@@ -314,97 +300,93 @@ fn test_fn_like_macro_clone_literals() {
"fn_like_clone_tokens",
r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###,
expect![[r#"
- SUBTREE $$ 1 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 4i64 1
- PUNCH , [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- LITERAL Str hello bridge 1
- PUNCH , [alone] 1
- LITERAL Err(()) "suffixed"suffix 1
- PUNCH , [alone] 1
- LITERAL StrRaw(2) raw 1
- PUNCH , [alone] 1
- LITERAL Char a 1
- PUNCH , [alone] 1
- LITERAL Byte b 1
- PUNCH , [alone] 1
- LITERAL CStr null 1
-
-
-
- SUBTREE $$ 1 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 4i64 1
- PUNCH , [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- LITERAL Str hello bridge 1
- PUNCH , [alone] 1
- LITERAL Str suffixedsuffix 1
- PUNCH , [alone] 1
- LITERAL StrRaw(2) raw 1
- PUNCH , [alone] 1
- LITERAL Char a 1
- PUNCH , [alone] 1
- LITERAL Byte b 1
- PUNCH , [alone] 1
- LITERAL CStr null 1"#]],
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 4i64
+ PUNCT 1 , [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ LITER 1 Str hello bridge
+ PUNCT 1 , [alone]
+ LITER 1 Str suffixedsuffix
+ PUNCT 1 , [alone]
+ LITER 1 StrRaw(2) raw
+ PUNCT 1 , [alone]
+ LITER 1 Char a
+ PUNCT 1 , [alone]
+ LITER 1 Byte b
+ PUNCT 1 , [alone]
+ LITER 1 CStr null
+
+
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 4i64
+ PUNCT 1 , [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ LITER 1 Str hello bridge
+ PUNCT 1 , [alone]
+ LITER 1 Str suffixedsuffix
+ PUNCT 1 , [alone]
+ LITER 1 StrRaw(2) raw
+ PUNCT 1 , [alone]
+ LITER 1 Char a
+ PUNCT 1 , [alone]
+ LITER 1 Byte b
+ PUNCT 1 , [alone]
+ LITER 1 CStr null
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@0..4#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@4..5#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@6..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@13..14#ROOT2024
- LITERAL Integer 4i64 42:Root[0000, 0]@14..18#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@18..19#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@20..27#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@27..28#ROOT2024
- LITERAL Str hello bridge 42:Root[0000, 0]@29..43#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@43..44#ROOT2024
- LITERAL Err(()) "suffixed"suffix 42:Root[0000, 0]@45..61#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@61..62#ROOT2024
- LITERAL StrRaw(2) raw 42:Root[0000, 0]@63..73#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@73..74#ROOT2024
- LITERAL Char a 42:Root[0000, 0]@75..78#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@78..79#ROOT2024
- LITERAL Byte b 42:Root[0000, 0]@80..84#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@84..85#ROOT2024
- LITERAL CStr null 42:Root[0000, 0]@86..93#ROOT2024
-
-
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@0..4#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@4..5#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@6..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@13..14#ROOT2024
- LITERAL Integer 4i64 42:Root[0000, 0]@14..18#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@18..19#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@20..27#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@27..28#ROOT2024
- LITERAL Str hello bridge 42:Root[0000, 0]@29..43#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@43..44#ROOT2024
- LITERAL Str suffixedsuffix 42:Root[0000, 0]@45..61#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@61..62#ROOT2024
- LITERAL StrRaw(2) raw 42:Root[0000, 0]@63..73#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@73..74#ROOT2024
- LITERAL Char a 42:Root[0000, 0]@75..78#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@78..79#ROOT2024
- LITERAL Byte b 42:Root[0000, 0]@80..84#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@84..85#ROOT2024
- LITERAL CStr null 42:Root[0000, 0]@86..93#ROOT2024"#]],
+ LITER 42:Root[0000, 0]@0..4#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@4..5#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@6..11#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@14..18#ROOT2024 Integer 4i64
+ PUNCT 42:Root[0000, 0]@18..19#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@20..27#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@27..28#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@29..43#ROOT2024 Str hello bridge
+ PUNCT 42:Root[0000, 0]@43..44#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@45..61#ROOT2024 Str suffixedsuffix
+ PUNCT 42:Root[0000, 0]@61..62#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@63..73#ROOT2024 StrRaw(2) raw
+ PUNCT 42:Root[0000, 0]@73..74#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@75..78#ROOT2024 Char a
+ PUNCT 42:Root[0000, 0]@78..79#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@80..84#ROOT2024 Byte b
+ PUNCT 42:Root[0000, 0]@84..85#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@86..93#ROOT2024 CStr null
+
+
+ LITER 42:Root[0000, 0]@0..4#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@4..5#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@6..11#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@14..18#ROOT2024 Integer 4i64
+ PUNCT 42:Root[0000, 0]@18..19#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@20..27#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@27..28#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@29..43#ROOT2024 Str hello bridge
+ PUNCT 42:Root[0000, 0]@43..44#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@45..61#ROOT2024 Str suffixedsuffix
+ PUNCT 42:Root[0000, 0]@61..62#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@63..73#ROOT2024 StrRaw(2) raw
+ PUNCT 42:Root[0000, 0]@73..74#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@75..78#ROOT2024 Char a
+ PUNCT 42:Root[0000, 0]@78..79#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@80..84#ROOT2024 Byte b
+ PUNCT 42:Root[0000, 0]@84..85#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@86..93#ROOT2024 CStr null
+ "#]],
);
}
@@ -414,61 +396,57 @@ fn test_fn_like_macro_negative_literals() {
"fn_like_clone_tokens",
r###"-1u16, - 2_u32, -3.14f32, - 2.7"###,
expect![[r#"
- SUBTREE $$ 1 1
- PUNCH - [alone] 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 2.7 1
-
-
-
- SUBTREE $$ 1 1
- PUNCH - [alone] 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 2.7 1"#]],
+ PUNCT 1 - [alone]
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 2.7
+
+
+ PUNCT 1 - [alone]
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 2.7
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..1#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@1..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@7..8#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@9..14#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@14..15#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@16..17#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@17..24#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@24..25#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@26..27#ROOT2024
- LITERAL Float 2.7 42:Root[0000, 0]@28..31#ROOT2024
-
-
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..1#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@1..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@7..8#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@9..14#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@14..15#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@16..17#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@17..24#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@24..25#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@26..27#ROOT2024
- LITERAL Float 2.7 42:Root[0000, 0]@28..31#ROOT2024"#]],
+ PUNCT 42:Root[0000, 0]@0..1#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@1..5#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@7..8#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@9..14#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@14..15#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@17..24#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@24..25#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@26..27#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@28..31#ROOT2024 Float 2.7
+
+
+ PUNCT 42:Root[0000, 0]@0..1#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@1..5#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@7..8#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@9..14#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@14..15#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@17..24#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@24..25#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@26..27#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@28..31#ROOT2024 Float 2.7
+ "#]],
);
}
@@ -482,40 +460,64 @@ fn test_attr_macro() {
r#"mod m {}"#,
r#"some arguments"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT mod 1
- IDENT m 1
- SUBTREE {} 1 1
-
- SUBTREE $$ 1 1
- IDENT some 1
- IDENT arguments 1
-
- SUBTREE $$ 1 1
- IDENT compile_error 1
- PUNCH ! [alone] 1
- SUBTREE () 1 1
- LITERAL Str #[attr_error(some arguments)] mod m {} 1
- PUNCH ; [alone] 1"#]],
+ IDENT 1 mod
+ IDENT 1 m
+ GROUP {} 1 1 1
+
+
+ IDENT 1 some
+ IDENT 1 arguments
+
+
+ IDENT 1 compile_error
+ PUNCT 1 ! [joint]
+ GROUP () 1 1 1
+ LITER 1 Str #[attr_error(some arguments )] mod m {}
+ PUNCT 1 ; [alone]
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT mod 42:Root[0000, 0]@0..3#ROOT2024
- IDENT m 42:Root[0000, 0]@4..5#ROOT2024
- SUBTREE {} 42:Root[0000, 0]@6..7#ROOT2024 42:Root[0000, 0]@7..8#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT some 42:Root[0000, 0]@0..4#ROOT2024
- IDENT arguments 42:Root[0000, 0]@5..14#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT compile_error 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ! [alone] 42:Root[0000, 0]@0..100#ROOT2024
- SUBTREE () 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str #[attr_error(some arguments)] mod m {} 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..3#ROOT2024 mod
+ IDENT 42:Root[0000, 0]@4..5#ROOT2024 m
+ GROUP {} 42:Root[0000, 0]@6..7#ROOT2024 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@6..8#ROOT2024
+
+
+ IDENT 42:Root[0000, 0]@0..4#ROOT2024 some
+ IDENT 42:Root[0000, 0]@5..14#ROOT2024 arguments
+
+
+ IDENT 42:Root[0000, 0]@0..13#ROOT2024 compile_error
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 ! [joint]
+ GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@56..57#ROOT2024 42:Root[0000, 0]@14..57#ROOT2024
+ LITER 42:Root[0000, 0]@15..56#ROOT2024 Str #[attr_error(some arguments )] mod m {}
+ PUNCT 42:Root[0000, 0]@57..58#ROOT2024 ; [alone]
+ "#]],
);
}
+#[test]
+#[should_panic = "called `Result::unwrap()` on an `Err` value: \"Mismatched token groups\""]
+fn test_broken_input_unclosed_delim() {
+ assert_expand("fn_like_clone_tokens", r###"{"###, expect![[]], expect![[]]);
+}
+
+#[test]
+#[should_panic = "called `Result::unwrap()` on an `Err` value: \"Unexpected '}'\""]
+fn test_broken_input_unopened_delim() {
+ assert_expand("fn_like_clone_tokens", r###"}"###, expect![[]], expect![[]]);
+}
+
+#[test]
+#[should_panic = "called `Result::unwrap()` on an `Err` value: \"Expected '}'\""]
+fn test_broken_input_mismatched_delim() {
+ assert_expand("fn_like_clone_tokens", r###"(}"###, expect![[]], expect![[]]);
+}
+
+#[test]
+#[should_panic = "called `Result::unwrap()` on an `Err` value: \"Invalid identifier: `🪟`\""]
+fn test_broken_input_unknowm_token() {
+ assert_expand("fn_like_clone_tokens", r###"🪟"###, expect![[]], expect![[]]);
+}
+
/// Tests that we find and classify all proc macros correctly.
#[test]
fn list_test_macros() {
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index f5a76e30bb..1b12308ad6 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,31 +1,25 @@
//! utils used in proc-macro tests
use expect_test::Expect;
-use span::{EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext};
-use tt::TextRange;
+use span::{
+ EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TextRange,
+};
-use crate::{EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path};
+use crate::{
+ EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, token_stream::TokenStream,
+};
-fn parse_string(call_site: SpanId, src: &str) -> crate::server_impl::TokenStream<SpanId> {
- crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
- syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
- .unwrap()
- .0
- .into_vec(),
- ))
+fn parse_string(call_site: SpanId, src: &str) -> TokenStream<SpanId> {
+ TokenStream::from_str(src, call_site).unwrap()
}
fn parse_string_spanned(
anchor: SpanAnchor,
call_site: SyntaxContext,
src: &str,
-) -> crate::server_impl::TokenStream<Span> {
- crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
- syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src)
- .unwrap()
- .0
- .into_vec(),
- ))
+) -> TokenStream<Span> {
+ TokenStream::from_str(src, Span { range: TextRange::default(), anchor, ctx: call_site })
+ .unwrap()
}
pub fn assert_expand(
@@ -60,16 +54,18 @@ fn assert_expand_impl(
let def_site = SpanId(0);
let call_site = SpanId(1);
let mixed_site = SpanId(2);
- let input_ts = parse_string(call_site, input).into_subtree(call_site);
- let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site));
+ let input_ts = parse_string(call_site, input);
+ let attr_ts = attr.map(|attr| parse_string(call_site, attr));
let input_ts_string = format!("{input_ts:?}");
let attr_ts_string = attr_ts.as_ref().map(|it| format!("{it:?}"));
let res =
expander.expand(macro_name, input_ts, attr_ts, def_site, call_site, mixed_site).unwrap();
expect.assert_eq(&format!(
- "{input_ts_string}\n\n{}\n\n{res:?}",
- attr_ts_string.unwrap_or_default()
+ "{input_ts_string}{}{}{}",
+ if attr_ts_string.is_some() { "\n\n" } else { "" },
+ attr_ts_string.unwrap_or_default(),
+ if res.is_empty() { String::new() } else { format!("\n\n{res:?}") }
));
let def_site = Span {
@@ -90,17 +86,18 @@ fn assert_expand_impl(
};
let mixed_site = call_site;
- let fixture =
- parse_string_spanned(call_site.anchor, call_site.ctx, input).into_subtree(call_site);
- let attr = attr.map(|attr| {
- parse_string_spanned(call_site.anchor, call_site.ctx, attr).into_subtree(call_site)
- });
+ let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, input);
+ let attr = attr.map(|attr| parse_string_spanned(call_site.anchor, call_site.ctx, attr));
let fixture_string = format!("{fixture:?}");
let attr_string = attr.as_ref().map(|it| format!("{it:?}"));
let res = expander.expand(macro_name, fixture, attr, def_site, call_site, mixed_site).unwrap();
- expect_spanned
- .assert_eq(&format!("{fixture_string}\n\n{}\n\n{res:#?}", attr_string.unwrap_or_default()));
+ expect_spanned.assert_eq(&format!(
+ "{fixture_string}{}{}{}",
+ if attr_string.is_some() { "\n\n" } else { "" },
+ attr_string.unwrap_or_default(),
+ if res.is_empty() { String::new() } else { format!("\n\n{res:?}") }
+ ));
}
pub(crate) fn list() -> Vec<String> {
diff --git a/crates/proc-macro-srv/src/token_stream.rs b/crates/proc-macro-srv/src/token_stream.rs
new file mode 100644
index 0000000000..628d694239
--- /dev/null
+++ b/crates/proc-macro-srv/src/token_stream.rs
@@ -0,0 +1,745 @@
+//! The proc-macro server token stream implementation.
+
+use core::fmt;
+use std::sync::Arc;
+
+use intern::Symbol;
+use proc_macro::Delimiter;
+use rustc_lexer::{DocStyle, LiteralKind};
+
+use crate::bridge::{DelimSpan, Group, Ident, LitKind, Literal, Punct, TokenTree};
+
+/// Abstraction over spans that can derive a sub-span from a byte range; lets tests parse token streams with dynamic span ranges.
+pub(crate) trait SpanLike {
+ fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self;
+}
+
+#[derive(Clone)]
+pub struct TokenStream<S>(pub(crate) Arc<Vec<TokenTree<S>>>);
+
+impl<S> Default for TokenStream<S> {
+ fn default() -> Self {
+ Self(Default::default())
+ }
+}
+
+impl<S> TokenStream<S> {
+ pub fn new(tts: Vec<TokenTree<S>>) -> TokenStream<S> {
+ TokenStream(Arc::new(tts))
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+
+ pub fn len(&self) -> usize {
+ self.0.len()
+ }
+
+ pub fn iter(&self) -> TokenStreamIter<'_, S> {
+ TokenStreamIter::new(self)
+ }
+
+ pub(crate) fn from_str(s: &str, span: S) -> Result<Self, String>
+ where
+ S: SpanLike + Copy,
+ {
+ let mut groups = Vec::new();
+ groups.push((proc_macro::Delimiter::None, 0..0, vec![]));
+ let mut offset = 0;
+ let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No).peekable();
+ while let Some(token) = tokens.next() {
+ let range = offset..offset + token.len as usize;
+ offset += token.len as usize;
+
+ let mut is_joint = || {
+ tokens.peek().is_some_and(|token| {
+ matches!(
+ token.kind,
+ rustc_lexer::TokenKind::RawLifetime
+ | rustc_lexer::TokenKind::GuardedStrPrefix
+ | rustc_lexer::TokenKind::Lifetime { .. }
+ | rustc_lexer::TokenKind::Semi
+ | rustc_lexer::TokenKind::Comma
+ | rustc_lexer::TokenKind::Dot
+ | rustc_lexer::TokenKind::OpenParen
+ | rustc_lexer::TokenKind::CloseParen
+ | rustc_lexer::TokenKind::OpenBrace
+ | rustc_lexer::TokenKind::CloseBrace
+ | rustc_lexer::TokenKind::OpenBracket
+ | rustc_lexer::TokenKind::CloseBracket
+ | rustc_lexer::TokenKind::At
+ | rustc_lexer::TokenKind::Pound
+ | rustc_lexer::TokenKind::Tilde
+ | rustc_lexer::TokenKind::Question
+ | rustc_lexer::TokenKind::Colon
+ | rustc_lexer::TokenKind::Dollar
+ | rustc_lexer::TokenKind::Eq
+ | rustc_lexer::TokenKind::Bang
+ | rustc_lexer::TokenKind::Lt
+ | rustc_lexer::TokenKind::Gt
+ | rustc_lexer::TokenKind::Minus
+ | rustc_lexer::TokenKind::And
+ | rustc_lexer::TokenKind::Or
+ | rustc_lexer::TokenKind::Plus
+ | rustc_lexer::TokenKind::Star
+ | rustc_lexer::TokenKind::Slash
+ | rustc_lexer::TokenKind::Percent
+ | rustc_lexer::TokenKind::Caret
+ )
+ })
+ };
+
+ let Some((open_delim, _, tokenstream)) = groups.last_mut() else {
+ return Err("Unbalanced delimiters".to_owned());
+ };
+ match token.kind {
+ rustc_lexer::TokenKind::OpenParen => {
+ groups.push((proc_macro::Delimiter::Parenthesis, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseParen if *open_delim != Delimiter::Parenthesis => {
+ return if *open_delim == Delimiter::None {
+ Err("Unexpected ')'".to_owned())
+ } else {
+ Err("Expected ')'".to_owned())
+ };
+ }
+ rustc_lexer::TokenKind::CloseParen => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::OpenBrace => {
+ groups.push((proc_macro::Delimiter::Brace, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseBrace if *open_delim != Delimiter::Brace => {
+ return if *open_delim == Delimiter::None {
+ Err("Unexpected '}'".to_owned())
+ } else {
+ Err("Expected '}'".to_owned())
+ };
+ }
+ rustc_lexer::TokenKind::CloseBrace => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::OpenBracket => {
+ groups.push((proc_macro::Delimiter::Bracket, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseBracket if *open_delim != Delimiter::Bracket => {
+ return if *open_delim == Delimiter::None {
+ Err("Unexpected ']'".to_owned())
+ } else {
+ Err("Expected ']'".to_owned())
+ };
+ }
+ rustc_lexer::TokenKind::CloseBracket => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::LineComment { doc_style: None }
+ | rustc_lexer::TokenKind::BlockComment { doc_style: None, terminated: _ } => {
+ continue;
+ }
+ rustc_lexer::TokenKind::LineComment { doc_style: Some(doc_style) } => {
+ let text = &s[range.start + 2..range.end];
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
+ if doc_style == DocStyle::Inner {
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
+ }
+ tokenstream.push(TokenTree::Group(Group {
+ delimiter: Delimiter::Bracket,
+ stream: Some(TokenStream::new(vec![
+ TokenTree::Ident(Ident {
+ sym: Symbol::intern("doc"),
+ is_raw: false,
+ span,
+ }),
+ TokenTree::Punct(Punct { ch: b'=', joint: false, span }),
+ TokenTree::Literal(Literal {
+ kind: LitKind::Str,
+ symbol: Symbol::intern(&text.escape_debug().to_string()),
+ suffix: None,
+ span: span.derive_ranged(range),
+ }),
+ ])),
+ span: DelimSpan { open: span, close: span, entire: span },
+ }));
+ }
+ rustc_lexer::TokenKind::BlockComment { doc_style: Some(doc_style), terminated } => {
+ let text =
+ &s[range.start + 2..if terminated { range.end - 2 } else { range.end }];
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
+ if doc_style == DocStyle::Inner {
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
+ }
+ tokenstream.push(TokenTree::Group(Group {
+ delimiter: Delimiter::Bracket,
+ stream: Some(TokenStream::new(vec![
+ TokenTree::Ident(Ident {
+ sym: Symbol::intern("doc"),
+ is_raw: false,
+ span,
+ }),
+ TokenTree::Punct(Punct { ch: b'=', joint: false, span }),
+ TokenTree::Literal(Literal {
+ kind: LitKind::Str,
+ symbol: Symbol::intern(&text.escape_debug().to_string()),
+ suffix: None,
+ span: span.derive_ranged(range),
+ }),
+ ])),
+ span: DelimSpan { open: span, close: span, entire: span },
+ }));
+ }
+ rustc_lexer::TokenKind::Whitespace => continue,
+ rustc_lexer::TokenKind::Frontmatter { .. } => unreachable!(),
+ rustc_lexer::TokenKind::Unknown => {
+ return Err(format!("Unknown token: `{}`", &s[range]));
+ }
+ rustc_lexer::TokenKind::UnknownPrefix => {
+ return Err(format!("Unknown prefix: `{}`", &s[range]));
+ }
+ rustc_lexer::TokenKind::UnknownPrefixLifetime => {
+ return Err(format!("Unknown lifetime prefix: `{}`", &s[range]));
+ }
+            // FIXME: This should be an error on edition >= 2024, but the proc-macro server
+            // currently has no way to fetch the edition — and which crate's edition applies here?
+ rustc_lexer::TokenKind::GuardedStrPrefix => {
+ tokenstream.push(TokenTree::Punct(Punct {
+ ch: s.as_bytes()[range.start],
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(TokenTree::Punct(Punct {
+ ch: s.as_bytes()[range.start + 1],
+ joint: is_joint(),
+ span: span.derive_ranged(range.start + 1..range.end),
+ }))
+ }
+ rustc_lexer::TokenKind::Ident => tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: false,
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::InvalidIdent => {
+ return Err(format!("Invalid identifier: `{}`", &s[range]));
+ }
+ rustc_lexer::TokenKind::RawIdent => {
+ let range = range.start + 2..range.end;
+ tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: true,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
+ tokenstream.push(TokenTree::Literal(literal_from_lexer(
+ &s[range.clone()],
+ span.derive_ranged(range),
+ kind,
+ suffix_start,
+ )))
+ }
+ rustc_lexer::TokenKind::RawLifetime => {
+ let range = range.start + 1 + 2..range.end;
+ tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'\'',
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: true,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+ if starts_with_number {
+ return Err("Lifetime cannot start with a number".to_owned());
+ }
+ let range = range.start + 1..range.end;
+ tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'\'',
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: false,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Semi => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b';',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Comma => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b',',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Dot => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'.',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::At => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'@',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Pound => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'#',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Tilde => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'~',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Question => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'?',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Colon => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b':',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Dollar => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'$',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Eq => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'=',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Bang => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'!',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Lt => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'<',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Gt => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'>',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Minus => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'-',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::And => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'&',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Or => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'|',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Plus => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'+',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Star => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'*',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Slash => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'/',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Caret => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'^',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Percent => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'%',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Eof => break,
+ }
+ }
+ if let Some((Delimiter::None, _, tokentrees)) = groups.pop()
+ && groups.is_empty()
+ {
+ Ok(TokenStream::new(tokentrees))
+ } else {
+ Err("Mismatched token groups".to_owned())
+ }
+ }
+}
+
+impl<S> fmt::Display for TokenStream<S> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ for tt in self.0.iter() {
+ display_token_tree(tt, f)?;
+ }
+ Ok(())
+ }
+}
+
+fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match tt {
+ TokenTree::Group(Group { delimiter, stream, span: _ }) => {
+ write!(
+ f,
+ "{}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => "(",
+ proc_macro::Delimiter::Brace => "{",
+ proc_macro::Delimiter::Bracket => "[",
+ proc_macro::Delimiter::None => "",
+ }
+ )?;
+ if let Some(stream) = stream {
+ write!(f, "{stream}")?;
+ }
+ write!(
+ f,
+ "{}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => ")",
+ proc_macro::Delimiter::Brace => "}",
+ proc_macro::Delimiter::Bracket => "]",
+ proc_macro::Delimiter::None => "",
+ }
+ )?;
+ }
+ TokenTree::Punct(Punct { ch, joint, span: _ }) => {
+ write!(f, "{ch}{}", if *joint { "" } else { " " })?
+ }
+ TokenTree::Ident(Ident { sym, is_raw, span: _ }) => {
+ if *is_raw {
+ write!(f, "r#")?;
+ }
+ write!(f, "{sym} ")?;
+ }
+ TokenTree::Literal(lit) => {
+ display_fmt_literal(lit, f)?;
+ let joint = match lit.kind {
+ LitKind::Str
+ | LitKind::StrRaw(_)
+ | LitKind::ByteStr
+ | LitKind::ByteStrRaw(_)
+ | LitKind::CStr
+ | LitKind::CStrRaw(_) => true,
+ _ => false,
+ };
+ if !joint {
+ write!(f, " ")?;
+ }
+ }
+ }
+ Ok(())
+}
+
+pub fn literal_to_string<S>(literal: &Literal<S>) -> String {
+ let mut buf = String::new();
+ display_fmt_literal(literal, &mut buf).unwrap();
+ buf
+}
+
+fn display_fmt_literal<S>(literal: &Literal<S>, f: &mut impl std::fmt::Write) -> fmt::Result {
+ match literal.kind {
+ LitKind::Byte => write!(f, "b'{}'", literal.symbol),
+ LitKind::Char => write!(f, "'{}'", literal.symbol),
+ LitKind::Integer | LitKind::Float | LitKind::ErrWithGuar => {
+ write!(f, "{}", literal.symbol)
+ }
+ LitKind::Str => write!(f, "\"{}\"", literal.symbol),
+ LitKind::ByteStr => write!(f, "b\"{}\"", literal.symbol),
+ LitKind::CStr => write!(f, "c\"{}\"", literal.symbol),
+ LitKind::StrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ LitKind::ByteStrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ LitKind::CStrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ }?;
+ if let Some(suffix) = &literal.suffix {
+ write!(f, "{suffix}")?;
+ }
+ Ok(())
+}
+
+impl<S: fmt::Debug> fmt::Debug for TokenStream<S> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ debug_token_stream(self, 0, f)
+ }
+}
+
+fn debug_token_stream<S: fmt::Debug>(
+ ts: &TokenStream<S>,
+ depth: usize,
+ f: &mut std::fmt::Formatter<'_>,
+) -> std::fmt::Result {
+ for tt in ts.0.iter() {
+ debug_token_tree(tt, depth, f)?;
+ }
+ Ok(())
+}
+
+fn debug_token_tree<S: fmt::Debug>(
+ tt: &TokenTree<S>,
+ depth: usize,
+ f: &mut std::fmt::Formatter<'_>,
+) -> std::fmt::Result {
+ write!(f, "{:indent$}", "", indent = depth * 2)?;
+ match tt {
+ TokenTree::Group(Group { delimiter, stream, span }) => {
+ writeln!(
+ f,
+ "GROUP {}{} {:#?} {:#?} {:#?}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => "(",
+ proc_macro::Delimiter::Brace => "{",
+ proc_macro::Delimiter::Bracket => "[",
+ proc_macro::Delimiter::None => "$",
+ },
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => ")",
+ proc_macro::Delimiter::Brace => "}",
+ proc_macro::Delimiter::Bracket => "]",
+ proc_macro::Delimiter::None => "$",
+ },
+ span.open,
+ span.close,
+ span.entire,
+ )?;
+ if let Some(stream) = stream {
+ debug_token_stream(stream, depth + 1, f)?;
+ }
+ return Ok(());
+ }
+ TokenTree::Punct(Punct { ch, joint, span }) => write!(
+ f,
+ "PUNCT {span:#?} {} {}",
+ *ch as char,
+ if *joint { "[joint]" } else { "[alone]" }
+ )?,
+ TokenTree::Ident(Ident { sym, is_raw, span }) => {
+ write!(f, "IDENT {span:#?} ")?;
+ if *is_raw {
+ write!(f, "r#")?;
+ }
+ write!(f, "{sym}")?;
+ }
+ TokenTree::Literal(Literal { kind, symbol, suffix, span }) => write!(
+ f,
+ "LITER {span:#?} {kind:?} {symbol}{}",
+ match suffix {
+ Some(suffix) => suffix.clone(),
+ None => Symbol::intern(""),
+ }
+ )?,
+ }
+ writeln!(f)
+}
+
+impl<S: Copy> TokenStream<S> {
+    /// Push `tt` onto the end of the stream. Uses `Arc::make_mut` so the
+    /// backing vector is only cloned when the stream is shared.
+ pub(crate) fn push_tree(&mut self, tt: TokenTree<S>) {
+ let vec_mut = Arc::make_mut(&mut self.0);
+ vec_mut.push(tt);
+ }
+
+    /// Append every token tree of `stream` onto the end of this stream
+    /// (adjacent tokens are not glued together). Uses `Arc::make_mut`
+    /// so the backing vector is only cloned when the stream is shared.
+ pub(crate) fn push_stream(&mut self, stream: TokenStream<S>) {
+ let vec_mut = Arc::make_mut(&mut self.0);
+
+ let stream_iter = stream.0.iter().cloned();
+
+ vec_mut.extend(stream_iter);
+ }
+}
+
+impl<S> FromIterator<TokenTree<S>> for TokenStream<S> {
+ fn from_iter<I: IntoIterator<Item = TokenTree<S>>>(iter: I) -> Self {
+ TokenStream::new(iter.into_iter().collect::<Vec<TokenTree<S>>>())
+ }
+}
+
+#[derive(Clone)]
+pub struct TokenStreamIter<'t, S> {
+ stream: &'t TokenStream<S>,
+ index: usize,
+}
+
+impl<'t, S> TokenStreamIter<'t, S> {
+ fn new(stream: &'t TokenStream<S>) -> Self {
+ TokenStreamIter { stream, index: 0 }
+ }
+}
+
+impl<'t, S> Iterator for TokenStreamIter<'t, S> {
+ type Item = &'t TokenTree<S>;
+
+ fn next(&mut self) -> Option<&'t TokenTree<S>> {
+ self.stream.0.get(self.index).map(|tree| {
+ self.index += 1;
+ tree
+ })
+ }
+}
+
+pub(super) fn literal_from_lexer<Span>(
+ s: &str,
+ span: Span,
+ kind: rustc_lexer::LiteralKind,
+ suffix_start: u32,
+) -> Literal<Span> {
+ let (kind, start_offset, end_offset) = match kind {
+ LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+ LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+ LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+ LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+ LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+ LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+ LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+ LiteralKind::RawStr { n_hashes } => (
+ LitKind::StrRaw(n_hashes.unwrap_or_default()),
+ 2 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawByteStr { n_hashes } => (
+ LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawCStr { n_hashes } => (
+ LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ };
+
+ let (lit, suffix) = s.split_at(suffix_start as usize);
+ let lit = &lit[start_offset..lit.len() - end_offset];
+ let suffix = match suffix {
+ "" | "_" => None,
+ suffix => Some(Symbol::intern(suffix)),
+ };
+
+ Literal { kind, symbol: Symbol::intern(lit), suffix, span }
+}
+
+impl SpanLike for crate::SpanId {
+ fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
+ *self
+ }
+}
+
+impl SpanLike for () {
+ fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
+ *self
+ }
+}
+
+impl SpanLike for crate::Span {
+ fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self {
+ crate::Span {
+ range: span::TextRange::new(
+ span::TextSize::new(range.start as u32),
+ span::TextSize::new(range.end as u32),
+ ),
+ anchor: self.anchor,
+ ctx: self.ctx,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn roundtrip() {
+ let token_stream = TokenStream::from_str("struct T {\"string\"}", ()).unwrap();
+ token_stream.to_string();
+ assert_eq!(token_stream.to_string(), "struct T {\"string\"}");
+ }
+}
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 782ec55614..2e48c5a5a6 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -48,7 +48,6 @@ tracing-subscriber.workspace = true
tracing-tree.workspace = true
triomphe.workspace = true
toml.workspace = true
-nohash-hasher.workspace = true
walkdir = "2.5.0"
semver.workspace = true
memchr = "2.7.5"
@@ -61,7 +60,6 @@ hir-def.workspace = true
hir-ty.workspace = true
hir.workspace = true
ide-db.workspace = true
-intern.workspace = true
# This should only be used in CLI
ide-ssr.workspace = true
ide.workspace = true
diff --git a/crates/rust-analyzer/build.rs b/crates/rust-analyzer/build.rs
index 0fd381d612..cc7f112599 100644
--- a/crates/rust-analyzer/build.rs
+++ b/crates/rust-analyzer/build.rs
@@ -5,7 +5,6 @@ use std::{env, path::PathBuf, process::Command};
fn main() {
set_rerun();
set_commit_info();
- println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");
if option_env!("CFG_RELEASE").is_none() {
println!("cargo:rustc-env=POKE_RA_DEVS=1");
}
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index f9a547f611..ea0752250d 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -826,58 +826,6 @@ impl<S> fmt::Display for Ident<S> {
}
}
-impl<S> Literal<S> {
- pub fn display_no_minus(&self) -> impl fmt::Display {
- struct NoMinus<'a, S>(&'a Literal<S>);
- impl<S> fmt::Display for NoMinus<'_, S> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let symbol =
- self.0.symbol.as_str().strip_prefix('-').unwrap_or(self.0.symbol.as_str());
- match self.0.kind {
- LitKind::Byte => write!(f, "b'{symbol}'"),
- LitKind::Char => write!(f, "'{symbol}'"),
- LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{symbol}"),
- LitKind::Str => write!(f, "\"{symbol}\""),
- LitKind::ByteStr => write!(f, "b\"{symbol}\""),
- LitKind::CStr => write!(f, "c\"{symbol}\""),
- LitKind::StrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- LitKind::ByteStrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- LitKind::CStrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- }?;
- if let Some(suffix) = &self.0.suffix {
- write!(f, "{suffix}")?;
- }
- Ok(())
- }
- }
- NoMinus(self)
- }
-}
-
impl<S> fmt::Display for Literal<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index 975e361ba5..bddce0f027 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -174,10 +174,17 @@ fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> {
fn install_proc_macro_server(sh: &Shell, opts: ProcMacroServerOpt) -> anyhow::Result<()> {
let profile = if opts.dev_rel { "dev-rel" } else { "release" };
- cmd!(
+ let mut cmd = cmd!(
sh,
- "cargo +nightly install --path crates/proc-macro-srv-cli --profile={profile} --locked --force --features sysroot-abi"
- ).run()?;
+ "cargo install --path crates/proc-macro-srv-cli --profile={profile} --locked --force --features sysroot-abi"
+ );
+ if std::env::var_os("RUSTUP_TOOLCHAIN").is_none() {
+ cmd = cmd.env("RUSTUP_TOOLCHAIN", "nightly");
+ } else {
+ cmd = cmd.env("RUSTC_BOOTSTRAP", "1");
+ }
+
+ cmd.run()?;
Ok(())
}
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index ebfc7d0a94..05528505f2 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -194,6 +194,7 @@ fn check_test_attrs(path: &Path, text: &str) {
"test-utils/src/fixture.rs",
// Generated code from lints contains doc tests in string literals.
"ide-db/src/generated/lints.rs",
+ "proc-macro-srv/src/tests/mod.rs",
];
if need_panic.iter().any(|p| path.ends_with(p)) {
return;