Unnamed repository; edit this file 'description' to name the repository.
Diffstat (limited to 'crates/mbe/src/syntax_bridge.rs')
-rw-r--r-- crates/mbe/src/syntax_bridge.rs 122
1 file changed, 63 insertions(+), 59 deletions(-)
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index b89bfd74a6..3440c1dd8c 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -1,6 +1,7 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
use rustc_hash::{FxHashMap, FxHashSet};
+use span::{SpanAnchor, SpanData, SpanMap};
use stdx::{never, non_empty_vec::NonEmptyVec};
use syntax::{
ast::{self, make::tokens::doc_comment},
@@ -10,10 +11,10 @@ use syntax::{
};
use tt::{
buffer::{Cursor, TokenBuffer},
- Span, SpanData, SyntaxContext,
+ Span, SyntaxContext,
};
-use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter};
#[cfg(test)]
mod tests;
@@ -36,16 +37,20 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
/// Dummy things for testing where spans don't matter.
pub(crate) mod dummy_test_span_utils {
+ use tt::SyntaxContext;
+
use super::*;
- pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
- pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY;
+ pub type DummyTestSpanData = span::SpanData<DummyTestSyntaxContext>;
+ pub const DUMMY: DummyTestSpanData = span::SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: span::SpanAnchor {
+ file_id: span::FileId::BOGUS,
+ ast_id: span::ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: DummyTestSyntaxContext,
+ };
- #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
- pub struct DummyTestSpanAnchor;
- impl tt::SpanAnchor for DummyTestSpanAnchor {
- const DUMMY: Self = DummyTestSpanAnchor;
- }
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct DummyTestSyntaxContext;
impl SyntaxContext for DummyTestSyntaxContext {
@@ -54,27 +59,30 @@ pub(crate) mod dummy_test_span_utils {
pub struct DummyTestSpanMap;
- impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
- fn span_for(
- &self,
- range: syntax::TextRange,
- ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
- tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
+ impl SpanMapper<span::SpanData<DummyTestSyntaxContext>> for DummyTestSpanMap {
+ fn span_for(&self, range: syntax::TextRange) -> span::SpanData<DummyTestSyntaxContext> {
+ span::SpanData {
+ range,
+ anchor: span::SpanAnchor {
+ file_id: span::FileId::BOGUS,
+ ast_id: span::ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: DummyTestSyntaxContext,
+ }
}
}
}
/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
/// subtree's spans.
-pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>(
+pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
node: &SyntaxNode,
map: SpanMap,
-) -> tt::Subtree<SpanData<Anchor, Ctx>>
+) -> tt::Subtree<SpanData<Ctx>>
where
- SpanData<Anchor, Ctx>: Span,
- Anchor: Copy,
+ SpanData<Ctx>: Span,
Ctx: SyntaxContext,
- SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+ SpanMap: SpanMapper<SpanData<Ctx>>,
{
let mut c = Converter::new(node, map, Default::default(), Default::default());
convert_tokens(&mut c)
@@ -83,16 +91,15 @@ where
/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
/// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
/// be injected or hidden from the output.
-pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>(
+pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
node: &SyntaxNode,
map: SpanMap,
- append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Anchor, Ctx>>>>,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
remove: FxHashSet<SyntaxNode>,
-) -> tt::Subtree<SpanData<Anchor, Ctx>>
+) -> tt::Subtree<SpanData<Ctx>>
where
- SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
- SpanData<Anchor, Ctx>: Span,
- Anchor: Copy,
+ SpanMap: SpanMapper<SpanData<Ctx>>,
+ SpanData<Ctx>: Span,
Ctx: SyntaxContext,
{
let mut c = Converter::new(node, map, append, remove);
@@ -113,13 +120,12 @@ where
/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
/// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans.
-pub fn token_tree_to_syntax_node<Anchor, Ctx>(
- tt: &tt::Subtree<SpanData<Anchor, Ctx>>,
+pub fn token_tree_to_syntax_node<Ctx>(
+ tt: &tt::Subtree<SpanData<Ctx>>,
entry_point: parser::TopEntryPoint,
-) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>)
+) -> (Parse<SyntaxNode>, SpanMap<SpanData<Ctx>>)
where
- SpanData<Anchor, Ctx>: Span,
- Anchor: Copy,
+ SpanData<Ctx>: Span,
Ctx: SyntaxContext,
{
let buffer = match tt {
@@ -150,21 +156,20 @@ where
/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
/// anchor with the given context.
-pub fn parse_to_token_tree<Anchor, Ctx>(
- anchor: Anchor,
+pub fn parse_to_token_tree<Ctx>(
+ anchor: SpanAnchor,
ctx: Ctx,
text: &str,
-) -> Option<tt::Subtree<SpanData<Anchor, Ctx>>>
+) -> Option<tt::Subtree<SpanData<Ctx>>>
where
- SpanData<Anchor, Ctx>: Span,
- Anchor: Copy,
+ SpanData<Ctx>: Span,
Ctx: SyntaxContext,
{
let lexed = parser::LexedStr::new(text);
if lexed.errors().next().is_some() {
return None;
}
- let mut conv = RawConverter { lexed, pos: 0, anchor, ctx };
+ let mut conv = RawConverter { lexed, anchor, pos: 0, ctx };
Some(convert_tokens(&mut conv))
}
@@ -450,10 +455,10 @@ fn convert_doc_comment<S: Copy>(
}
/// A raw token (straight from lexer) converter
-struct RawConverter<'a, Anchor, Ctx> {
+struct RawConverter<'a, Ctx> {
lexed: parser::LexedStr<'a>,
pos: usize,
- anchor: Anchor,
+ anchor: SpanAnchor,
ctx: Ctx,
}
/// A raw token (straight from lexer) converter that gives every token the same span.
@@ -487,16 +492,16 @@ trait TokenConverter<S>: Sized {
fn span_for(&self, range: TextRange) -> S;
}
-impl<Anchor, S, Ctx> SrcToken<RawConverter<'_, Anchor, Ctx>, S> for usize {
- fn kind(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SyntaxKind {
+impl<S, Ctx> SrcToken<RawConverter<'_, Ctx>, S> for usize {
+ fn kind(&self, ctx: &RawConverter<'_, Ctx>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
- fn to_char(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> Option<char> {
+ fn to_char(&self, ctx: &RawConverter<'_, Ctx>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
- fn to_text(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SmolStr {
+ fn to_text(&self, ctx: &RawConverter<'_, Ctx>) -> SmolStr {
ctx.lexed.text(*self).into()
}
}
@@ -515,18 +520,17 @@ impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
}
}
-impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<SpanData<Anchor, Ctx>>
- for RawConverter<'_, Anchor, Ctx>
+impl<Ctx: SyntaxContext> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
where
- SpanData<Anchor, Ctx>: Span,
+ SpanData<Ctx>: Span,
{
type Token = usize;
fn convert_doc_comment(
&self,
&token: &usize,
- span: SpanData<Anchor, Ctx>,
- ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
+ span: SpanData<Ctx>,
+ ) -> Option<Vec<tt::TokenTree<SpanData<Ctx>>>> {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span)
}
@@ -550,7 +554,7 @@ where
Some(self.pos)
}
- fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> {
+ fn span_for(&self, range: TextRange) -> SpanData<Ctx> {
SpanData { range, anchor: self.anchor, ctx: self.ctx }
}
}
@@ -778,22 +782,22 @@ where
}
}
-struct TtTreeSink<'a, Anchor, Ctx>
+struct TtTreeSink<'a, Ctx>
where
- SpanData<Anchor, Ctx>: Span,
+ SpanData<Ctx>: Span,
{
buf: String,
- cursor: Cursor<'a, SpanData<Anchor, Ctx>>,
+ cursor: Cursor<'a, SpanData<Ctx>>,
text_pos: TextSize,
inner: SyntaxTreeBuilder,
- token_map: SpanMap<SpanData<Anchor, Ctx>>,
+ token_map: SpanMap<SpanData<Ctx>>,
}
-impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx>
+impl<'a, Ctx> TtTreeSink<'a, Ctx>
where
- SpanData<Anchor, Ctx>: Span,
+ SpanData<Ctx>: Span,
{
- fn new(cursor: Cursor<'a, SpanData<Anchor, Ctx>>) -> Self {
+ fn new(cursor: Cursor<'a, SpanData<Ctx>>) -> Self {
TtTreeSink {
buf: String::new(),
cursor,
@@ -803,7 +807,7 @@ where
}
}
- fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>) {
+ fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Ctx>>) {
self.token_map.finish();
(self.inner.finish(), self.token_map)
}
@@ -821,9 +825,9 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
-impl<Anchor, Ctx> TtTreeSink<'_, Anchor, Ctx>
+impl<Ctx> TtTreeSink<'_, Ctx>
where
- SpanData<Anchor, Ctx>: Span,
+ SpanData<Ctx>: Span,
{
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.