Unnamed repository; edit this file 'description' to name the repository.
-rw-r--r-- crates/hir-def/Cargo.toml | 1
-rw-r--r-- crates/hir-ty/Cargo.toml | 2
-rw-r--r-- crates/hir/Cargo.toml | 1
-rw-r--r-- crates/ide-db/Cargo.toml | 1
-rw-r--r-- crates/proc-macro-srv/src/bridge.rs | 10
-rw-r--r-- crates/proc-macro-srv/src/dylib.rs | 3
-rw-r--r-- crates/proc-macro-srv/src/dylib/proc_macros.rs | 2
-rw-r--r-- crates/proc-macro-srv/src/dylib/version.rs | 5
-rw-r--r-- crates/proc-macro-srv/src/lib.rs | 11
-rw-r--r-- crates/proc-macro-srv/src/server_impl.rs | 8
-rw-r--r-- crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs | 38
-rw-r--r-- crates/proc-macro-srv/src/server_impl/token_id.rs | 37
-rw-r--r-- crates/proc-macro-srv/src/tests/utils.rs | 2
-rw-r--r-- crates/proc-macro-srv/src/token_stream.rs (renamed from crates/proc-macro-srv/src/tt.rs) | 503
-rw-r--r-- crates/rust-analyzer/Cargo.toml | 2
15 files changed, 274 insertions(+), 352 deletions(-)
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index abb4819a76..d0ab037664 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -27,7 +27,6 @@ tracing.workspace = true
smallvec.workspace = true
triomphe.workspace = true
rustc_apfloat = "0.2.3"
-text-size.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
query-group.workspace = true
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index 378a0f0382..fc2c0d5ac1 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -16,14 +16,12 @@ doctest = false
cov-mark = "2.0.0"
itertools.workspace = true
arrayvec.workspace = true
-bitflags.workspace = true
smallvec.workspace = true
ena = "0.14.3"
either.workspace = true
oorandom = "11.1.5"
tracing.workspace = true
rustc-hash.workspace = true
-scoped-tls = "1.0.1"
la-arena.workspace = true
triomphe.workspace = true
typed-arena = "2.0.2"
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index dfa3938432..d2d43e0d86 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -20,7 +20,6 @@ itertools.workspace = true
smallvec.workspace = true
tracing.workspace = true
triomphe.workspace = true
-indexmap.workspace = true
ra-ap-rustc_type_ir.workspace = true
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index b714816018..14b038ae40 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -22,7 +22,6 @@ rustc-hash.workspace = true
either.workspace = true
itertools.workspace = true
arrayvec.workspace = true
-indexmap.workspace = true
memchr = "2.7.5"
salsa.workspace = true
salsa-macros.workspace = true
diff --git a/crates/proc-macro-srv/src/bridge.rs b/crates/proc-macro-srv/src/bridge.rs
new file mode 100644
index 0000000000..b6c4692319
--- /dev/null
+++ b/crates/proc-macro-srv/src/bridge.rs
@@ -0,0 +1,10 @@
+use proc_macro::bridge as pm_bridge;
+
+pub(crate) use pm_bridge::{DelimSpan, Diagnostic, ExpnGlobals, LitKind};
+
+pub(crate) type TokenTree<S> =
+ pm_bridge::TokenTree<crate::token_stream::TokenStream<S>, S, intern::Symbol>;
+pub(crate) type Literal<S> = pm_bridge::Literal<S, intern::Symbol>;
+pub(crate) type Group<S> = pm_bridge::Group<crate::token_stream::TokenStream<S>, S>;
+pub(crate) type Punct<S> = pm_bridge::Punct<S>;
+pub(crate) type Ident<S> = pm_bridge::Ident<S, intern::Symbol>;
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index 0176868f65..03433197b7 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -12,7 +12,8 @@ use object::Object;
use paths::{Utf8Path, Utf8PathBuf};
use crate::{
- PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros, tt::TokenStream,
+ PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros,
+ token_stream::TokenStream,
};
pub(crate) struct Expander {
diff --git a/crates/proc-macro-srv/src/dylib/proc_macros.rs b/crates/proc-macro-srv/src/dylib/proc_macros.rs
index 0b29a1d5fe..c879c7609d 100644
--- a/crates/proc-macro-srv/src/dylib/proc_macros.rs
+++ b/crates/proc-macro-srv/src/dylib/proc_macros.rs
@@ -2,7 +2,7 @@
use proc_macro::bridge;
-use crate::{ProcMacroKind, ProcMacroSrvSpan, tt::TokenStream};
+use crate::{ProcMacroKind, ProcMacroSrvSpan, token_stream::TokenStream};
#[repr(transparent)]
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
diff --git a/crates/proc-macro-srv/src/dylib/version.rs b/crates/proc-macro-srv/src/dylib/version.rs
index 3b2551f08c..209693b4da 100644
--- a/crates/proc-macro-srv/src/dylib/version.rs
+++ b/crates/proc-macro-srv/src/dylib/version.rs
@@ -5,11 +5,14 @@ use std::io::{self, Read};
use object::read::{Object, ObjectSection};
#[derive(Debug)]
-#[allow(dead_code)]
pub struct RustCInfo {
+ #[allow(dead_code)]
pub version: (usize, usize, usize),
+ #[allow(dead_code)]
pub channel: String,
+ #[allow(dead_code)]
pub commit: Option<String>,
+ #[allow(dead_code)]
pub date: Option<String>,
// something like "rustc 1.58.1 (db9d1b20b 2022-01-20)"
pub version_string: String,
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index f4decb74af..cf125cbec6 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -26,9 +26,10 @@ extern crate ra_ap_rustc_lexer as rustc_lexer;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer;
+mod bridge;
mod dylib;
mod server_impl;
-mod tt;
+mod token_stream;
use std::{
collections::{HashMap, hash_map::Entry},
@@ -80,12 +81,12 @@ impl ProcMacroSrv<'_> {
env: &[(String, String)],
current_dir: Option<impl AsRef<Path>>,
macro_name: &str,
- macro_body: tt::TokenStream<S>,
- attribute: Option<tt::TokenStream<S>>,
+ macro_body: token_stream::TokenStream<S>,
+ attribute: Option<token_stream::TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<tt::TokenStream<S>, PanicMessage> {
+ ) -> Result<token_stream::TokenStream<S>, PanicMessage> {
let snapped_env = self.env;
let expander = self.expander(lib.as_ref()).map_err(|err| PanicMessage {
message: Some(format!("failed to load macro: {err}")),
@@ -149,7 +150,7 @@ impl ProcMacroSrv<'_> {
}
pub trait ProcMacroSrvSpan: Copy + Send + Sync {
- type Server: proc_macro::bridge::server::Server<TokenStream = crate::tt::TokenStream<Self>>;
+ type Server: proc_macro::bridge::server::Server<TokenStream = crate::token_stream::TokenStream<Self>>;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index 8d509fd47e..bc46f8f0e4 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -6,13 +6,13 @@
//! The original idea from fedochet is using proc-macro2 as backend,
//! we use tt instead for better integration with RA.
-pub mod rust_analyzer_span;
-pub mod token_id;
+pub(crate) mod rust_analyzer_span;
+pub(crate) mod token_id;
pub(super) fn literal_from_str<Span: Copy>(
s: &str,
span: Span,
-) -> Result<proc_macro::bridge::Literal<Span, intern::Symbol>, ()> {
+) -> Result<crate::bridge::Literal<Span>, ()> {
use rustc_lexer::{LiteralKind, Token, TokenKind};
let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
@@ -35,5 +35,5 @@ pub(super) fn literal_from_str<Span: Copy>(
}
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
- Ok(crate::tt::literal_from_lexer(s, span, kind, suffix_start))
+ Ok(crate::token_stream::literal_from_lexer(s, span, kind, suffix_start))
}
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index 0d44cbb178..7c685c2da7 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -10,12 +10,13 @@ use std::{
};
use intern::Symbol;
-use proc_macro::bridge::{self, server};
+use proc_macro::bridge::server;
use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span, TextRange, TextSize};
-use crate::server_impl::literal_from_str;
-
-type TokenStream = crate::tt::TokenStream<Span>;
+use crate::{
+ bridge::{Diagnostic, ExpnGlobals, Literal, TokenTree},
+ server_impl::literal_from_str,
+};
pub struct FreeFunctions;
@@ -31,7 +32,7 @@ pub struct RaSpanServer {
impl server::Types for RaSpanServer {
type FreeFunctions = FreeFunctions;
- type TokenStream = TokenStream;
+ type TokenStream = crate::token_stream::TokenStream<Span>;
type Span = Span;
type Symbol = Symbol;
}
@@ -48,14 +49,11 @@ impl server::FreeFunctions for RaSpanServer {
self.tracked_paths.insert(path.into());
}
- fn literal_from_str(
- &mut self,
- s: &str,
- ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
literal_from_str(s, self.call_site)
}
- fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
+ fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {
// FIXME handle diagnostic
}
}
@@ -77,11 +75,8 @@ impl server::TokenStream for RaSpanServer {
stream.to_string()
}
- fn from_token_tree(
- &mut self,
- tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
- ) -> Self::TokenStream {
- TokenStream::new(vec![tree])
+ fn from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
+ Self::TokenStream::new(vec![tree])
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -96,7 +91,7 @@ impl server::TokenStream for RaSpanServer {
fn concat_trees(
&mut self,
base: Option<Self::TokenStream>,
- trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ trees: Vec<TokenTree<Self::Span>>,
) -> Self::TokenStream {
match base {
Some(mut base) => {
@@ -105,7 +100,7 @@ impl server::TokenStream for RaSpanServer {
}
base
}
- None => TokenStream::new(trees),
+ None => Self::TokenStream::new(trees),
}
}
@@ -121,10 +116,7 @@ impl server::TokenStream for RaSpanServer {
stream
}
- fn into_trees(
- &mut self,
- stream: Self::TokenStream,
- ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
+ fn into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
(*stream.0).clone()
}
}
@@ -285,8 +277,8 @@ impl server::Symbol for RaSpanServer {
}
impl server::Server for RaSpanServer {
- fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
- bridge::ExpnGlobals {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ ExpnGlobals {
def_site: self.def_site,
call_site: self.call_site,
mixed_site: self.mixed_site,
diff --git a/crates/proc-macro-srv/src/server_impl/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs
index d637aeb2ec..3814320cbe 100644
--- a/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -3,9 +3,12 @@
use std::ops::{Bound, Range};
use intern::Symbol;
-use proc_macro::bridge::{self, server};
+use proc_macro::bridge::server;
-use crate::server_impl::literal_from_str;
+use crate::{
+ bridge::{Diagnostic, ExpnGlobals, Literal, TokenTree},
+ server_impl::literal_from_str,
+};
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SpanId(pub u32);
@@ -17,7 +20,6 @@ impl std::fmt::Debug for SpanId {
}
type Span = SpanId;
-type TokenStream = crate::tt::TokenStream<Span>;
pub struct FreeFunctions;
@@ -29,7 +31,7 @@ pub struct SpanIdServer {
impl server::Types for SpanIdServer {
type FreeFunctions = FreeFunctions;
- type TokenStream = TokenStream;
+ type TokenStream = crate::token_stream::TokenStream<Span>;
type Span = Span;
type Symbol = Symbol;
}
@@ -40,14 +42,11 @@ impl server::FreeFunctions for SpanIdServer {
}
fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {}
fn track_path(&mut self, _path: &str) {}
- fn literal_from_str(
- &mut self,
- s: &str,
- ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
literal_from_str(s, self.call_site)
}
- fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {}
+ fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {}
}
impl server::TokenStream for SpanIdServer {
@@ -66,11 +65,8 @@ impl server::TokenStream for SpanIdServer {
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
}
- fn from_token_tree(
- &mut self,
- tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
- ) -> Self::TokenStream {
- TokenStream::new(vec![tree])
+ fn from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
+ Self::TokenStream::new(vec![tree])
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -80,7 +76,7 @@ impl server::TokenStream for SpanIdServer {
fn concat_trees(
&mut self,
base: Option<Self::TokenStream>,
- trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ trees: Vec<TokenTree<Self::Span>>,
) -> Self::TokenStream {
match base {
Some(mut base) => {
@@ -89,7 +85,7 @@ impl server::TokenStream for SpanIdServer {
}
base
}
- None => TokenStream::new(trees),
+ None => Self::TokenStream::new(trees),
}
}
@@ -105,10 +101,7 @@ impl server::TokenStream for SpanIdServer {
stream
}
- fn into_trees(
- &mut self,
- stream: Self::TokenStream,
- ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
+ fn into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
(*stream.0).clone()
}
}
@@ -188,8 +181,8 @@ impl server::Symbol for SpanIdServer {
}
impl server::Server for SpanIdServer {
- fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
- bridge::ExpnGlobals {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ ExpnGlobals {
def_site: self.def_site,
call_site: self.call_site,
mixed_site: self.mixed_site,
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 52b2849d90..1b12308ad6 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -6,7 +6,7 @@ use span::{
};
use crate::{
- EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, tt::TokenStream,
+ EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, token_stream::TokenStream,
};
fn parse_string(call_site: SpanId, src: &str) -> TokenStream<SpanId> {
diff --git a/crates/proc-macro-srv/src/tt.rs b/crates/proc-macro-srv/src/token_stream.rs
index 14cf7ca105..9a8d0cea8f 100644
--- a/crates/proc-macro-srv/src/tt.rs
+++ b/crates/proc-macro-srv/src/token_stream.rs
@@ -2,41 +2,16 @@ use core::fmt;
use std::sync::Arc;
use intern::Symbol;
-use proc_macro::{Delimiter, bridge};
+use proc_macro::Delimiter;
use rustc_lexer::{DocStyle, LiteralKind};
-pub type TokenTree<S> = bridge::TokenTree<TokenStream<S>, S, Symbol>;
+use crate::bridge::{DelimSpan, Group, Ident, LitKind, Literal, Punct, TokenTree};
-/// Trait for allowing integration tests to parse tokenstreams with dynamic span ranges
-pub trait SpanLike {
+/// Trait for allowing tests to parse tokenstreams with dynamic span ranges
+pub(crate) trait SpanLike {
fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self;
}
-impl SpanLike for crate::SpanId {
- fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
- *self
- }
-}
-
-impl SpanLike for () {
- fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
- *self
- }
-}
-
-impl SpanLike for crate::Span {
- fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self {
- crate::Span {
- range: span::TextRange::new(
- span::TextSize::new(range.start as u32),
- span::TextSize::new(range.end as u32),
- ),
- anchor: self.anchor,
- ctx: self.ctx,
- }
- }
-}
-
#[derive(Clone)]
pub struct TokenStream<S>(pub(crate) Arc<Vec<TokenTree<S>>>);
@@ -47,31 +22,31 @@ impl<S> Default for TokenStream<S> {
}
impl<S> TokenStream<S> {
- pub fn new(tts: Vec<TokenTree<S>>) -> TokenStream<S> {
+ pub(crate) fn new(tts: Vec<TokenTree<S>>) -> TokenStream<S> {
TokenStream(Arc::new(tts))
}
- pub fn is_empty(&self) -> bool {
+ pub(crate) fn is_empty(&self) -> bool {
self.0.is_empty()
}
- pub fn len(&self) -> usize {
+ pub(crate) fn len(&self) -> usize {
self.0.len()
}
- pub fn get(&self, index: usize) -> Option<&TokenTree<S>> {
+ pub(crate) fn get(&self, index: usize) -> Option<&TokenTree<S>> {
self.0.get(index)
}
- pub fn iter(&self) -> TokenStreamIter<'_, S> {
+ pub(crate) fn iter(&self) -> TokenStreamIter<'_, S> {
TokenStreamIter::new(self)
}
- pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree<S>> {
+ pub(crate) fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree<S>> {
self.0.chunks(chunk_size)
}
- pub fn from_str(s: &str, span: S) -> Result<Self, String>
+ pub(crate) fn from_str(s: &str, span: S) -> Result<Self, String>
where
S: SpanLike + Copy,
{
@@ -134,14 +109,14 @@ impl<S> TokenStream<S> {
rustc_lexer::TokenKind::CloseParen => {
let (delimiter, open_range, stream) = groups.pop().unwrap();
groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
- TokenTree::Group(bridge::Group {
+ TokenTree::Group(Group {
delimiter,
stream: if stream.is_empty() {
None
} else {
Some(TokenStream::new(stream))
},
- span: bridge::DelimSpan {
+ span: DelimSpan {
entire: span.derive_ranged(open_range.start..range.end),
open: span.derive_ranged(open_range),
close: span.derive_ranged(range),
@@ -158,14 +133,14 @@ impl<S> TokenStream<S> {
rustc_lexer::TokenKind::CloseBrace => {
let (delimiter, open_range, stream) = groups.pop().unwrap();
groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
- TokenTree::Group(bridge::Group {
+ TokenTree::Group(Group {
delimiter,
stream: if stream.is_empty() {
None
} else {
Some(TokenStream::new(stream))
},
- span: bridge::DelimSpan {
+ span: DelimSpan {
entire: span.derive_ranged(open_range.start..range.end),
open: span.derive_ranged(open_range),
close: span.derive_ranged(range),
@@ -182,14 +157,14 @@ impl<S> TokenStream<S> {
rustc_lexer::TokenKind::CloseBracket => {
let (delimiter, open_range, stream) = groups.pop().unwrap();
groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
- TokenTree::Group(bridge::Group {
+ TokenTree::Group(Group {
delimiter,
stream: if stream.is_empty() {
None
} else {
Some(TokenStream::new(stream))
},
- span: bridge::DelimSpan {
+ span: DelimSpan {
entire: span.derive_ranged(open_range.start..range.end),
open: span.derive_ranged(open_range),
close: span.derive_ranged(range),
@@ -203,77 +178,53 @@ impl<S> TokenStream<S> {
}
rustc_lexer::TokenKind::LineComment { doc_style: Some(doc_style) } => {
let text = &s[range.start + 2..range.end];
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'#',
- joint: false,
- span,
- }));
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
if doc_style == DocStyle::Inner {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'!',
- joint: false,
- span,
- }));
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
}
- tokenstream.push(bridge::TokenTree::Group(bridge::Group {
+ tokenstream.push(TokenTree::Group(Group {
delimiter: Delimiter::Bracket,
stream: Some(TokenStream::new(vec![
- bridge::TokenTree::Ident(bridge::Ident {
+ TokenTree::Ident(Ident {
sym: Symbol::intern("doc"),
is_raw: false,
span,
}),
- bridge::TokenTree::Punct(bridge::Punct {
- ch: b'=',
- joint: false,
- span,
- }),
- bridge::TokenTree::Literal(bridge::Literal {
- kind: bridge::LitKind::Str,
+ TokenTree::Punct(Punct { ch: b'=', joint: false, span }),
+ TokenTree::Literal(Literal {
+ kind: LitKind::Str,
symbol: Symbol::intern(&text.escape_debug().to_string()),
suffix: None,
span: span.derive_ranged(range),
}),
])),
- span: bridge::DelimSpan { open: span, close: span, entire: span },
+ span: DelimSpan { open: span, close: span, entire: span },
}));
}
rustc_lexer::TokenKind::BlockComment { doc_style: Some(doc_style), terminated } => {
let text =
&s[range.start + 2..if terminated { range.end - 2 } else { range.end }];
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'#',
- joint: false,
- span,
- }));
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
if doc_style == DocStyle::Inner {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'!',
- joint: false,
- span,
- }));
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
}
- tokenstream.push(bridge::TokenTree::Group(bridge::Group {
+ tokenstream.push(TokenTree::Group(Group {
delimiter: Delimiter::Bracket,
stream: Some(TokenStream::new(vec![
- bridge::TokenTree::Ident(bridge::Ident {
+ TokenTree::Ident(Ident {
sym: Symbol::intern("doc"),
is_raw: false,
span,
}),
- bridge::TokenTree::Punct(bridge::Punct {
- ch: b'=',
- joint: false,
- span,
- }),
- bridge::TokenTree::Literal(bridge::Literal {
- kind: bridge::LitKind::Str,
+ TokenTree::Punct(Punct { ch: b'=', joint: false, span }),
+ TokenTree::Literal(Literal {
+ kind: LitKind::Str,
symbol: Symbol::intern(&text.escape_debug().to_string()),
suffix: None,
span: span.derive_ranged(range),
}),
])),
- span: bridge::DelimSpan { open: span, close: span, entire: span },
+ span: DelimSpan { open: span, close: span, entire: span },
}));
}
rustc_lexer::TokenKind::Whitespace => continue,
@@ -286,35 +237,33 @@ impl<S> TokenStream<S> {
// FIXME: Error on edition >= 2024 ... I dont think the proc-macro server can fetch editions currently
// and whose edition is this?
rustc_lexer::TokenKind::GuardedStrPrefix => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ tokenstream.push(TokenTree::Punct(Punct {
ch: s.as_bytes()[range.start],
joint: true,
span: span.derive_ranged(range.start..range.start + 1),
}));
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ tokenstream.push(TokenTree::Punct(Punct {
ch: s.as_bytes()[range.start + 1],
joint: is_joint(),
span: span.derive_ranged(range.start + 1..range.end),
}))
}
- rustc_lexer::TokenKind::Ident => {
- tokenstream.push(bridge::TokenTree::Ident(bridge::Ident {
- sym: Symbol::intern(&s[range.clone()]),
- is_raw: false,
- span: span.derive_ranged(range),
- }))
- }
+ rustc_lexer::TokenKind::Ident => tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: false,
+ span: span.derive_ranged(range),
+ })),
rustc_lexer::TokenKind::InvalidIdent => return Err("Invalid identifier".to_owned()),
rustc_lexer::TokenKind::RawIdent => {
let range = range.start + 2..range.end;
- tokenstream.push(bridge::TokenTree::Ident(bridge::Ident {
+ tokenstream.push(TokenTree::Ident(Ident {
sym: Symbol::intern(&s[range.clone()]),
is_raw: true,
span: span.derive_ranged(range),
}))
}
rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
- tokenstream.push(bridge::TokenTree::Literal(literal_from_lexer(
+ tokenstream.push(TokenTree::Literal(literal_from_lexer(
&s[range.clone()],
span.derive_ranged(range),
kind,
@@ -323,12 +272,12 @@ impl<S> TokenStream<S> {
}
rustc_lexer::TokenKind::RawLifetime => {
let range = range.start + 1 + 2..range.end;
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ tokenstream.push(TokenTree::Punct(Punct {
ch: b'\'',
joint: true,
span: span.derive_ranged(range.start..range.start + 1),
}));
- tokenstream.push(bridge::TokenTree::Ident(bridge::Ident {
+ tokenstream.push(TokenTree::Ident(Ident {
sym: Symbol::intern(&s[range.clone()]),
is_raw: true,
span: span.derive_ranged(range),
@@ -339,164 +288,122 @@ impl<S> TokenStream<S> {
return Err("Lifetime cannot start with a number".to_owned());
}
let range = range.start + 1..range.end;
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
+ tokenstream.push(TokenTree::Punct(Punct {
ch: b'\'',
joint: true,
span: span.derive_ranged(range.start..range.start + 1),
}));
- tokenstream.push(bridge::TokenTree::Ident(bridge::Ident {
+ tokenstream.push(TokenTree::Ident(Ident {
sym: Symbol::intern(&s[range.clone()]),
is_raw: false,
span: span.derive_ranged(range),
}))
}
- rustc_lexer::TokenKind::Semi => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b';',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Comma => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b',',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Dot => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'.',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::At => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'@',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Pound => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'#',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Tilde => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'~',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Question => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'?',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Colon => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b':',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Dollar => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'$',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Eq => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'=',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Bang => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'!',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Lt => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'<',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Gt => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'>',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Minus => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'-',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::And => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'&',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Or => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'|',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Plus => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'+',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Star => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'*',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Slash => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'/',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Caret => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'^',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
- rustc_lexer::TokenKind::Percent => {
- tokenstream.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: b'%',
- joint: is_joint(),
- span: span.derive_ranged(range),
- }))
- }
+ rustc_lexer::TokenKind::Semi => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b';',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Comma => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b',',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Dot => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'.',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::At => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'@',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Pound => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'#',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Tilde => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'~',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Question => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'?',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Colon => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b':',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Dollar => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'$',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Eq => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'=',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Bang => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'!',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Lt => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'<',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Gt => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'>',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Minus => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'-',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::And => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'&',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Or => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'|',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Plus => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'+',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Star => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'*',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Slash => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'/',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Caret => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'^',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Percent => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'%',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
rustc_lexer::TokenKind::Eof => break,
}
}
@@ -521,7 +428,7 @@ impl<S> fmt::Display for TokenStream<S> {
fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match tt {
- bridge::TokenTree::Group(bridge::Group { delimiter, stream, span: _ }) => {
+ TokenTree::Group(Group { delimiter, stream, span: _ }) => {
write!(
f,
"{}",
@@ -546,24 +453,24 @@ fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) ->
}
)?;
}
- bridge::TokenTree::Punct(bridge::Punct { ch, joint, span: _ }) => {
+ TokenTree::Punct(Punct { ch, joint, span: _ }) => {
write!(f, "{ch}{}", if *joint { "" } else { " " })?
}
- bridge::TokenTree::Ident(bridge::Ident { sym, is_raw, span: _ }) => {
+ TokenTree::Ident(Ident { sym, is_raw, span: _ }) => {
if *is_raw {
write!(f, "r#")?;
}
write!(f, "{sym} ")?;
}
- bridge::TokenTree::Literal(lit) => {
+ TokenTree::Literal(lit) => {
display_fmt_literal(lit, f)?;
let joint = match lit.kind {
- bridge::LitKind::Str
- | bridge::LitKind::StrRaw(_)
- | bridge::LitKind::ByteStr
- | bridge::LitKind::ByteStrRaw(_)
- | bridge::LitKind::CStr
- | bridge::LitKind::CStrRaw(_) => true,
+ LitKind::Str
+ | LitKind::StrRaw(_)
+ | LitKind::ByteStr
+ | LitKind::ByteStrRaw(_)
+ | LitKind::CStr
+ | LitKind::CStrRaw(_) => true,
_ => false,
};
if !joint {
@@ -574,20 +481,17 @@ fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) ->
Ok(())
}
-fn display_fmt_literal<S>(
- literal: &bridge::Literal<S, Symbol>,
- f: &mut fmt::Formatter<'_>,
-) -> fmt::Result {
+fn display_fmt_literal<S>(literal: &Literal<S>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match literal.kind {
- bridge::LitKind::Byte => write!(f, "b'{}'", literal.symbol),
- bridge::LitKind::Char => write!(f, "'{}'", literal.symbol),
- bridge::LitKind::Integer | bridge::LitKind::Float | bridge::LitKind::ErrWithGuar => {
+ LitKind::Byte => write!(f, "b'{}'", literal.symbol),
+ LitKind::Char => write!(f, "'{}'", literal.symbol),
+ LitKind::Integer | LitKind::Float | LitKind::ErrWithGuar => {
write!(f, "{}", literal.symbol)
}
- bridge::LitKind::Str => write!(f, "\"{}\"", literal.symbol),
- bridge::LitKind::ByteStr => write!(f, "b\"{}\"", literal.symbol),
- bridge::LitKind::CStr => write!(f, "c\"{}\"", literal.symbol),
- bridge::LitKind::StrRaw(num_of_hashes) => {
+ LitKind::Str => write!(f, "\"{}\"", literal.symbol),
+ LitKind::ByteStr => write!(f, "b\"{}\"", literal.symbol),
+ LitKind::CStr => write!(f, "c\"{}\"", literal.symbol),
+ LitKind::StrRaw(num_of_hashes) => {
let num_of_hashes = num_of_hashes as usize;
write!(
f,
@@ -596,7 +500,7 @@ fn display_fmt_literal<S>(
text = literal.symbol
)
}
- bridge::LitKind::ByteStrRaw(num_of_hashes) => {
+ LitKind::ByteStrRaw(num_of_hashes) => {
let num_of_hashes = num_of_hashes as usize;
write!(
f,
@@ -605,7 +509,7 @@ fn display_fmt_literal<S>(
text = literal.symbol
)
}
- bridge::LitKind::CStrRaw(num_of_hashes) => {
+ LitKind::CStrRaw(num_of_hashes) => {
let num_of_hashes = num_of_hashes as usize;
write!(
f,
@@ -645,7 +549,7 @@ fn debug_token_tree<S: fmt::Debug>(
) -> std::fmt::Result {
write!(f, "{:indent$}", "", indent = depth * 2)?;
match tt {
- bridge::TokenTree::Group(bridge::Group { delimiter, stream, span }) => {
+ TokenTree::Group(Group { delimiter, stream, span }) => {
writeln!(
f,
"GROUP {}{} {:#?} {:#?} {:#?}",
@@ -670,20 +574,20 @@ fn debug_token_tree<S: fmt::Debug>(
}
return Ok(());
}
- bridge::TokenTree::Punct(bridge::Punct { ch, joint, span }) => write!(
+ TokenTree::Punct(Punct { ch, joint, span }) => write!(
f,
"PUNCT {span:#?} {} {}",
*ch as char,
if *joint { "[joint]" } else { "[alone]" }
)?,
- bridge::TokenTree::Ident(bridge::Ident { sym, is_raw, span }) => {
+ TokenTree::Ident(Ident { sym, is_raw, span }) => {
write!(f, "IDENT {span:#?} ")?;
if *is_raw {
write!(f, "r#")?;
}
write!(f, "{sym}")?;
}
- bridge::TokenTree::Literal(bridge::Literal { kind, symbol, suffix, span }) => write!(
+ TokenTree::Literal(Literal { kind, symbol, suffix, span }) => write!(
f,
"LITER {span:#?} {kind:?} {symbol}{} ",
match suffix {
@@ -698,7 +602,7 @@ fn debug_token_tree<S: fmt::Debug>(
impl<S: Copy> TokenStream<S> {
/// Push `tt` onto the end of the stream, possibly gluing it to the last
/// token. Uses `make_mut` to maximize efficiency.
- pub fn push_tree(&mut self, tt: TokenTree<S>) {
+ pub(crate) fn push_tree(&mut self, tt: TokenTree<S>) {
let vec_mut = Arc::make_mut(&mut self.0);
vec_mut.push(tt);
}
@@ -706,7 +610,7 @@ impl<S: Copy> TokenStream<S> {
/// Push `stream` onto the end of the stream, possibly gluing the first
/// token tree to the last token. (No other token trees will be glued.)
/// Uses `make_mut` to maximize efficiency.
- pub fn push_stream(&mut self, stream: TokenStream<S>) {
+ pub(crate) fn push_stream(&mut self, stream: TokenStream<S>) {
let vec_mut = Arc::make_mut(&mut self.0);
let stream_iter = stream.0.iter().cloned();
@@ -722,7 +626,7 @@ impl<S> FromIterator<TokenTree<S>> for TokenStream<S> {
}
#[derive(Clone)]
-pub struct TokenStreamIter<'t, S> {
+pub(crate) struct TokenStreamIter<'t, S> {
stream: &'t TokenStream<S>,
index: usize,
}
@@ -735,7 +639,7 @@ impl<'t, S> TokenStreamIter<'t, S> {
// Peeking could be done via `Peekable`, but most iterators need peeking,
// and this is simple and avoids the need to use `peekable` and `Peekable`
// at all the use sites.
- pub fn peek(&self) -> Option<&'t TokenTree<S>> {
+ pub(crate) fn peek(&self) -> Option<&'t TokenTree<S>> {
self.stream.0.get(self.index)
}
}
@@ -756,27 +660,27 @@ pub(super) fn literal_from_lexer<Span>(
span: Span,
kind: rustc_lexer::LiteralKind,
suffix_start: u32,
-) -> bridge::Literal<Span, Symbol> {
+) -> Literal<Span> {
let (kind, start_offset, end_offset) = match kind {
- LiteralKind::Int { .. } => (bridge::LitKind::Integer, 0, 0),
- LiteralKind::Float { .. } => (bridge::LitKind::Float, 0, 0),
- LiteralKind::Char { terminated } => (bridge::LitKind::Char, 1, terminated as usize),
- LiteralKind::Byte { terminated } => (bridge::LitKind::Byte, 2, terminated as usize),
- LiteralKind::Str { terminated } => (bridge::LitKind::Str, 1, terminated as usize),
- LiteralKind::ByteStr { terminated } => (bridge::LitKind::ByteStr, 2, terminated as usize),
- LiteralKind::CStr { terminated } => (bridge::LitKind::CStr, 2, terminated as usize),
+ LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+ LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+ LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+ LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+ LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+ LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+ LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
LiteralKind::RawStr { n_hashes } => (
- bridge::LitKind::StrRaw(n_hashes.unwrap_or_default()),
+ LitKind::StrRaw(n_hashes.unwrap_or_default()),
2 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawByteStr { n_hashes } => (
- bridge::LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+ LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawCStr { n_hashes } => (
- bridge::LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+ LitKind::CStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
@@ -789,7 +693,32 @@ pub(super) fn literal_from_lexer<Span>(
suffix => Some(Symbol::intern(suffix)),
};
- bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span }
+ Literal { kind, symbol: Symbol::intern(lit), suffix, span }
+}
+
+impl SpanLike for crate::SpanId {
+ fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
+ *self
+ }
+}
+
+impl SpanLike for () {
+ fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
+ *self
+ }
+}
+
+impl SpanLike for crate::Span {
+ fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self {
+ crate::Span {
+ range: span::TextRange::new(
+ span::TextSize::new(range.start as u32),
+ span::TextSize::new(range.end as u32),
+ ),
+ anchor: self.anchor,
+ ctx: self.ctx,
+ }
+ }
}
#[cfg(test)]
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 782ec55614..2e48c5a5a6 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -48,7 +48,6 @@ tracing-subscriber.workspace = true
tracing-tree.workspace = true
triomphe.workspace = true
toml.workspace = true
-nohash-hasher.workspace = true
walkdir = "2.5.0"
semver.workspace = true
memchr = "2.7.5"
@@ -61,7 +60,6 @@ hir-def.workspace = true
hir-ty.workspace = true
hir.workspace = true
ide-db.workspace = true
-intern.workspace = true
# This should only be used in CLI
ide-ssr.workspace = true
ide.workspace = true