Unnamed repository; edit this file 'description' to name the repository.
Auto merge of #16088 - Veykril:proc-macro-srv-2, r=Veykril
feat: Implement a rust-analyzer-span-backed proc-macro server mode. This implements the basic span APIs — basically anything that doesn't require talking back to the client for information access. This also commits our syntax fixup marker to an `ErasedAstFileId` of `!0 - 1` (aka `0xffff_fffe`), instead of using a dummy FileId as the marker, as we need that for the `SourceFile` API to be implementable. The reason the server needs to know about this at all is to prevent it from creating invalid fixup spans, which could make r-a panic.
bors 2023-12-22
parent 3ce3593 · parent 5761b50 · commit 20e09c6
-rw-r--r--.github/workflows/ci.yaml1
-rw-r--r--Cargo.lock7
-rw-r--r--Cargo.toml3
-rw-r--r--crates/hir-expand/src/builtin_fn_macro.rs13
-rw-r--r--crates/hir-expand/src/fixup.rs44
-rw-r--r--crates/hir-expand/src/name.rs1
-rw-r--r--crates/hir/src/source_analyzer.rs68
-rw-r--r--crates/proc-macro-api/src/lib.rs21
-rw-r--r--crates/proc-macro-api/src/msg.rs45
-rw-r--r--crates/proc-macro-api/src/msg/flat.rs41
-rw-r--r--crates/proc-macro-api/src/process.rs36
-rw-r--r--crates/proc-macro-srv-cli/src/main.rs14
-rw-r--r--crates/proc-macro-srv/Cargo.toml6
-rw-r--r--crates/proc-macro-srv/proc-macro-test/Cargo.toml19
-rw-r--r--crates/proc-macro-srv/proc-macro-test/build.rs (renamed from crates/proc-macro-test/build.rs)3
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/.gitignore (renamed from crates/proc-macro-test/imp/.gitignore)0
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml (renamed from crates/proc-macro-test/imp/Cargo.toml)7
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs (renamed from crates/proc-macro-test/imp/src/lib.rs)25
-rw-r--r--crates/proc-macro-srv/proc-macro-test/src/lib.rs (renamed from crates/proc-macro-test/src/lib.rs)0
-rw-r--r--crates/proc-macro-srv/src/dylib.rs22
-rw-r--r--crates/proc-macro-srv/src/lib.rs194
-rw-r--r--crates/proc-macro-srv/src/proc_macros.rs48
-rw-r--r--crates/proc-macro-srv/src/server.rs399
-rw-r--r--crates/proc-macro-srv/src/server/rust_analyzer_span.rs411
-rw-r--r--crates/proc-macro-srv/src/server/token_id.rs380
-rw-r--r--crates/proc-macro-srv/src/server/token_stream.rs109
-rw-r--r--crates/proc-macro-srv/src/tests/mod.rs92
-rw-r--r--crates/proc-macro-srv/src/tests/utils.rs87
-rw-r--r--crates/proc-macro-test/Cargo.toml20
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs2
-rw-r--r--crates/span/src/lib.rs15
31 files changed, 1479 insertions, 654 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 1f2a7796d1..be830415f9 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -38,7 +38,6 @@ jobs:
- 'crates/proc-macro-api/**'
- 'crates/proc-macro-srv/**'
- 'crates/proc-macro-srv-cli/**'
- - 'crates/proc-macro-test/**'
rust:
needs: changes
diff --git a/Cargo.lock b/Cargo.lock
index 2cb26d0c59..4616b2fde8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1277,6 +1277,7 @@ dependencies = [
name = "proc-macro-srv"
version = "0.0.0"
dependencies = [
+ "base-db",
"expect-test",
"libloading",
"mbe",
@@ -1285,6 +1286,7 @@ dependencies = [
"paths",
"proc-macro-api",
"proc-macro-test",
+ "span",
"stdx",
"tt",
]
@@ -1302,15 +1304,10 @@ name = "proc-macro-test"
version = "0.0.0"
dependencies = [
"cargo_metadata",
- "proc-macro-test-impl",
"toolchain",
]
[[package]]
-name = "proc-macro-test-impl"
-version = "0.0.0"
-
-[[package]]
name = "proc-macro2"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index 556d159ab7..d4cff420bc 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[workspace]
members = ["xtask/", "lib/*", "crates/*"]
-exclude = ["crates/proc-macro-test/imp"]
+exclude = ["crates/proc-macro-srv/proc-macro-test/"]
resolver = "2"
[workspace.package]
@@ -81,7 +81,6 @@ vfs = { path = "./crates/vfs", version = "0.0.0" }
rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" }
# local crates that aren't published to crates.io. These should not have versions.
-proc-macro-test = { path = "./crates/proc-macro-test" }
sourcegen = { path = "./crates/sourcegen" }
test-fixture = { path = "./crates/test-fixture" }
test-utils = { path = "./crates/test-utils" }
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index 0475a0f4ac..f99a891762 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -108,6 +108,7 @@ register_builtin! {
(format_args, FormatArgs) => format_args_expand,
(const_format_args, ConstFormatArgs) => format_args_expand,
(format_args_nl, FormatArgsNl) => format_args_nl_expand,
+ (quote, Quote) => quote_expand,
EAGER:
(compile_error, CompileError) => compile_error_expand,
@@ -770,3 +771,15 @@ fn option_env_expand(
ExpandResult::ok(expanded)
}
+
+fn quote_expand(
+ _db: &dyn ExpandDatabase,
+ _arg_id: MacroCallId,
+ _tt: &tt::Subtree,
+ span: Span,
+) -> ExpandResult<tt::Subtree> {
+ ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
+ ExpandError::other("quote! is not implemented"),
+ )
+}
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 53e87f8197..d241d94b8c 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -1,10 +1,9 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
-use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
-use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SpanData};
+use span::{ErasedFileAstId, Span, SpanAnchor, SpanData, FIXUP_ERASED_FILE_AST_ID_MARKER};
use stdx::never;
use syntax::{
ast::{self, AstNode, HasLoopBody},
@@ -39,13 +38,11 @@ impl SyntaxFixupUndoInfo {
pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}
-// censoring -> just don't convert the node
-// replacement -> censor + append
-// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
-// to remove later
-const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
-const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
+// We mark spans with `FIXUP_DUMMY_AST_ID` to indicate that they are fake.
+const FIXUP_DUMMY_AST_ID: ErasedFileAstId = FIXUP_ERASED_FILE_AST_ID_MARKER;
const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
+// If the fake span has this range end, that means that the range start is an index into the
+// `original` list in `SyntaxFixupUndoInfo`.
const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);
pub(crate) fn fixup_syntax(
@@ -58,13 +55,13 @@ pub(crate) fn fixup_syntax(
let mut preorder = node.preorder();
let mut original = Vec::new();
let dummy_range = FIXUP_DUMMY_RANGE;
- // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
- // the index into the replacement vec but only if the end points to !0
- let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
- let fake_span = |range| SpanData {
- range: dummy_range,
- anchor: dummy_anchor,
- ctx: span_map.span_for_range(range).ctx,
+ let fake_span = |range| {
+ let span = span_map.span_for_range(range);
+ SpanData {
+ range: dummy_range,
+ anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
+ ctx: span.ctx,
+ }
};
while let Some(event) = preorder.next() {
let syntax::WalkEvent::Enter(node) = event else { continue };
@@ -76,12 +73,13 @@ pub(crate) fn fixup_syntax(
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
let idx = original.len() as u32;
original.push(original_tree);
+ let span = span_map.span_for_range(node_range);
let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
span: SpanData {
range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
- anchor: dummy_anchor,
- ctx: span_map.span_for_range(node_range).ctx,
+ anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
+ ctx: span.ctx,
},
});
append.insert(node.clone().into(), vec![replacement]);
@@ -304,8 +302,8 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo)
let undo_info = &**undo_info;
#[allow(deprecated)]
if never!(
- tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
- || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
+ || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
) {
tt.delimiter.close = SpanData::DUMMY;
tt.delimiter.open = SpanData::DUMMY;
@@ -321,7 +319,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
let span = leaf.span();
- let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
+ let is_real_leaf = span.anchor.ast_id != FIXUP_DUMMY_AST_ID;
let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
is_real_leaf || is_replaced_node
}
@@ -329,8 +327,8 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
- if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
- || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ if tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
+ || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
{
// Even though fixup never creates subtrees with fixup spans, the old proc-macro server
// might copy them if the proc-macro asks for it, so we need to filter those out
@@ -341,7 +339,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
- if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
+ if leaf.span().anchor.ast_id == FIXUP_DUMMY_AST_ID {
// we have a fake node here, we need to replace it again with the original
let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs
index d122938e8e..3d8d01e255 100644
--- a/crates/hir-expand/src/name.rs
+++ b/crates/hir-expand/src/name.rs
@@ -388,6 +388,7 @@ pub mod known {
log_syntax,
module_path,
option_env,
+ quote,
std_panic,
stringify,
trace_macros,
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 5fbc860a4b..0961a71376 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -1162,9 +1162,40 @@ fn resolve_hir_path_qualifier(
resolver: &Resolver,
path: &Path,
) -> Option<PathResolution> {
- resolver
- .resolve_path_in_type_ns_fully(db.upcast(), &path)
- .map(|ty| match ty {
+ (|| {
+ let (ty, unresolved) = match path.type_anchor() {
+ Some(type_ref) => {
+ let (_, res) =
+ TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
+ .lower_ty_ext(type_ref);
+ res.map(|ty_ns| (ty_ns, path.segments().first()))
+ }
+ None => {
+ let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
+ match remaining_idx {
+ Some(remaining_idx) => {
+ if remaining_idx + 1 == path.segments().len() {
+ Some((ty, path.segments().last()))
+ } else {
+ None
+ }
+ }
+ None => Some((ty, None)),
+ }
+ }
+ }?;
+
+ // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
+ // within the trait's associated types.
+ if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
+ if let Some(type_alias_id) =
+ db.trait_data(trait_id).associated_type_by_name(unresolved.name)
+ {
+ return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
+ }
+ }
+
+ let res = match ty {
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
@@ -1175,11 +1206,28 @@ fn resolve_hir_path_qualifier(
TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()),
- })
- .or_else(|| {
- resolver
- .resolve_module_path_in_items(db.upcast(), path.mod_path()?)
- .take_types()
- .map(|it| PathResolution::Def(it.into()))
- })
+ };
+ match unresolved {
+ Some(unresolved) => resolver
+ .generic_def()
+ .and_then(|def| {
+ hir_ty::associated_type_shorthand_candidates(
+ db,
+ def,
+ res.in_type_ns()?,
+ |name, id| (name == unresolved.name).then_some(id),
+ )
+ })
+ .map(TypeAlias::from)
+ .map(Into::into)
+ .map(PathResolution::Def),
+ None => Some(res),
+ }
+ })()
+ .or_else(|| {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path()?)
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ })
}
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index a5d4cfeb27..a87becd63e 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -20,7 +20,10 @@ use triomphe::Arc;
use serde::{Deserialize, Serialize};
use crate::{
- msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS},
+ msg::{
+ deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro,
+ ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT,
+ },
process::ProcMacroProcessSrv,
};
@@ -166,6 +169,11 @@ impl ProcMacro {
call_site,
mixed_site,
},
+ span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT {
+ serialize_span_data_index_map(&span_data_table)
+ } else {
+ Vec::new()
+ },
};
let response = self
@@ -178,9 +186,14 @@ impl ProcMacro {
msg::Response::ExpandMacro(it) => {
Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
}
- msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
- Err(ServerError { message: "unexpected response".to_string(), io: None })
- }
+ msg::Response::ExpandMacroExtended(it) => Ok(it.map(|resp| {
+ FlatTree::to_subtree_resolved(
+ resp.tree,
+ version,
+ &deserialize_span_data_index_map(&resp.span_data_table),
+ )
+ })),
+ _ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
}
}
}
diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs
index 18fd9ed728..557ddba5c7 100644
--- a/crates/proc-macro-api/src/msg.rs
+++ b/crates/proc-macro-api/src/msg.rs
@@ -10,28 +10,63 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::ProcMacroKind;
-pub use crate::msg::flat::{FlatTree, TokenId};
+pub use crate::msg::flat::{
+ deserialize_span_data_index_map, serialize_span_data_index_map, FlatTree, SpanDataIndexMap,
+ TokenId,
+};
// The versions of the server protocol
pub const NO_VERSION_CHECK_VERSION: u32 = 0;
pub const VERSION_CHECK_VERSION: u32 = 1;
pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
pub const HAS_GLOBAL_SPANS: u32 = 3;
+pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4;
-pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS;
+pub const CURRENT_API_VERSION: u32 = RUST_ANALYZER_SPAN_SUPPORT;
#[derive(Debug, Serialize, Deserialize)]
pub enum Request {
+ /// Since [`NO_VERSION_CHECK_VERSION`]
ListMacros { dylib_path: PathBuf },
+ /// Since [`NO_VERSION_CHECK_VERSION`]
ExpandMacro(ExpandMacro),
+ /// Since [`VERSION_CHECK_VERSION`]
ApiVersionCheck {},
+ /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
+ SetConfig(ServerConfig),
+}
+
+#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]
+pub enum SpanMode {
+ #[default]
+ Id,
+ RustAnalyzer,
}
#[derive(Debug, Serialize, Deserialize)]
pub enum Response {
+ /// Since [`NO_VERSION_CHECK_VERSION`]
ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
+ /// Since [`NO_VERSION_CHECK_VERSION`]
ExpandMacro(Result<FlatTree, PanicMessage>),
+ /// Since [`NO_VERSION_CHECK_VERSION`]
ApiVersionCheck(u32),
+ /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
+ SetConfig(ServerConfig),
+ /// Since [`RUST_ANALYZER_SPAN_SUPPORT`]
+ ExpandMacroExtended(Result<ExpandMacroExtended, PanicMessage>),
+}
+
+#[derive(Debug, Serialize, Deserialize, Default)]
+#[serde(default)]
+pub struct ServerConfig {
+ pub span_mode: SpanMode,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpandMacroExtended {
+ pub tree: FlatTree,
+ pub span_data_table: Vec<u32>,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -64,9 +99,12 @@ pub struct ExpandMacro {
#[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
#[serde(default)]
pub has_global_spans: ExpnGlobals,
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ #[serde(default)]
+ pub span_data_table: Vec<u32>,
}
-#[derive(Default, Debug, Serialize, Deserialize)]
+#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]
pub struct ExpnGlobals {
#[serde(skip_serializing)]
#[serde(default)]
@@ -241,6 +279,7 @@ mod tests {
call_site: 0,
mixed_site: 0,
},
+ span_data_table: Vec::new(),
};
let json = serde_json::to_string(&task).unwrap();
diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs
index a12581ac13..8dfaba5262 100644
--- a/crates/proc-macro-api/src/msg/flat.rs
+++ b/crates/proc-macro-api/src/msg/flat.rs
@@ -38,12 +38,45 @@
use std::collections::{HashMap, VecDeque};
use indexmap::IndexSet;
+use la_arena::RawIdx;
use serde::{Deserialize, Serialize};
-use span::Span;
+use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
+use text_size::TextRange;
use crate::msg::ENCODE_CLOSE_SPAN_VERSION;
-type SpanIndexMap = IndexSet<Span>;
+pub type SpanDataIndexMap = IndexSet<Span>;
+
+pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> {
+ map.iter()
+ .flat_map(|span| {
+ [
+ span.anchor.file_id.index(),
+ span.anchor.ast_id.into_raw().into_u32(),
+ span.range.start().into(),
+ span.range.end().into(),
+ span.ctx.into_u32(),
+ ]
+ })
+ .collect()
+}
+
+pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap {
+ debug_assert!(map.len() % 5 == 0);
+ map.chunks_exact(5)
+ .map(|span| {
+ let &[file_id, ast_id, start, end, e] = span else { unreachable!() };
+ Span {
+ anchor: SpanAnchor {
+ file_id: FileId::from_raw(file_id),
+ ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)),
+ },
+ range: TextRange::new(start.into(), end.into()),
+ ctx: SyntaxContextId::from_u32(e),
+ }
+ })
+ .collect()
+}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(pub u32);
@@ -93,7 +126,7 @@ impl FlatTree {
pub fn new(
subtree: &tt::Subtree<Span>,
version: u32,
- span_data_table: &mut SpanIndexMap,
+ span_data_table: &mut SpanDataIndexMap,
) -> FlatTree {
let mut w = Writer {
string_table: HashMap::new(),
@@ -155,7 +188,7 @@ impl FlatTree {
pub fn to_subtree_resolved(
self,
version: u32,
- span_data_table: &SpanIndexMap,
+ span_data_table: &SpanDataIndexMap,
) -> tt::Subtree<Span> {
Reader {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs
index 9a20fa63ed..3494164c06 100644
--- a/crates/proc-macro-api/src/process.rs
+++ b/crates/proc-macro-api/src/process.rs
@@ -9,7 +9,7 @@ use paths::{AbsPath, AbsPathBuf};
use stdx::JodChild;
use crate::{
- msg::{Message, Request, Response, CURRENT_API_VERSION},
+ msg::{Message, Request, Response, SpanMode, CURRENT_API_VERSION, RUST_ANALYZER_SPAN_SUPPORT},
ProcMacroKind, ServerError,
};
@@ -19,6 +19,7 @@ pub(crate) struct ProcMacroProcessSrv {
stdin: ChildStdin,
stdout: BufReader<ChildStdout>,
version: u32,
+ mode: SpanMode,
}
impl ProcMacroProcessSrv {
@@ -27,7 +28,13 @@ impl ProcMacroProcessSrv {
let mut process = Process::run(process_path.clone(), null_stderr)?;
let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
- io::Result::Ok(ProcMacroProcessSrv { _process: process, stdin, stdout, version: 0 })
+ io::Result::Ok(ProcMacroProcessSrv {
+ _process: process,
+ stdin,
+ stdout,
+ version: 0,
+ mode: SpanMode::Id,
+ })
};
let mut srv = create_srv(true)?;
tracing::info!("sending version check");
@@ -43,6 +50,11 @@ impl ProcMacroProcessSrv {
tracing::info!("got version {v}");
srv = create_srv(false)?;
srv.version = v;
+ if srv.version > RUST_ANALYZER_SPAN_SUPPORT {
+ if let Ok(mode) = srv.enable_rust_analyzer_spans() {
+ srv.mode = mode;
+ }
+ }
Ok(srv)
}
Err(e) => {
@@ -62,9 +74,19 @@ impl ProcMacroProcessSrv {
match response {
Response::ApiVersionCheck(version) => Ok(version),
- Response::ExpandMacro { .. } | Response::ListMacros { .. } => {
- Err(ServerError { message: "unexpected response".to_string(), io: None })
- }
+ _ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
+ }
+ }
+
+ fn enable_rust_analyzer_spans(&mut self) -> Result<SpanMode, ServerError> {
+ let request = Request::SetConfig(crate::msg::ServerConfig {
+ span_mode: crate::msg::SpanMode::RustAnalyzer,
+ });
+ let response = self.send_task(request)?;
+
+ match response {
+ Response::SetConfig(crate::msg::ServerConfig { span_mode }) => Ok(span_mode),
+ _ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
}
}
@@ -78,9 +100,7 @@ impl ProcMacroProcessSrv {
match response {
Response::ListMacros(it) => Ok(it),
- Response::ExpandMacro { .. } | Response::ApiVersionCheck { .. } => {
- Err(ServerError { message: "unexpected response".to_string(), io: None })
- }
+ _ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
}
}
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index 50ce586fc4..000a526e9f 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -39,10 +39,22 @@ fn run() -> io::Result<()> {
msg::Request::ListMacros { dylib_path } => {
msg::Response::ListMacros(srv.list_macros(&dylib_path))
}
- msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)),
+ msg::Request::ExpandMacro(task) => match srv.span_mode() {
+ msg::SpanMode::Id => msg::Response::ExpandMacro(srv.expand(task).map(|(it, _)| it)),
+ msg::SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended(
+ srv.expand(task).map(|(tree, span_data_table)| msg::ExpandMacroExtended {
+ tree,
+ span_data_table,
+ }),
+ ),
+ },
msg::Request::ApiVersionCheck {} => {
msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION)
}
+ msg::Request::SetConfig(config) => {
+ srv.set_span_mode(config.span_mode);
+ msg::Response::SetConfig(config)
+ }
};
write_response(res)?
}
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index 99993f16e2..bef2c30e9f 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -26,13 +26,15 @@ stdx.workspace = true
tt.workspace = true
mbe.workspace = true
paths.workspace = true
+base-db.workspace = true
+span.workspace = true
proc-macro-api.workspace = true
[dev-dependencies]
expect-test = "1.4.0"
# used as proc macro test targets
-proc-macro-test.workspace = true
+proc-macro-test.path = "./proc-macro-test"
[features]
-sysroot-abi = []
+sysroot-abi = ["proc-macro-test/sysroot-abi"]
diff --git a/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
new file mode 100644
index 0000000000..55be6bc23b
--- /dev/null
+++ b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "proc-macro-test"
+version = "0.0.0"
+publish = false
+
+edition = "2021"
+license = "MIT OR Apache-2.0"
+
+[lib]
+doctest = false
+
+[build-dependencies]
+cargo_metadata = "0.18.1"
+
+# local deps
+toolchain = { path = "../../toolchain", version = "0.0.0" }
+
+[features]
+sysroot-abi = []
diff --git a/crates/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs
index 7827157865..7299147686 100644
--- a/crates/proc-macro-test/build.rs
+++ b/crates/proc-macro-srv/proc-macro-test/build.rs
@@ -70,6 +70,9 @@ fn main() {
// instance to use the same target directory.
.arg("--target-dir")
.arg(&target_dir);
+ if cfg!(feature = "sysroot-abi") {
+ cmd.args(["--features", "sysroot-abi"]);
+ }
if let Ok(target) = std::env::var("TARGET") {
cmd.args(["--target", &target]);
diff --git a/crates/proc-macro-test/imp/.gitignore b/crates/proc-macro-srv/proc-macro-test/imp/.gitignore
index 2c96eb1b65..2c96eb1b65 100644
--- a/crates/proc-macro-test/imp/.gitignore
+++ b/crates/proc-macro-srv/proc-macro-test/imp/.gitignore
diff --git a/crates/proc-macro-test/imp/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
index 2a36737cef..dc94fcd61a 100644
--- a/crates/proc-macro-test/imp/Cargo.toml
+++ b/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml
@@ -9,8 +9,11 @@ publish = false
doctest = false
proc-macro = true
-[workspace]
-
[dependencies]
# this crate should not have any dependencies, since it uses its own workspace,
# and its own `Cargo.lock`
+
+[features]
+sysroot-abi = []
+
+[workspace]
diff --git a/crates/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
index 32510fba2f..03241b16be 100644
--- a/crates/proc-macro-test/imp/src/lib.rs
+++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
@@ -1,6 +1,8 @@
//! Exports a few trivial procedural macros for testing.
+#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
#![warn(rust_2018_idioms, unused_lifetimes)]
+#![feature(proc_macro_span, proc_macro_def_site)]
use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
@@ -49,6 +51,29 @@ pub fn fn_like_mk_idents(_args: TokenStream) -> TokenStream {
TokenStream::from_iter(trees)
}
+#[proc_macro]
+pub fn fn_like_span_join(args: TokenStream) -> TokenStream {
+ let args = &mut args.into_iter();
+ let first = args.next().unwrap();
+ let second = args.next().unwrap();
+ TokenStream::from(TokenTree::from(Ident::new_raw(
+ "joined",
+ first.span().join(second.span()).unwrap(),
+ )))
+}
+
+#[proc_macro]
+pub fn fn_like_span_ops(args: TokenStream) -> TokenStream {
+ let args = &mut args.into_iter();
+ let mut first = args.next().unwrap();
+ first.set_span(Span::def_site());
+ let mut second = args.next().unwrap();
+ second.set_span(second.span().resolved_at(Span::def_site()));
+ let mut third = args.next().unwrap();
+ third.set_span(third.span().start());
+ TokenStream::from_iter(vec![first, second, third])
+}
+
#[proc_macro_attribute]
pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream {
item
diff --git a/crates/proc-macro-test/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/src/lib.rs
index 739c6ec6f4..739c6ec6f4 100644
--- a/crates/proc-macro-test/src/lib.rs
+++ b/crates/proc-macro-srv/proc-macro-test/src/lib.rs
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index f20e6832f6..52b4cced5f 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -11,7 +11,10 @@ use libloading::Library;
use memmap2::Mmap;
use object::Object;
use paths::AbsPath;
-use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind};
+use proc_macro::bridge;
+use proc_macro_api::{read_dylib_info, ProcMacroKind};
+
+use crate::ProcMacroSrvSpan;
const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
@@ -147,15 +150,18 @@ impl Expander {
Ok(Expander { inner: library })
}
- pub fn expand(
+ pub fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
- macro_body: &crate::tt::Subtree,
- attributes: Option<&crate::tt::Subtree>,
- def_site: TokenId,
- call_site: TokenId,
- mixed_site: TokenId,
- ) -> Result<crate::tt::Subtree, String> {
+ macro_body: tt::Subtree<S>,
+ attributes: Option<tt::Subtree<S>>,
+ def_site: S,
+ call_site: S,
+ mixed_site: S,
+ ) -> Result<tt::Subtree<S>, String>
+ where
+ <S::Server as bridge::server::Types>::TokenStream: Default,
+ {
let result = self
.inner
.proc_macros
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index 56529f71d8..7cd6df2df8 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -32,36 +32,67 @@ use std::{
};
use proc_macro_api::{
- msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION},
+ msg::{
+ self, deserialize_span_data_index_map, serialize_span_data_index_map, ExpnGlobals,
+ SpanMode, TokenId, CURRENT_API_VERSION,
+ },
ProcMacroKind,
};
+use span::Span;
-mod tt {
- pub use proc_macro_api::msg::TokenId;
+use crate::server::TokenStream;
- pub use ::tt::*;
+// see `build.rs`
+include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
- pub type Subtree = ::tt::Subtree<TokenId>;
- pub type TokenTree = ::tt::TokenTree<TokenId>;
- pub type Delimiter = ::tt::Delimiter<TokenId>;
- pub type Leaf = ::tt::Leaf<TokenId>;
- pub type Literal = ::tt::Literal<TokenId>;
- pub type Punct = ::tt::Punct<TokenId>;
- pub type Ident = ::tt::Ident<TokenId>;
+trait ProcMacroSrvSpan: tt::Span {
+ type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
+ fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}
-// see `build.rs`
-include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
+impl ProcMacroSrvSpan for TokenId {
+ type Server = server::token_id::TokenIdServer;
+
+ fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
+ Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site }
+ }
+}
+impl ProcMacroSrvSpan for Span {
+ type Server = server::rust_analyzer_span::RaSpanServer;
+ fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
+ Self::Server {
+ interner: &server::SYMBOL_INTERNER,
+ call_site,
+ def_site,
+ mixed_site,
+ tracked_env_vars: Default::default(),
+ tracked_paths: Default::default(),
+ }
+ }
+}
#[derive(Default)]
pub struct ProcMacroSrv {
expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
+ span_mode: SpanMode,
}
const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
impl ProcMacroSrv {
- pub fn expand(&mut self, task: msg::ExpandMacro) -> Result<msg::FlatTree, msg::PanicMessage> {
+ pub fn set_span_mode(&mut self, span_mode: SpanMode) {
+ self.span_mode = span_mode;
+ }
+
+ pub fn span_mode(&self) -> SpanMode {
+ self.span_mode
+ }
+
+ pub fn expand(
+ &mut self,
+ task: msg::ExpandMacro,
+ ) -> Result<(msg::FlatTree, Vec<u32>), msg::PanicMessage> {
+ let span_mode = self.span_mode;
let expander = self.expander(task.lib.as_ref()).map_err(|err| {
debug_assert!(false, "should list macros before asking to expand");
msg::PanicMessage(format!("failed to load macro: {err}"))
@@ -71,10 +102,10 @@ impl ProcMacroSrv {
for (k, v) in &task.env {
env::set_var(k, v);
}
- let prev_working_dir = match task.current_dir {
+ let prev_working_dir = match &task.current_dir {
Some(dir) => {
let prev_working_dir = std::env::current_dir().ok();
- if let Err(err) = std::env::set_current_dir(&dir) {
+ if let Err(err) = std::env::set_current_dir(dir) {
eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}")
}
prev_working_dir
@@ -83,38 +114,15 @@ impl ProcMacroSrv {
};
let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans;
- let def_site = TokenId(def_site as u32);
- let call_site = TokenId(call_site as u32);
- let mixed_site = TokenId(mixed_site as u32);
-
- let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
- let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
- let result = thread::scope(|s| {
- let thread = thread::Builder::new()
- .stack_size(EXPANDER_STACK_SIZE)
- .name(task.macro_name.clone())
- .spawn_scoped(s, || {
- expander
- .expand(
- &task.macro_name,
- &macro_body,
- attributes.as_ref(),
- def_site,
- call_site,
- mixed_site,
- )
- .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
- });
- let res = match thread {
- Ok(handle) => handle.join(),
- Err(e) => std::panic::resume_unwind(Box::new(e)),
- };
-
- match res {
- Ok(res) => res,
- Err(e) => std::panic::resume_unwind(e),
+
+ let result = match span_mode {
+ SpanMode::Id => {
+ expand_id(task, expander, def_site, call_site, mixed_site).map(|it| (it, vec![]))
}
- });
+ SpanMode::RustAnalyzer => {
+ expand_ra_span(task, expander, def_site, call_site, mixed_site)
+ }
+ };
prev_env.rollback();
@@ -155,6 +163,98 @@ impl ProcMacroSrv {
}
}
+fn expand_id(
+ task: msg::ExpandMacro,
+ expander: &dylib::Expander,
+ def_site: usize,
+ call_site: usize,
+ mixed_site: usize,
+) -> Result<msg::FlatTree, String> {
+ let def_site = TokenId(def_site as u32);
+ let call_site = TokenId(call_site as u32);
+ let mixed_site = TokenId(mixed_site as u32);
+
+ let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
+ let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
+ let result = thread::scope(|s| {
+ let thread = thread::Builder::new()
+ .stack_size(EXPANDER_STACK_SIZE)
+ .name(task.macro_name.clone())
+ .spawn_scoped(s, || {
+ expander
+ .expand(
+ &task.macro_name,
+ macro_body,
+ attributes,
+ def_site,
+ call_site,
+ mixed_site,
+ )
+ .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
+ });
+ let res = match thread {
+ Ok(handle) => handle.join(),
+ Err(e) => std::panic::resume_unwind(Box::new(e)),
+ };
+
+ match res {
+ Ok(res) => res,
+ Err(e) => std::panic::resume_unwind(e),
+ }
+ });
+ result
+}
+
+fn expand_ra_span(
+ task: msg::ExpandMacro,
+ expander: &dylib::Expander,
+ def_site: usize,
+ call_site: usize,
+ mixed_site: usize,
+) -> Result<(msg::FlatTree, Vec<u32>), String> {
+ let mut span_data_table = deserialize_span_data_index_map(&task.span_data_table);
+
+ let def_site = span_data_table[def_site];
+ let call_site = span_data_table[call_site];
+ let mixed_site = span_data_table[mixed_site];
+
+ let macro_body = task.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table);
+ let attributes =
+ task.attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
+ let result = thread::scope(|s| {
+ let thread = thread::Builder::new()
+ .stack_size(EXPANDER_STACK_SIZE)
+ .name(task.macro_name.clone())
+ .spawn_scoped(s, || {
+ expander
+ .expand(
+ &task.macro_name,
+ macro_body,
+ attributes,
+ def_site,
+ call_site,
+ mixed_site,
+ )
+ .map(|it| {
+ (
+ msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table),
+ serialize_span_data_index_map(&span_data_table),
+ )
+ })
+ });
+ let res = match thread {
+ Ok(handle) => handle.join(),
+ Err(e) => std::panic::resume_unwind(Box::new(e)),
+ };
+
+ match res {
+ Ok(res) => res,
+ Err(e) => std::panic::resume_unwind(e),
+ }
+ });
+ result
+}
+
pub struct PanicMessage {
message: Option<String>,
}
diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs
index 716b85d096..3fe968c81c 100644
--- a/crates/proc-macro-srv/src/proc_macros.rs
+++ b/crates/proc-macro-srv/src/proc_macros.rs
@@ -2,9 +2,9 @@
use libloading::Library;
use proc_macro::bridge;
-use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo};
+use proc_macro_api::{ProcMacroKind, RustCInfo};
-use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt};
+use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan};
pub(crate) struct ProcMacros {
exported_macros: Vec<bridge::client::ProcMacro>,
@@ -40,19 +40,19 @@ impl ProcMacros {
Err(LoadProcMacroDylibError::AbiMismatch(info.version_string))
}
- pub(crate) fn expand(
+ pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
- macro_body: &tt::Subtree,
- attributes: Option<&tt::Subtree>,
- def_site: TokenId,
- call_site: TokenId,
- mixed_site: TokenId,
- ) -> Result<tt::Subtree, crate::PanicMessage> {
- let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone());
+ macro_body: tt::Subtree<S>,
+ attributes: Option<tt::Subtree<S>>,
+ def_site: S,
+ call_site: S,
+ mixed_site: S,
+ ) -> Result<tt::Subtree<S>, crate::PanicMessage> {
+ let parsed_body = crate::server::TokenStream::with_subtree(macro_body);
- let parsed_attributes = attributes.map_or(crate::server::TokenStream::new(), |attr| {
- crate::server::TokenStream::with_subtree(attr.clone())
+ let parsed_attributes = attributes.map_or_else(crate::server::TokenStream::new, |attr| {
+ crate::server::TokenStream::with_subtree(attr)
});
for proc_macro in &self.exported_macros {
@@ -62,12 +62,7 @@ impl ProcMacros {
{
let res = client.run(
&bridge::server::SameThread,
- crate::server::RustAnalyzer {
- interner: &SYMBOL_INTERNER,
- call_site,
- def_site,
- mixed_site,
- },
+ S::make_server(call_site, def_site, mixed_site),
parsed_body,
false,
);
@@ -78,12 +73,7 @@ impl ProcMacros {
bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
- crate::server::RustAnalyzer {
- interner: &SYMBOL_INTERNER,
- call_site,
- def_site,
- mixed_site,
- },
+ S::make_server(call_site, def_site, mixed_site),
parsed_body,
false,
);
@@ -94,13 +84,7 @@ impl ProcMacros {
bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
- crate::server::RustAnalyzer {
- interner: &SYMBOL_INTERNER,
-
- call_site,
- def_site,
- mixed_site,
- },
+ S::make_server(call_site, def_site, mixed_site),
parsed_attributes,
parsed_body,
false,
@@ -113,7 +97,7 @@ impl ProcMacros {
}
}
- Err(bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ Err(bridge::PanicMessage::String(format!("proc-macro `{macro_name}` is missing")).into())
}
pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs
index 917d8a6e26..1854322ddb 100644
--- a/crates/proc-macro-srv/src/server.rs
+++ b/crates/proc-macro-srv/src/server.rs
@@ -8,226 +8,18 @@
//!
//! FIXME: No span and source file information is implemented yet
-use proc_macro::bridge::{self, server};
+use proc_macro::bridge;
mod token_stream;
-use proc_macro_api::msg::TokenId;
pub use token_stream::TokenStream;
-use token_stream::TokenStreamBuilder;
+pub mod token_id;
+pub mod rust_analyzer_span;
mod symbol;
pub use symbol::*;
+use tt::Spacing;
-use std::{
- iter,
- ops::{Bound, Range},
-};
-
-use crate::tt;
-
-type Group = tt::Subtree;
-type TokenTree = tt::TokenTree;
-#[allow(unused)]
-type Punct = tt::Punct;
-type Spacing = tt::Spacing;
-#[allow(unused)]
-type Literal = tt::Literal;
-type Span = tt::TokenId;
-
-#[derive(Clone)]
-pub struct SourceFile {
- // FIXME stub
-}
-
-pub struct FreeFunctions;
-
-pub struct RustAnalyzer {
- // FIXME: store span information here.
- pub(crate) interner: SymbolInternerRef,
- pub call_site: TokenId,
- pub def_site: TokenId,
- pub mixed_site: TokenId,
-}
-
-impl server::Types for RustAnalyzer {
- type FreeFunctions = FreeFunctions;
- type TokenStream = TokenStream;
- type SourceFile = SourceFile;
- type Span = Span;
- type Symbol = Symbol;
-}
-
-impl server::FreeFunctions for RustAnalyzer {
- fn injected_env_var(&mut self, _var: &str) -> Option<String> {
- None
- }
-
- fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
- // FIXME: track env var accesses
- // https://github.com/rust-lang/rust/pull/71858
- }
- fn track_path(&mut self, _path: &str) {}
-
- fn literal_from_str(
- &mut self,
- s: &str,
- ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
- // FIXME: keep track of LitKind and Suffix
- Ok(bridge::Literal {
- kind: bridge::LitKind::Err,
- symbol: Symbol::intern(self.interner, s),
- suffix: None,
- span: self.call_site,
- })
- }
-
- fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
- // FIXME handle diagnostic
- }
-}
-
-impl server::TokenStream for RustAnalyzer {
- fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
- stream.is_empty()
- }
- fn from_str(&mut self, src: &str) -> Self::TokenStream {
- Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string")
- }
- fn to_string(&mut self, stream: &Self::TokenStream) -> String {
- stream.to_string()
- }
- fn from_token_tree(
- &mut self,
- tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
- ) -> Self::TokenStream {
- match tree {
- bridge::TokenTree::Group(group) => {
- let group = Group {
- delimiter: delim_to_internal(group.delimiter, group.span),
- token_trees: match group.stream {
- Some(stream) => stream.into_iter().collect(),
- None => Vec::new(),
- },
- };
- let tree = TokenTree::from(group);
- Self::TokenStream::from_iter(iter::once(tree))
- }
-
- bridge::TokenTree::Ident(ident) => {
- let text = ident.sym.text(self.interner);
- let text =
- if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text };
- let ident: tt::Ident = tt::Ident { text, span: ident.span };
- let leaf = tt::Leaf::from(ident);
- let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(iter::once(tree))
- }
-
- bridge::TokenTree::Literal(literal) => {
- let literal = LiteralFormatter(literal);
- let text = literal.with_stringify_parts(self.interner, |parts| {
- ::tt::SmolStr::from_iter(parts.iter().copied())
- });
-
- let literal = tt::Literal { text, span: literal.0.span };
- let leaf = tt::Leaf::from(literal);
- let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(iter::once(tree))
- }
-
- bridge::TokenTree::Punct(p) => {
- let punct = tt::Punct {
- char: p.ch as char,
- spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
- span: p.span,
- };
- let leaf = tt::Leaf::from(punct);
- let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(iter::once(tree))
- }
- }
- }
-
- fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
- Ok(self_.clone())
- }
-
- fn concat_trees(
- &mut self,
- base: Option<Self::TokenStream>,
- trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
- ) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::new();
- if let Some(base) = base {
- builder.push(base);
- }
- for tree in trees {
- builder.push(self.from_token_tree(tree));
- }
- builder.build()
- }
-
- fn concat_streams(
- &mut self,
- base: Option<Self::TokenStream>,
- streams: Vec<Self::TokenStream>,
- ) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::new();
- if let Some(base) = base {
- builder.push(base);
- }
- for stream in streams {
- builder.push(stream);
- }
- builder.build()
- }
-
- fn into_trees(
- &mut self,
- stream: Self::TokenStream,
- ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- stream
- .into_iter()
- .map(|tree| match tree {
- tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
- bridge::TokenTree::Ident(bridge::Ident {
- sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")),
- is_raw: ident.text.starts_with("r#"),
- span: ident.span,
- })
- }
- tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
- bridge::TokenTree::Literal(bridge::Literal {
- // FIXME: handle literal kinds
- kind: bridge::LitKind::Err,
- symbol: Symbol::intern(self.interner, &lit.text),
- // FIXME: handle suffixes
- suffix: None,
- span: lit.span,
- })
- }
- tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
- bridge::TokenTree::Punct(bridge::Punct {
- ch: punct.char as u8,
- joint: punct.spacing == Spacing::Joint,
- span: punct.span,
- })
- }
- tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
- delimiter: delim_to_external(subtree.delimiter),
- stream: if subtree.token_trees.is_empty() {
- None
- } else {
- Some(subtree.token_trees.into_iter().collect())
- },
- span: bridge::DelimSpan::from_single(subtree.delimiter.open),
- }),
- })
- .collect()
- }
-}
-
-fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan<Span>) -> tt::Delimiter {
+fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> {
let kind = match d {
proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
@@ -237,7 +29,7 @@ fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan<Span>) ->
tt::Delimiter { open: span.open, close: span.close, kind }
}
-fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter {
+fn delim_to_external<S>(d: tt::Delimiter<S>) -> proc_macro::Delimiter {
match d.kind {
tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis,
tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace,
@@ -262,121 +54,9 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
}
}
-impl server::SourceFile for RustAnalyzer {
- // FIXME these are all stubs
- fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
- true
- }
- fn path(&mut self, _file: &Self::SourceFile) -> String {
- String::new()
- }
- fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
- true
- }
-}
-
-impl server::Span for RustAnalyzer {
- fn debug(&mut self, span: Self::Span) -> String {
- format!("{:?}", span.0)
- }
- fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
- SourceFile {}
- }
- fn save_span(&mut self, _span: Self::Span) -> usize {
- // FIXME stub
- 0
- }
- fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
- // FIXME stub
- self.call_site
- }
- /// Recent feature, not yet in the proc_macro
- ///
- /// See PR:
- /// https://github.com/rust-lang/rust/pull/55780
- fn source_text(&mut self, _span: Self::Span) -> Option<String> {
- None
- }
-
- fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
- // FIXME handle span
- None
- }
- fn source(&mut self, span: Self::Span) -> Self::Span {
- // FIXME handle span
- span
- }
- fn byte_range(&mut self, _span: Self::Span) -> Range<usize> {
- // FIXME handle span
- Range { start: 0, end: 0 }
- }
- fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
- // Just return the first span again, because some macros will unwrap the result.
- Some(first)
- }
- fn subspan(
- &mut self,
- span: Self::Span,
- _start: Bound<usize>,
- _end: Bound<usize>,
- ) -> Option<Self::Span> {
- // Just return the span again, because some macros will unwrap the result.
- Some(span)
- }
- fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
- // FIXME handle span
- self.call_site
- }
-
- fn end(&mut self, _self_: Self::Span) -> Self::Span {
- self.call_site
- }
-
- fn start(&mut self, _self_: Self::Span) -> Self::Span {
- self.call_site
- }
-
- fn line(&mut self, _span: Self::Span) -> usize {
- // FIXME handle line
- 0
- }
-
- fn column(&mut self, _span: Self::Span) -> usize {
- // FIXME handle column
- 0
- }
-}
-
-impl server::Symbol for RustAnalyzer {
- fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
- // FIXME: nfc-normalize and validate idents
- Ok(<Self as server::Server>::intern_symbol(string))
- }
-}
-
-impl server::Server for RustAnalyzer {
- fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
- bridge::ExpnGlobals {
- def_site: self.def_site,
- call_site: self.call_site,
- mixed_site: self.mixed_site,
- }
- }
-
- fn intern_symbol(ident: &str) -> Self::Symbol {
- // FIXME: should be `self.interner` once the proc-macro api allows it.
- Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident))
- }
-
- fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
- // FIXME: should be `self.interner` once the proc-macro api allows it.
- f(symbol.text(&SYMBOL_INTERNER).as_str())
- }
-}
-
-struct LiteralFormatter(bridge::Literal<tt::TokenId, Symbol>);
+struct LiteralFormatter<S>(bridge::Literal<S, Symbol>);
-impl LiteralFormatter {
+impl<S> LiteralFormatter<S> {
/// Invokes the callback with a `&[&str]` consisting of each part of the
/// literal's representation. This is done to allow the `ToString` and
/// `Display` implementations to borrow references to symbol values, and
@@ -427,66 +107,3 @@ impl LiteralFormatter {
f(symbol.as_str(), suffix.as_str())
}
}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_ra_server_to_string() {
- let s = TokenStream {
- token_trees: vec![
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: "struct".into(),
- span: tt::TokenId(0),
- })),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: "T".into(),
- span: tt::TokenId(0),
- })),
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId(0),
- close: tt::TokenId(0),
- kind: tt::DelimiterKind::Brace,
- },
- token_trees: vec![],
- }),
- ],
- };
-
- assert_eq!(s.to_string(), "struct T {}");
- }
-
- #[test]
- fn test_ra_server_from_str() {
- let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId(0),
- close: tt::TokenId(0),
- kind: tt::DelimiterKind::Parenthesis,
- },
- token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: "a".into(),
- span: tt::TokenId(0),
- }))],
- });
-
- let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
- assert_eq!(t1.token_trees.len(), 1);
- assert_eq!(t1.token_trees[0], subtree_paren_a);
-
- let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
- assert_eq!(t2.token_trees.len(), 2);
- assert_eq!(t2.token_trees[0], subtree_paren_a);
-
- let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
- assert_eq!(
- underscore.token_trees[0],
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: "_".into(),
- span: tt::TokenId(0),
- }))
- );
- }
-}
diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs
new file mode 100644
index 0000000000..bcf3600d27
--- /dev/null
+++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs
@@ -0,0 +1,411 @@
+//! proc-macro server backend based on rust-analyzer's internal span representation
+//! This backend is used solely by rust-analyzer as it ties into rust-analyzer internals.
+//!
+//! It is an unfortunate result of how the proc-macro API works that we need to look into the
+//! concrete representation of the spans, and as such, RustRover cannot make use of this unless they
+//! change their representation to be compatible with rust-analyzer's.
+use std::{
+ collections::{HashMap, HashSet},
+ iter,
+ ops::{Bound, Range},
+};
+
+use ::tt::{TextRange, TextSize};
+use proc_macro::bridge::{self, server};
+use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
+
+use crate::server::{
+ delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter,
+ Symbol, SymbolInternerRef, SYMBOL_INTERNER,
+};
+mod tt {
+ pub use ::tt::*;
+
+ pub type Subtree = ::tt::Subtree<super::Span>;
+ pub type TokenTree = ::tt::TokenTree<super::Span>;
+ pub type Leaf = ::tt::Leaf<super::Span>;
+ pub type Literal = ::tt::Literal<super::Span>;
+ pub type Punct = ::tt::Punct<super::Span>;
+ pub type Ident = ::tt::Ident<super::Span>;
+}
+
+type TokenStream = crate::server::TokenStream<Span>;
+
+#[derive(Clone)]
+pub struct SourceFile;
+pub struct FreeFunctions;
+
+pub struct RaSpanServer {
+ pub(crate) interner: SymbolInternerRef,
+ // FIXME: Report this back to the caller to track as dependencies
+ pub tracked_env_vars: HashMap<Box<str>, Option<Box<str>>>,
+ // FIXME: Report this back to the caller to track as dependencies
+ pub tracked_paths: HashSet<Box<str>>,
+ pub call_site: Span,
+ pub def_site: Span,
+ pub mixed_site: Span,
+}
+
+impl server::Types for RaSpanServer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type SourceFile = SourceFile;
+ type Span = Span;
+ type Symbol = Symbol;
+}
+
+impl server::FreeFunctions for RaSpanServer {
+ fn injected_env_var(&mut self, _: &str) -> Option<std::string::String> {
+ None
+ }
+
+ fn track_env_var(&mut self, var: &str, value: Option<&str>) {
+ self.tracked_env_vars.insert(var.into(), value.map(Into::into));
+ }
+ fn track_path(&mut self, path: &str) {
+ self.tracked_paths.insert(path.into());
+ }
+
+ fn literal_from_str(
+ &mut self,
+ s: &str,
+ ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ // FIXME: keep track of LitKind and Suffix
+ Ok(bridge::Literal {
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(self.interner, s),
+ suffix: None,
+ span: self.call_site,
+ })
+ }
+
+ fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
+ // FIXME handle diagnostic
+ }
+}
+
+impl server::TokenStream for RaSpanServer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = tt::Subtree {
+ delimiter: delim_to_internal(group.delimiter, group.span),
+ token_trees: match group.stream {
+ Some(stream) => stream.into_iter().collect(),
+ None => Vec::new(),
+ },
+ };
+ let tree = tt::TokenTree::from(group);
+ Self::TokenStream::from_iter(iter::once(tree))
+ }
+
+ bridge::TokenTree::Ident(ident) => {
+ let text = ident.sym.text(self.interner);
+ let text =
+ if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text };
+ let ident: tt::Ident = tt::Ident { text, span: ident.span };
+ let leaf = tt::Leaf::from(ident);
+ let tree = tt::TokenTree::from(leaf);
+ Self::TokenStream::from_iter(iter::once(tree))
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let literal = LiteralFormatter(literal);
+ let text = literal.with_stringify_parts(self.interner, |parts| {
+ ::tt::SmolStr::from_iter(parts.iter().copied())
+ });
+
+ let literal = tt::Literal { text, span: literal.0.span };
+ let leaf: tt::Leaf = tt::Leaf::from(literal);
+ let tree = tt::TokenTree::from(leaf);
+ Self::TokenStream::from_iter(iter::once(tree))
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
+ span: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = tt::TokenTree::from(leaf);
+ Self::TokenStream::from_iter(iter::once(tree))
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ // FIXME: requires db, more importantly this requires name resolution so we would need to
+ // eagerly expand this proc-macro, but we can't know that this proc-macro is eager until we
+ // expand it ...
+ // This calls for some kind of marker that a proc-macro wants to access this eager API,
+ // otherwise we need to treat every proc-macro eagerly / or not support this.
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")),
+ is_raw: ident.text.starts_with("r#"),
+ span: ident.span,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ bridge::TokenTree::Literal(bridge::Literal {
+ // FIXME: handle literal kinds
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(self.interner, &lit.text),
+ // FIXME: handle suffixes
+ suffix: None,
+ span: lit.span,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: punct.char as u8,
+ joint: punct.spacing == tt::Spacing::Joint,
+ span: punct.span,
+ })
+ }
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+ delimiter: delim_to_external(subtree.delimiter),
+ stream: if subtree.token_trees.is_empty() {
+ None
+ } else {
+ Some(subtree.token_trees.into_iter().collect())
+ },
+ span: bridge::DelimSpan::from_single(subtree.delimiter.open),
+ }),
+ })
+ .collect()
+ }
+}
+
+impl server::SourceFile for RaSpanServer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Span for RaSpanServer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span)
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ // FIXME stub, requires db
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub, requires builtin quote! implementation
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub, requires builtin quote! implementation
+ self.call_site
+ }
+ /// Recent feature, not yet in the proc_macro
+ ///
+ /// See PR:
+ /// https://github.com/rust-lang/rust/pull/55780
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ // FIXME requires db, needs special handling wrt fixup spans
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME requires db, looks up the parent call site
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME requires db, returns the top level call site
+ span
+ }
+ fn byte_range(&mut self, span: Self::Span) -> Range<usize> {
+ // FIXME requires db to resolve the ast id, THIS IS NOT INCREMENTAL
+ Range { start: span.range.start().into(), end: span.range.end().into() }
+ }
+ fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
+ // We can't modify the span range for fixup spans, those are meaningful to fixup, so just
+ // prefer the non-fixup span.
+ if first.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
+ return Some(second);
+ }
+ if second.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
+ return Some(first);
+ }
+ // FIXME: Once we can talk back to the client, implement a "long join" request for anchors
+ // that differ in [AstId]s as joining those spans requires resolving the AstIds.
+ if first.anchor != second.anchor {
+ return None;
+ }
+ // Differing context, we can't merge these so prefer the one that's root
+ if first.ctx != second.ctx {
+ if first.ctx.is_root() {
+ return Some(second);
+ } else if second.ctx.is_root() {
+ return Some(first);
+ }
+ }
+ Some(Span {
+ range: first.range.cover(second.range),
+ anchor: second.anchor,
+ ctx: second.ctx,
+ })
+ }
+ fn subspan(
+ &mut self,
+ span: Self::Span,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // We can't modify the span range for fixup spans, those are meaningful to fixup.
+ if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
+ return Some(span);
+ }
+ let length = span.range.len().into();
+
+ let start: u32 = match start {
+ Bound::Included(lo) => lo,
+ Bound::Excluded(lo) => lo.checked_add(1)?,
+ Bound::Unbounded => 0,
+ }
+ .try_into()
+ .ok()?;
+
+ let end: u32 = match end {
+ Bound::Included(hi) => hi.checked_add(1)?,
+ Bound::Excluded(hi) => hi,
+ Bound::Unbounded => span.range.len().into(),
+ }
+ .try_into()
+ .ok()?;
+
+ // Bounds check the values, preventing addition overflow and OOB spans.
+ let span_start = span.range.start().into();
+ if (u32::MAX - start) < span_start
+ || (u32::MAX - end) < span_start
+ || start >= end
+ || end > length
+ {
+ return None;
+ }
+
+ Some(Span {
+ range: TextRange::new(TextSize::from(start), TextSize::from(end)) + span.range.start(),
+ ..span
+ })
+ }
+
+ fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
+ Span { ctx: at.ctx, ..span }
+ }
+
+ fn end(&mut self, span: Self::Span) -> Self::Span {
+ // We can't modify the span range for fixup spans, those are meaningful to fixup.
+ if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
+ return span;
+ }
+ Span { range: TextRange::empty(span.range.end()), ..span }
+ }
+
+ fn start(&mut self, span: Self::Span) -> Self::Span {
+ // We can't modify the span range for fixup spans, those are meaningful to fixup.
+ if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER {
+ return span;
+ }
+ Span { range: TextRange::empty(span.range.start()), ..span }
+ }
+
+ fn line(&mut self, _span: Self::Span) -> usize {
+ // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL
+ 0
+ }
+
+ fn column(&mut self, _span: Self::Span) -> usize {
+ // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL
+ 0
+ }
+}
+
+impl server::Symbol for RaSpanServer {
+ fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
+ // FIXME: nfc-normalize and validate idents
+ Ok(<Self as server::Server>::intern_symbol(string))
+ }
+}
+
+impl server::Server for RaSpanServer {
+ fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+ bridge::ExpnGlobals {
+ def_site: self.def_site,
+ call_site: self.call_site,
+ mixed_site: self.mixed_site,
+ }
+ }
+
+ fn intern_symbol(ident: &str) -> Self::Symbol {
+ // FIXME: should be `self.interner` once the proc-macro api allows it.
+ Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident))
+ }
+
+ fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+ // FIXME: should be `self.interner` once the proc-macro api allows it.
+ f(symbol.text(&SYMBOL_INTERNER).as_str())
+ }
+}
diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs
new file mode 100644
index 0000000000..12526ad4f3
--- /dev/null
+++ b/crates/proc-macro-srv/src/server/token_id.rs
@@ -0,0 +1,380 @@
+//! proc-macro server backend based on [`proc_macro_api::msg::TokenId`] as the backing span.
+//! This backend is rather inflexible, used by RustRover and older rust-analyzer versions.
+use std::{
+ iter,
+ ops::{Bound, Range},
+};
+
+use proc_macro::bridge::{self, server};
+
+use crate::server::{
+ delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter,
+ Symbol, SymbolInternerRef, SYMBOL_INTERNER,
+};
+mod tt {
+ pub use proc_macro_api::msg::TokenId;
+
+ pub use ::tt::*;
+
+ pub type Subtree = ::tt::Subtree<TokenId>;
+ pub type TokenTree = ::tt::TokenTree<TokenId>;
+ pub type Leaf = ::tt::Leaf<TokenId>;
+ pub type Literal = ::tt::Literal<TokenId>;
+ pub type Punct = ::tt::Punct<TokenId>;
+ pub type Ident = ::tt::Ident<TokenId>;
+}
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+#[allow(unused)]
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+#[allow(unused)]
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+type TokenStream = crate::server::TokenStream<Span>;
+
+#[derive(Clone)]
+pub struct SourceFile;
+pub struct FreeFunctions;
+
+pub struct TokenIdServer {
+ pub(crate) interner: SymbolInternerRef,
+ pub call_site: Span,
+ pub def_site: Span,
+ pub mixed_site: Span,
+}
+
+impl server::Types for TokenIdServer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type SourceFile = SourceFile;
+ type Span = Span;
+ type Symbol = Symbol;
+}
+
+impl server::FreeFunctions for TokenIdServer {
+ fn injected_env_var(&mut self, _: &str) -> Option<std::string::String> {
+ None
+ }
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {}
+ fn track_path(&mut self, _path: &str) {}
+ fn literal_from_str(
+ &mut self,
+ s: &str,
+ ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ // FIXME: keep track of LitKind and Suffix
+ Ok(bridge::Literal {
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(self.interner, s),
+ suffix: None,
+ span: self.call_site,
+ })
+ }
+
+ fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {}
+}
+
+impl server::TokenStream for TokenIdServer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = Group {
+ delimiter: delim_to_internal(group.delimiter, group.span),
+ token_trees: match group.stream {
+ Some(stream) => stream.into_iter().collect(),
+ None => Vec::new(),
+ },
+ };
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(iter::once(tree))
+ }
+
+ bridge::TokenTree::Ident(ident) => {
+ let text = ident.sym.text(self.interner);
+ let text =
+ if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text };
+ let ident: tt::Ident = tt::Ident { text, span: ident.span };
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(iter::once(tree))
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let literal = LiteralFormatter(literal);
+ let text = literal.with_stringify_parts(self.interner, |parts| {
+ ::tt::SmolStr::from_iter(parts.iter().copied())
+ });
+
+ let literal = tt::Literal { text, span: literal.0.span };
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(iter::once(tree))
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+ span: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(iter::once(tree))
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")),
+ is_raw: ident.text.starts_with("r#"),
+ span: ident.span,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ bridge::TokenTree::Literal(bridge::Literal {
+ // FIXME: handle literal kinds
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(self.interner, &lit.text),
+ // FIXME: handle suffixes
+ suffix: None,
+ span: lit.span,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: punct.char as u8,
+ joint: punct.spacing == Spacing::Joint,
+ span: punct.span,
+ })
+ }
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+ delimiter: delim_to_external(subtree.delimiter),
+ stream: if subtree.token_trees.is_empty() {
+ None
+ } else {
+ Some(subtree.token_trees.into_iter().collect())
+ },
+ span: bridge::DelimSpan::from_single(subtree.delimiter.open),
+ }),
+ })
+ .collect()
+ }
+}
+
+impl server::SourceFile for TokenIdServer {
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Span for TokenIdServer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ self.call_site
+ }
+    /// Recently added feature, not yet exposed in the stable `proc_macro` API.
+    ///
+    /// See PR:
+    /// <https://github.com/rust-lang/rust/pull/55780>
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ span
+ }
+ fn byte_range(&mut self, _span: Self::Span) -> Range<usize> {
+ Range { start: 0, end: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn subspan(
+ &mut self,
+ span: Self::Span,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // Just return the span again, because some macros will unwrap the result.
+ Some(span)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ self.call_site
+ }
+
+ fn end(&mut self, _self_: Self::Span) -> Self::Span {
+ self.call_site
+ }
+
+ fn start(&mut self, _self_: Self::Span) -> Self::Span {
+ self.call_site
+ }
+
+ fn line(&mut self, _span: Self::Span) -> usize {
+ 0
+ }
+
+ fn column(&mut self, _span: Self::Span) -> usize {
+ 0
+ }
+}
+
+impl server::Symbol for TokenIdServer {
+ fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
+ // FIXME: nfc-normalize and validate idents
+ Ok(<Self as server::Server>::intern_symbol(string))
+ }
+}
+
+impl server::Server for TokenIdServer {
+ fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+ bridge::ExpnGlobals {
+ def_site: self.def_site,
+ call_site: self.call_site,
+ mixed_site: self.mixed_site,
+ }
+ }
+
+ fn intern_symbol(ident: &str) -> Self::Symbol {
+ Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident))
+ }
+
+ fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+ f(symbol.text(&SYMBOL_INTERNER).as_str())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ span: tt::TokenId(0),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ span: tt::TokenId(0),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: tt::Delimiter {
+ open: tt::TokenId(0),
+ close: tt::TokenId(0),
+ kind: tt::DelimiterKind::Brace,
+ },
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: tt::Delimiter {
+ open: tt::TokenId(0),
+ close: tt::TokenId(0),
+ kind: tt::DelimiterKind::Parenthesis,
+ },
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ span: tt::TokenId(0),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ span: tt::TokenId(0),
+ }))
+ );
+ }
+}
diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs
index 36be882503..8f669a3049 100644
--- a/crates/proc-macro-srv/src/server/token_stream.rs
+++ b/crates/proc-macro-srv/src/server/token_stream.rs
@@ -1,20 +1,24 @@
//! TokenStream implementation used by sysroot ABI
-use proc_macro_api::msg::TokenId;
+use tt::TokenTree;
-use crate::tt::{self, TokenTree};
+#[derive(Debug, Clone)]
+pub struct TokenStream<S> {
+ pub(super) token_trees: Vec<TokenTree<S>>,
+}
-#[derive(Debug, Default, Clone)]
-pub struct TokenStream {
- pub(super) token_trees: Vec<TokenTree>,
+impl<S> Default for TokenStream<S> {
+ fn default() -> Self {
+ Self { token_trees: vec![] }
+ }
}
-impl TokenStream {
+impl<S> TokenStream<S> {
pub(crate) fn new() -> Self {
- TokenStream::default()
+ TokenStream { token_trees: vec![] }
}
- pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self {
+ pub(crate) fn with_subtree(subtree: tt::Subtree<S>) -> Self {
if subtree.delimiter.kind != tt::DelimiterKind::Invisible {
TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
} else {
@@ -22,7 +26,10 @@ impl TokenStream {
}
}
- pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree {
+ pub(crate) fn into_subtree(self, call_site: S) -> tt::Subtree<S>
+ where
+ S: Copy,
+ {
tt::Subtree {
delimiter: tt::Delimiter {
open: call_site,
@@ -39,37 +46,37 @@ impl TokenStream {
}
/// Creates a token stream containing a single token tree.
-impl From<TokenTree> for TokenStream {
- fn from(tree: TokenTree) -> TokenStream {
+impl<S> From<TokenTree<S>> for TokenStream<S> {
+ fn from(tree: TokenTree<S>) -> TokenStream<S> {
TokenStream { token_trees: vec![tree] }
}
}
/// Collects a number of token trees into a single stream.
-impl FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+impl<S> FromIterator<TokenTree<S>> for TokenStream<S> {
+ fn from_iter<I: IntoIterator<Item = TokenTree<S>>>(trees: I) -> Self {
trees.into_iter().map(TokenStream::from).collect()
}
}
/// A "flattening" operation on token streams, collects token trees
/// from multiple token streams into a single stream.
-impl FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+impl<S> FromIterator<TokenStream<S>> for TokenStream<S> {
+ fn from_iter<I: IntoIterator<Item = TokenStream<S>>>(streams: I) -> Self {
let mut builder = TokenStreamBuilder::new();
streams.into_iter().for_each(|stream| builder.push(stream));
builder.build()
}
}
-impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+impl<S> Extend<TokenTree<S>> for TokenStream<S> {
+ fn extend<I: IntoIterator<Item = TokenTree<S>>>(&mut self, trees: I) {
self.extend(trees.into_iter().map(TokenStream::from));
}
}
-impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+impl<S> Extend<TokenStream<S>> for TokenStream<S> {
+ fn extend<I: IntoIterator<Item = TokenStream<S>>>(&mut self, streams: I) {
for item in streams {
for tkn in item {
match tkn {
@@ -87,22 +94,21 @@ impl Extend<TokenStream> for TokenStream {
}
}
-pub(super) struct TokenStreamBuilder {
- acc: TokenStream,
+pub(super) struct TokenStreamBuilder<S> {
+ acc: TokenStream<S>,
}
/// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
pub(super) mod token_stream {
- use proc_macro_api::msg::TokenId;
- use super::{tt, TokenStream, TokenTree};
+ use super::{TokenStream, TokenTree};
/// An iterator over `TokenStream`'s `TokenTree`s.
/// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
/// and returns whole groups as token trees.
- impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = std::vec::IntoIter<TokenTree>;
+ impl<S> IntoIterator for TokenStream<S> {
+ type Item = TokenTree<S>;
+ type IntoIter = std::vec::IntoIter<TokenTree<S>>;
fn into_iter(self) -> Self::IntoIter {
self.token_trees.into_iter()
@@ -119,71 +125,34 @@ pub(super) mod token_stream {
/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
/// change these errors into `LexError`s later.
#[rustfmt::skip]
- impl /*FromStr for*/ TokenStream {
+ impl<S: tt::Span> /*FromStr for*/ TokenStream<S> {
// type Err = LexError;
- pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result<TokenStream, LexError> {
+ pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, LexError> {
let subtree =
mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
- let subtree = subtree_replace_token_ids_with_call_site(subtree,call_site);
Ok(TokenStream::with_subtree(subtree))
}
}
- impl ToString for TokenStream {
+ impl<S> ToString for TokenStream<S> {
fn to_string(&self) -> String {
::tt::pretty(&self.token_trees)
}
}
-
- fn subtree_replace_token_ids_with_call_site(
- subtree: tt::Subtree,
- call_site: TokenId,
- ) -> tt::Subtree {
- tt::Subtree {
- delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter },
- token_trees: subtree
- .token_trees
- .into_iter()
- .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site))
- .collect(),
- }
- }
-
- fn token_tree_replace_token_ids_with_call_site(
- tt: tt::TokenTree,
- call_site: TokenId,
- ) -> tt::TokenTree {
- match tt {
- tt::TokenTree::Leaf(leaf) => {
- tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site))
- }
- tt::TokenTree::Subtree(subtree) => {
- tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site))
- }
- }
- }
-
- fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf {
- match leaf {
- tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }),
- tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }),
- tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }),
- }
- }
}
-impl TokenStreamBuilder {
- pub(super) fn new() -> TokenStreamBuilder {
+impl<S> TokenStreamBuilder<S> {
+ pub(super) fn new() -> TokenStreamBuilder<S> {
TokenStreamBuilder { acc: TokenStream::new() }
}
- pub(super) fn push(&mut self, stream: TokenStream) {
+ pub(super) fn push(&mut self, stream: TokenStream<S>) {
self.acc.extend(stream.into_iter())
}
- pub(super) fn build(self) -> TokenStream {
+ pub(super) fn build(self) -> TokenStream<S> {
self.acc
}
}
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index b04e3ca19a..87d832cc76 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -8,7 +8,7 @@ use expect_test::expect;
#[test]
fn test_derive_empty() {
- assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"]);
+ assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"], expect!["SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"]);
}
#[test]
@@ -23,6 +23,13 @@ fn test_derive_error() {
SUBTREE () 1 1
LITERAL "#[derive(DeriveError)] struct S ;" 1
PUNCH ; [alone] 1"##]],
+ expect![[r##"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL "#[derive(DeriveError)] struct S ;" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]],
);
}
@@ -40,6 +47,15 @@ fn test_fn_like_macro_noop() {
LITERAL 1 1
PUNCH , [alone] 1
SUBTREE [] 1 1"#]],
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 0 SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 8..9, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 1 SpanData { range: 10..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ SUBTREE [] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 14..15, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
);
}
@@ -53,6 +69,11 @@ fn test_fn_like_macro_clone_ident_subtree() {
IDENT ident 1
PUNCH , [alone] 1
SUBTREE [] 1 1"#]],
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ SUBTREE [] SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
);
}
@@ -64,6 +85,41 @@ fn test_fn_like_macro_clone_raw_ident() {
expect![[r#"
SUBTREE $$ 1 1
IDENT r#async 1"#]],
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT r#async SpanData { range: 0..7, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_fn_like_span_join() {
+ assert_expand(
+ "fn_like_span_join",
+ "foo bar",
+ expect![[r#"
+ SUBTREE $$ 1 1
+ IDENT r#joined 1"#]],
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT r#joined SpanData { range: 0..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_fn_like_span_ops() {
+ assert_expand(
+ "fn_like_span_ops",
+ "set_def_site resolved_at_def_site start_span",
+ expect![[r#"
+ SUBTREE $$ 1 1
+ IDENT set_def_site 0
+ IDENT resolved_at_def_site 1
+ IDENT start_span 1"#]],
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT set_def_site SpanData { range: 0..150, anchor: SpanAnchor(FileId(41), 1), ctx: SyntaxContextId(0) }
+ IDENT resolved_at_def_site SpanData { range: 13..33, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT start_span SpanData { range: 34..34, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
);
}
@@ -81,6 +137,15 @@ fn test_fn_like_mk_literals() {
LITERAL 3.14 1
LITERAL 123i64 1
LITERAL 123 1"#]],
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL b"byte_string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 'c' SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL "string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 3.14f64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 3.14 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 123i64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 123 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
);
}
@@ -93,6 +158,10 @@ fn test_fn_like_mk_idents() {
SUBTREE $$ 1 1
IDENT standard 1
IDENT r#raw 1"#]],
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT standard SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT r#raw SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
);
}
@@ -113,6 +182,18 @@ fn test_fn_like_macro_clone_literals() {
LITERAL 3.14f32 1
PUNCH , [alone] 1
LITERAL "hello bridge" 1"#]],
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 2_u32 SpanData { range: 6..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH - [alone] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 4i64 SpanData { range: 14..18, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
);
}
@@ -132,6 +213,13 @@ fn test_attr_macro() {
SUBTREE () 1 1
LITERAL "#[attr_error(some arguments)] mod m {}" 1
PUNCH ; [alone] 1"##]],
+ expect![[r##"
+ SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ IDENT compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL "#[attr_error(some arguments)] mod m {}" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]],
);
}
@@ -147,6 +235,8 @@ fn list_test_macros() {
fn_like_clone_tokens [FuncLike]
fn_like_mk_literals [FuncLike]
fn_like_mk_idents [FuncLike]
+ fn_like_span_join [FuncLike]
+ fn_like_span_ops [FuncLike]
attr_noop [Attr]
attr_panic [Attr]
attr_error [Attr]
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index c12096d140..9a1311d955 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -2,47 +2,96 @@
use expect_test::Expect;
use proc_macro_api::msg::TokenId;
+use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
+use tt::TextRange;
use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
-fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> {
- // This is a bit strange. We need to parse a string into a token stream into
- // order to create a tt::SubTree from it in fixtures. `into_subtree` is
- // implemented by all the ABIs we have so we arbitrarily choose one ABI to
- // write a `parse_string` function for and use that. The tests don't really
- // care which ABI we're using as the `into_subtree` function isn't part of
- // the ABI and shouldn't change between ABI versions.
- crate::server::TokenStream::from_str(code, call_site).ok()
+fn parse_string(call_site: TokenId, src: &str) -> crate::server::TokenStream<TokenId> {
+ crate::server::TokenStream::with_subtree(
+ mbe::parse_to_token_tree_static_span(call_site, src).unwrap(),
+ )
}
-pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
- assert_expand_impl(macro_name, ra_fixture, None, expect);
+fn parse_string_spanned(
+ anchor: SpanAnchor,
+ call_site: SyntaxContextId,
+ src: &str,
+) -> crate::server::TokenStream<Span> {
+ crate::server::TokenStream::with_subtree(
+ mbe::parse_to_token_tree(anchor, call_site, src).unwrap(),
+ )
}
-pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, expect: Expect) {
- assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect);
+pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect, expect_s: Expect) {
+ assert_expand_impl(macro_name, ra_fixture, None, expect, expect_s);
}
-fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+pub fn assert_expand_attr(
+ macro_name: &str,
+ ra_fixture: &str,
+ attr_args: &str,
+ expect: Expect,
+ expect_s: Expect,
+) {
+ assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect, expect_s);
+}
+
+fn assert_expand_impl(
+ macro_name: &str,
+ input: &str,
+ attr: Option<&str>,
+ expect: Expect,
+ expect_s: Expect,
+) {
+ let path = proc_macro_test_dylib_path();
+ let expander = dylib::Expander::new(&path).unwrap();
+
let def_site = TokenId(0);
let call_site = TokenId(1);
let mixed_site = TokenId(2);
- let path = proc_macro_test_dylib_path();
- let expander = dylib::Expander::new(&path).unwrap();
- let fixture = parse_string(input, call_site).unwrap();
- let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
+ let input_ts = parse_string(call_site, input);
+ let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site));
let res = expander
.expand(
macro_name,
- &fixture.into_subtree(call_site),
- attr.as_ref(),
+ input_ts.into_subtree(call_site),
+ attr_ts,
def_site,
call_site,
mixed_site,
)
.unwrap();
expect.assert_eq(&format!("{res:?}"));
+
+ let def_site = Span {
+ range: TextRange::new(0.into(), 150.into()),
+ anchor: SpanAnchor {
+ file_id: FileId::from_raw(41),
+ ast_id: ErasedFileAstId::from_raw(From::from(1)),
+ },
+ ctx: SyntaxContextId::ROOT,
+ };
+ let call_site = Span {
+ range: TextRange::new(0.into(), 100.into()),
+ anchor: SpanAnchor {
+ file_id: FileId::from_raw(42),
+ ast_id: ErasedFileAstId::from_raw(From::from(2)),
+ },
+ ctx: SyntaxContextId::ROOT,
+ };
+ let mixed_site = call_site;
+
+ let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, input);
+ let attr = attr.map(|attr| {
+ parse_string_spanned(call_site.anchor, call_site.ctx, attr).into_subtree(call_site)
+ });
+
+ let res = expander
+ .expand(macro_name, fixture.into_subtree(call_site), attr, def_site, call_site, mixed_site)
+ .unwrap();
+ expect_s.assert_eq(&format!("{res:?}"));
}
pub(crate) fn list() -> Vec<String> {
diff --git a/crates/proc-macro-test/Cargo.toml b/crates/proc-macro-test/Cargo.toml
deleted file mode 100644
index 12d7c07d3e..0000000000
--- a/crates/proc-macro-test/Cargo.toml
+++ /dev/null
@@ -1,20 +0,0 @@
-[package]
-name = "proc-macro-test"
-version = "0.0.0"
-publish = false
-
-authors.workspace = true
-edition.workspace = true
-license.workspace = true
-rust-version.workspace = true
-
-[lib]
-doctest = false
-
-[build-dependencies]
-cargo_metadata.workspace = true
-
-proc-macro-test-impl = { path = "imp", version = "0.0.0" }
-
-# local deps
-toolchain.workspace = true
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index ec8e5c6dd9..78411e2d58 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -832,7 +832,7 @@ fn main() {
}
#[test]
-#[cfg(feature = "sysroot-abi")]
+#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
fn resolve_proc_macro() {
use expect_test::expect;
if skip_slow_tests() {
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index 09af34ce7e..7617acde64 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -30,6 +30,13 @@ pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
+/// `ErasedFileAstId` used as a marker in spans produced by syntax node fixups. Any span carrying
+/// this ast id is to be considered fake.
+pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId =
+    // We pick the second-to-last value in case we ever consider making this a NonMaxU32; this
+ // is required to be stable for the proc-macro-server
+ la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(!0 - 1));
+
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct SpanData<Ctx> {
/// The text range of this span, relative to the anchor.
@@ -97,6 +104,14 @@ impl SyntaxContextId {
pub fn is_root(self) -> bool {
self == Self::ROOT
}
+
+ pub fn into_u32(self) -> u32 {
+ self.0.as_u32()
+ }
+
+ pub fn from_u32(u32: u32) -> Self {
+ Self(InternId::from(u32))
+ }
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]