 crates/base-db/src/input.rs                   |  7
 crates/base-db/src/lib.rs                     | 13
 crates/base-db/src/span.rs                    |  6
 crates/cfg/src/tests.rs                       | 30
 crates/mbe/src/benchmark.rs                   | 80
 crates/mbe/src/lib.rs                         |  2
 crates/mbe/src/syntax_bridge.rs               | 28
 crates/mbe/src/syntax_bridge/tests.rs         | 32
 crates/proc-macro-srv-cli/src/main.rs         |  2
 crates/proc-macro-srv/src/lib.rs              |  1
 crates/rust-analyzer/src/cargo_target_spec.rs | 13
 crates/tt/src/lib.rs                          | 24
 12 files changed, 99 insertions(+), 139 deletions(-)
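
The bulk of this change deduplicates the per-test `DummyFile`/`DummyCtx`/`NoOpMap` span stubs, previously copy-pasted across the `cfg`, `mbe`, and `rust-analyzer` test modules, into a single `dummy_test_span_utils` module exported from `mbe`. A minimal sketch of the resulting test-side usage, using only names visible in the diffs below:

    use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
    use syntax::{ast, AstNode};

    fn parse_cfg(input: &str) -> cfg::CfgExpr {
        let source_file = ast::SourceFile::parse(input).ok().unwrap();
        // Take the first token tree in the fixture, e.g. the `(...)` of `#![cfg(...)]`.
        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
        // DummyTestSpanMap maps every text range to the dummy anchor/context pair,
        // replacing the one-off NoOpMap impls each test crate used to define.
        let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
        cfg::CfgExpr::parse(&tt)
    }
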
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 2fa5c25c91..0b04a91f62 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -243,6 +243,9 @@ impl CrateDisplayName {
}
}
+// FIXME: These should not be defined in here? Why does base-db know about proc-macros?
+// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
@@ -324,7 +327,9 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
- // FIXME: These things should not be per crate! These are more per workspace crate graph level things
+ // FIXME: These things should not be per crate! These are more per workspace crate graph level
+ // things. This info does need to be somewhat present though, so as to prevent deduplication
+ // from happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
}
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 6dc1629c3b..38a2641230 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -4,6 +4,7 @@
mod input;
mod change;
+// FIXME: Is this purely a test util mod? Consider #[cfg(test)] gating it.
pub mod fixture;
pub mod span;
@@ -13,14 +14,13 @@ use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
use triomphe::Arc;
-pub use crate::input::DependencyKind;
pub use crate::{
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
- Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
- ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
- ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+ DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
+ ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
+ ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
},
};
pub use salsa::{self, Cancelled};
@@ -69,8 +69,7 @@ pub trait FileLoader {
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
- // Parses the file into the syntax tree.
- #[salsa::invoke(parse_query)]
+ /// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
/// The crate graph.
@@ -82,7 +81,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
fn proc_macros(&self) -> Arc<ProcMacros>;
}
-fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
let text = db.file_text(file_id);
SourceFile::parse(&text)
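
The rename from `parse_query` to `parse` works because salsa's `query_group` macro, in the absence of a `#[salsa::invoke(...)]` override, resolves each query to the free function with the same name as the trait method. A condensed sketch of that convention, assuming the surrounding imports of `crates/base-db/src/lib.rs`:

    #[salsa::query_group(SourceDatabaseStorage)]
    pub trait SourceDatabase: FileLoader + std::fmt::Debug {
        /// Parses the file into the syntax tree.
        fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
    }

    // With no #[salsa::invoke(parse_query)] attribute, the generated storage
    // dispatches to the free function named `parse` in the enclosing module.
    fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
        let text = db.file_text(file_id);
        SourceFile::parse(&text)
    }
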
diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs
index e183e9b199..370c732813 100644
--- a/crates/base-db/src/span.rs
+++ b/crates/base-db/src/span.rs
@@ -1,3 +1,5 @@
+//! File and span related types.
+// FIXME: This should probably be moved into its own crate.
use std::fmt;
use salsa::InternId;
@@ -29,10 +31,10 @@ impl SyntaxContext for SyntaxContextId {
}
// inherent trait impls please tyvm
impl SyntaxContextId {
- // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
+ // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
// currently (which kind of makes sense but we need it here!)
pub const ROOT: Self = SyntaxContextId(unsafe { core::mem::transmute(1) });
- // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
+ // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
// currently (which kind of makes sense but we need it here!)
pub const SELF_REF: Self = SyntaxContextId(unsafe { core::mem::transmute(!0u32) });
diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs
index 61cdbded0b..c7ac1af934 100644
--- a/crates/cfg/src/tests.rs
+++ b/crates/cfg/src/tests.rs
@@ -1,34 +1,14 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
-use mbe::{syntax_node_to_token_tree, SpanMapper};
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{ast, AstNode};
-use tt::{SpanAnchor, SyntaxContext};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct DummyFile;
-impl SpanAnchor for DummyFile {
- const DUMMY: Self = DummyFile;
-}
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-struct DummyCtx;
-impl SyntaxContext for DummyCtx {
- const DUMMY: Self = DummyCtx;
-}
-
-struct NoOpMap;
-
-impl SpanMapper<tt::SpanData<DummyFile, DummyCtx>> for NoOpMap {
- fn span_for(&self, range: syntax::TextRange) -> tt::SpanData<DummyFile, DummyCtx> {
- tt::SpanData { range, anchor: DummyFile, ctx: DummyCtx }
- }
-}
-
fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}
@@ -36,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
@@ -45,7 +25,7 @@ fn check_dnf(input: &str, expect: Expect) {
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -56,7 +36,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
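
For reference, `DnfExpr::new` is what normalizes a parsed `CfgExpr` into disjunctive normal form before the snapshot comparison. A usage sketch, assuming `CfgExpr` and `DnfExpr` are exported from the `cfg` crate root as the test imports suggest, reusing the `parse_cfg` helper sketched after the diffstat:

    let cfg = parse_cfg("#![cfg(all(unix, not(feature = \"std\")))]");
    let dnf = cfg::DnfExpr::new(cfg);
    // DnfExpr implements Display, so the normalized form can be rendered
    // back into attribute syntax, exactly as check_dnf does above.
    println!("#![cfg({dnf})]");
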
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index e7fbb91889..271efe1a92 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -6,35 +6,13 @@ use syntax::{
AstNode, SmolStr,
};
use test_utils::{bench, bench_fixture, skip_slow_tests};
-use tt::{Span, SpanAnchor, SyntaxContext};
+use tt::Span;
use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator},
- syntax_node_to_token_tree, DeclarativeMacro, SpanMapper,
+ syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap,
};
-type SpanData = tt::SpanData<DummyFile, DummyCtx>;
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
-struct DummyFile;
-impl SpanAnchor for DummyFile {
- const DUMMY: Self = DummyFile;
-}
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug)]
-struct DummyCtx;
-impl SyntaxContext for DummyCtx {
- const DUMMY: Self = DummyCtx;
-}
-
-struct NoOpMap;
-
-impl SpanMapper<SpanData> for NoOpMap {
- fn span_for(&self, range: syntax::TextRange) -> SpanData {
- SpanData { range, anchor: DummyFile, ctx: DummyCtx }
- }
-}
-
#[test]
fn benchmark_parse_macro_rules() {
if skip_slow_tests() {
@@ -70,14 +48,14 @@ fn benchmark_expand_macro_rules() {
assert_eq!(hash, 69413);
}
-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData>> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
macro_rules_fixtures_tt()
.into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect()
}
-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@@ -87,7 +65,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
.filter_map(ast::MacroRules::cast)
.map(|rule| {
let id = rule.name().unwrap().to_string();
- let def_tt = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), NoOpMap);
+ let def_tt =
+ syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
(id, def_tt)
})
.collect()
@@ -95,8 +74,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
/// Generate random invocation fixtures from rules
fn invocation_fixtures(
- rules: &FxHashMap<String, DeclarativeMacro<SpanData>>,
-) -> Vec<(String, tt::Subtree<SpanData>)> {
+ rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
+) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
let mut seed = 123456789;
let mut res = Vec::new();
@@ -118,8 +97,8 @@ fn invocation_fixtures(
loop {
let mut subtree = tt::Subtree {
delimiter: tt::Delimiter {
- open: SpanData::DUMMY,
- close: SpanData::DUMMY,
+ open: DummyTestSpanData::DUMMY,
+ close: DummyTestSpanData::DUMMY,
kind: tt::DelimiterKind::Invisible,
},
token_trees: vec![],
@@ -141,7 +120,11 @@ fn invocation_fixtures(
}
return res;
- fn collect_from_op(op: &Op<SpanData>, parent: &mut tt::Subtree<SpanData>, seed: &mut usize) {
+ fn collect_from_op(
+ op: &Op<DummyTestSpanData>,
+ parent: &mut tt::Subtree<DummyTestSpanData>,
+ seed: &mut usize,
+ ) {
return match op {
Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -227,22 +210,35 @@ fn invocation_fixtures(
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
- fn make_ident(ident: &str) -> tt::TokenTree<SpanData> {
- tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into()
- }
- fn make_punct(char: char) -> tt::TokenTree<SpanData> {
- tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone })
+ fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Ident(tt::Ident { span: DummyTestSpanData::DUMMY, text: SmolStr::new(ident) })
.into()
}
- fn make_literal(lit: &str) -> tt::TokenTree<SpanData> {
- tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into()
+ fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Punct(tt::Punct {
+ span: DummyTestSpanData::DUMMY,
+ char,
+ spacing: tt::Spacing::Alone,
+ })
+ .into()
+ }
+ fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Literal(tt::Literal {
+ span: DummyTestSpanData::DUMMY,
+ text: SmolStr::new(lit),
+ })
+ .into()
}
fn make_subtree(
kind: tt::DelimiterKind,
- token_trees: Option<Vec<tt::TokenTree<SpanData>>>,
- ) -> tt::TokenTree<SpanData> {
+ token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
+ ) -> tt::TokenTree<DummyTestSpanData> {
tt::Subtree {
- delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind },
+ delimiter: tt::Delimiter {
+ open: DummyTestSpanData::DUMMY,
+ close: DummyTestSpanData::DUMMY,
+ kind,
+ },
token_trees: token_trees.unwrap_or_default(),
}
.into()
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 0b8461200e..1b13b39f01 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -40,6 +40,8 @@ pub use crate::{
token_map::TokenMap,
};
+pub use crate::syntax_bridge::dummy_test_span_utils::*;
+
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ParseError {
UnexpectedToken(Box<str>),
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 36c63b365d..688ccb2325 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -33,6 +33,34 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
}
}
+pub(crate) mod dummy_test_span_utils {
+ use super::*;
+
+ pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
+
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub struct DummyTestSpanAnchor;
+ impl tt::SpanAnchor for DummyTestSpanAnchor {
+ const DUMMY: Self = DummyTestSpanAnchor;
+ }
+ #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+ pub struct DummyTestSyntaxContext;
+ impl SyntaxContext for DummyTestSyntaxContext {
+ const DUMMY: Self = DummyTestSyntaxContext;
+ }
+
+ pub struct DummyTestSpanMap;
+
+ impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
+ fn span_for(
+ &self,
+ range: syntax::TextRange,
+ ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
+ tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
+ }
+ }
+}
+
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume).
/// TODO: Flesh out the doc comment more thoroughly
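
`SpanMapper` (whose blanket `impl ... for &SM` appears at the top of this hunk) is the hook through which `syntax_node_to_token_tree` turns text ranges into spans; `DummyTestSpanMap` is simply its trivial instance. A sketch of a non-trivial mapper, assuming the re-exports from `mbe` introduced above; the offset logic is a hypothetical example, not part of this patch:

    use mbe::{DummyTestSpanAnchor, DummyTestSpanData, DummyTestSyntaxContext, SpanMapper};
    use syntax::{TextRange, TextSize};

    // Hypothetical: shift every range by a fixed offset before attaching the
    // dummy anchor/context, e.g. when converting a sub-slice of a file.
    struct OffsetSpanMap {
        offset: TextSize,
    }

    impl SpanMapper<DummyTestSpanData> for OffsetSpanMap {
        fn span_for(&self, range: TextRange) -> DummyTestSpanData {
            tt::SpanData {
                range: range + self.offset,
                anchor: DummyTestSpanAnchor,
                ctx: DummyTestSyntaxContext,
            }
        }
    }
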
diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs
index 2e21977f68..bd8187a148 100644
--- a/crates/mbe/src/syntax_bridge/tests.rs
+++ b/crates/mbe/src/syntax_bridge/tests.rs
@@ -4,38 +4,14 @@ use syntax::{ast, AstNode};
use test_utils::extract_annotations;
use tt::{
buffer::{TokenBuffer, TokenTreeRef},
- Leaf, Punct, Spacing, SpanAnchor, SyntaxContext,
+ Leaf, Punct, Spacing,
};
-use crate::SpanMapper;
-
-use super::syntax_node_to_token_tree;
+use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
fn check_punct_spacing(fixture: &str) {
- type SpanData = tt::SpanData<DummyFile, DummyCtx>;
-
- #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
- struct DummyFile;
- impl SpanAnchor for DummyFile {
- const DUMMY: Self = DummyFile;
- }
-
- #[derive(PartialEq, Eq, Clone, Copy, Debug)]
- struct DummyCtx;
- impl SyntaxContext for DummyCtx {
- const DUMMY: Self = DummyCtx;
- }
-
- struct NoOpMap;
-
- impl SpanMapper<SpanData> for NoOpMap {
- fn span_for(&self, range: syntax::TextRange) -> SpanData {
- SpanData { range, anchor: DummyFile, ctx: DummyCtx }
- }
- }
-
let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
- let subtree = syntax_node_to_token_tree(source_file.syntax(), NoOpMap);
+ let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
let mut annotations: HashMap<_, _> = extract_annotations(fixture)
.into_iter()
.map(|(range, annotation)| {
@@ -53,7 +29,7 @@ fn check_punct_spacing(fixture: &str) {
while !cursor.eof() {
while let Some(token_tree) = cursor.token_tree() {
if let TokenTreeRef::Leaf(
- Leaf::Punct(Punct { spacing, span: SpanData { range, .. }, .. }),
+ Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }),
_,
) = token_tree
{
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index bece195187..ea65c33604 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -18,11 +18,13 @@ fn main() -> std::io::Result<()> {
run()
}
+#[cfg(not(FALSE))]
#[cfg(not(feature = "sysroot-abi"))]
fn run() -> io::Result<()> {
panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
}
+#[cfg(FALSE)]
#[cfg(feature = "sysroot-abi")]
fn run() -> io::Result<()> {
use proc_macro_api::msg::{self, Message};
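
The `#[cfg(FALSE)]`/`#[cfg(not(FALSE))]` pair is a stock trick for temporarily disabling code (flagged as such by the TODO in `proc-macro-srv` below): no build ever defines a cfg named `FALSE`, so the first attribute always compiles its item out while the second always keeps it. A self-contained illustration, not taken from this patch:

    #[cfg(not(FALSE))] // no cfg named FALSE is ever set, so this item is kept
    fn run() -> &'static str {
        "stub"
    }

    #[cfg(FALSE)] // never set: this item is dropped before name resolution
    fn run() -> &'static str {
        real_implementation_that_need_not_resolve()
    }

    fn main() {
        assert_eq!(run(), "stub");
    }
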
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index 00042480e3..bd0d1b79fa 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -10,6 +10,7 @@
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
+#![cfg(FALSE)] // TODO
#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs
index d679bfcb01..728bade0d0 100644
--- a/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -209,26 +209,17 @@ mod tests {
use super::*;
use cfg::CfgExpr;
- use hir_def::tt::{self, Span};
- use mbe::{syntax_node_to_token_tree, SpanMapper};
+ use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{
ast::{self, AstNode},
SmolStr,
};
- struct NoOpMap;
-
- impl SpanMapper<tt::SpanData> for NoOpMap {
- fn span_for(&self, _: syntax::TextRange) -> tt::SpanData {
- tt::SpanData::DUMMY
- }
- }
-
fn check(cfg: &str, expected_features: &[&str]) {
let cfg_expr = {
let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let tt = syntax_node_to_token_tree(tt.syntax(), &NoOpMap);
+ let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap);
CfgExpr::parse(&tt)
};
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index 1374ae1a6f..7977d97797 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -7,31 +7,9 @@
use std::fmt;
use stdx::impl_from;
-use text_size::{TextRange, TextSize};
pub use smol_str::SmolStr;
-
-/// Represents identity of the token.
-///
-/// For hygiene purposes, we need to track which expanded tokens originated from
-/// which source tokens. We do it by assigning an distinct identity to each
-/// source token and making sure that identities are preserved during macro
-/// expansion.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(pub u32);
-
-impl fmt::Debug for TokenId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0.fmt(f)
- }
-}
-
-impl TokenId {
- pub const UNSPECIFIED: TokenId = TokenId(!0);
- pub const fn unspecified() -> TokenId {
- Self::UNSPECIFIED
- }
-}
+pub use text_size::{TextRange, TextSize};
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct SpanData<Anchor, Ctx> {