Unnamed repository; edit this file 'description' to name the repository.
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
//! Completion for (built-in) attributes, derives and lints.
//!
//! This module uses a bit of static metadata to provide completions for builtin-in attributes and lints.

use std::sync::LazyLock;

use ide_db::{
    FxHashMap, SymbolKind,
    generated::lints::{
        CLIPPY_LINT_GROUPS, CLIPPY_LINTS, DEFAULT_LINTS, FEATURES, Lint, RUSTDOC_LINTS,
    },
    syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
};
use itertools::Itertools;
use syntax::{
    AstNode, Edition, SyntaxKind, T,
    ast::{self, AttrKind},
};

use crate::{
    Completions,
    context::{AttrCtx, CompletionContext, PathCompletionCtx, Qualified},
    item::CompletionItem,
};

mod cfg;
mod derive;
mod lint;
mod macro_use;
mod repr;

pub(crate) use self::derive::complete_derive_path;

/// Complete inputs to known builtin attributes as well as derive attributes
/// Complete inputs to known builtin attributes as well as derive attributes
pub(crate) fn complete_known_attribute_input(
    acc: &mut Completions,
    ctx: &CompletionContext<'_>,
    &colon_prefix: &bool,
    fake_attribute_under_caret: &ast::Attr,
    extern_crate: Option<&ast::ExternCrate>,
) -> Option<()> {
    let attribute = fake_attribute_under_caret;
    // Only attributes whose path is a single identifier (`repr`, `allow`, ...) are
    // handled here; a multi-segment path bails out of the whole completion.
    let name_ref = if let Some(attr_path) = attribute.path() {
        Some(attr_path.as_single_name_ref()?)
    } else {
        None
    };
    let path = name_ref?;
    let tt = attribute.token_tree()?;
    // Bail unless the caret is inside a parenthesized input list.
    tt.l_paren_token()?;

    match path.text().as_str() {
        "repr" => repr::complete_repr(acc, ctx, tt),
        "feature" => {
            let existing = parse_tt_as_comma_sep_paths(tt, ctx.edition)?;
            lint::complete_lint(acc, ctx, colon_prefix, &existing, FEATURES);
        }
        "allow" | "expect" | "deny" | "forbid" | "warn" => {
            let existing_lints = parse_tt_as_comma_sep_paths(tt, ctx.edition)?;

            // Gather every known lint: clippy group lints, rustc default lints,
            // individual clippy lints and rustdoc lints.
            let mut lints: Vec<Lint> = Vec::new();
            lints.extend(CLIPPY_LINT_GROUPS.iter().map(|group| group.lint.clone()));
            lints.extend(DEFAULT_LINTS.iter().cloned());
            lints.extend(CLIPPY_LINTS.iter().cloned());
            lints.extend(RUSTDOC_LINTS.iter().cloned());

            lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints);
        }
        "cfg" => cfg::complete_cfg(acc, ctx),
        "macro_use" => {
            let existing = parse_tt_as_comma_sep_paths(tt, ctx.edition)?;
            macro_use::complete_macro_use(acc, ctx, extern_crate, &existing);
        }
        _ => (),
    }
    Some(())
}

/// Complete the path of an attribute, e.g. the `cfg` in `#[cfg]`.
///
/// Offers, in order: derive helper attributes known in this context, items reachable
/// through the written qualifier (attribute macros and modules), and finally the
/// builtin attributes applicable to the annotated item kind.
pub(crate) fn complete_attribute_path(
    acc: &mut Completions,
    ctx: &CompletionContext<'_>,
    path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
    &AttrCtx { kind, annotated_item_kind, ref derive_helpers }: &AttrCtx,
) {
    let is_inner = kind == AttrKind::Inner;

    // Derive helper attributes (declared by derive macros) are always offered.
    for (derive_helper, derive_name) in derive_helpers {
        let mut item = CompletionItem::new(
            SymbolKind::Attribute,
            ctx.source_range(),
            derive_helper.as_str(),
            ctx.edition,
        );
        item.detail(format!("derive helper of `{derive_name}`"));
        item.add_to(acc, ctx.db);
    }

    match qualified {
        Qualified::With {
            resolution: Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))),
            super_chain_len,
            ..
        } => {
            acc.add_super_keyword(ctx, *super_chain_len);

            // Inside a module qualifier, only attribute macros and submodules make sense.
            for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
                match def {
                    hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_attr(ctx.db) => {
                        acc.add_macro(ctx, path_ctx, m, name)
                    }
                    hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
                        acc.add_module(ctx, path_ctx, m, name, vec![])
                    }
                    _ => (),
                }
            }
            // Builtin attributes are not reachable through a qualifier.
            return;
        }
        // fresh use tree with leading colon2, only show crate roots
        Qualified::Absolute => acc.add_crate_roots(ctx, path_ctx),
        // only show modules in a fresh UseTree
        Qualified::No => {
            ctx.process_all_names(&mut |name, def, doc_aliases| match def {
                hir::ScopeDef::ModuleDef(hir::ModuleDef::Macro(m)) if m.is_attr(ctx.db) => {
                    acc.add_macro(ctx, path_ctx, m, name)
                }
                hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) => {
                    acc.add_module(ctx, path_ctx, m, name, doc_aliases)
                }
                _ => (),
            });
            acc.add_nameref_keywords_with_colon(ctx);
        }
        Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
    }

    // Narrow the builtin-attribute table to the kind of item being annotated,
    // falling back to the (empty-but-common) expression set for expressions.
    let attributes = annotated_item_kind.and_then(|kind| {
        if ast::Expr::can_cast(kind) {
            Some(EXPR_ATTRIBUTES)
        } else {
            KIND_TO_ATTRIBUTES.get(&kind).copied()
        }
    });

    let add_completion = |attr_completion: &AttrCompletion| {
        let mut item = CompletionItem::new(
            SymbolKind::Attribute,
            ctx.source_range(),
            attr_completion.label,
            ctx.edition,
        );

        if let Some(lookup) = attr_completion.lookup {
            item.lookup_by(lookup);
        }

        if let Some((snippet, cap)) = attr_completion.snippet.zip(ctx.config.snippet_cap) {
            item.insert_snippet(cap, snippet);
        }

        // Inner-only attributes are suppressed on outer attribute positions.
        if is_inner || !attr_completion.prefer_inner {
            item.add_to(acc, ctx.db);
        }
    };

    match attributes {
        // ATTRIBUTES is sorted by key, so each applicable name can be binary-searched.
        Some(applicable) => applicable
            .iter()
            .flat_map(|name| ATTRIBUTES.binary_search_by(|attr| attr.key().cmp(name)).ok())
            .flat_map(|idx| ATTRIBUTES.get(idx))
            .for_each(add_completion),
        // Unknown item kind: offer everything for inner positions, everything
        // except inner-only attributes otherwise.
        None if is_inner => ATTRIBUTES.iter().for_each(add_completion),
        None => ATTRIBUTES.iter().filter(|compl| !compl.prefer_inner).for_each(add_completion),
    }
}

/// A single builtin-attribute completion entry.
struct AttrCompletion {
    // Label shown in the completion list, e.g. `allow(…)`.
    label: &'static str,
    // Optional text used for filtering/lookup instead of the label.
    lookup: Option<&'static str>,
    // Snippet inserted on accept when the client supports snippets.
    snippet: Option<&'static str>,
    // When set, the attribute is only offered in inner-attribute (`#![…]`) position.
    prefer_inner: bool,
}

impl AttrCompletion {
    /// The sort/search key of this entry: the lookup text when present, the label otherwise.
    fn key(&self) -> &'static str {
        match self.lookup {
            Some(lookup) => lookup,
            None => self.label,
        }
    }

    /// Mark this completion as only applicable in inner-attribute position.
    const fn prefer_inner(self) -> AttrCompletion {
        AttrCompletion {
            label: self.label,
            lookup: self.lookup,
            snippet: self.snippet,
            prefer_inner: true,
        }
    }
}

/// Shorthand constructor for an [`AttrCompletion`] usable in any attribute position.
const fn attr(
    label: &'static str,
    lookup: Option<&'static str>,
    snippet: Option<&'static str>,
) -> AttrCompletion {
    AttrCompletion { prefer_inner: false, label, lookup, snippet }
}

// Builds a `&[&str]` of attribute names via push-down accumulation. The accumulator
// starts with the lint-control attributes that are valid everywhere; the `item`,
// `adt` and `linkable` shorthands expand to the attribute sets shared by those
// item categories.
macro_rules! attrs {
    // attributes applicable to all items
    [@ { item $($tt:tt)* } {$($acc:tt)*}] => {
        attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "must_use", "no_mangle" })
    };
    // attributes applicable to all adts
    [@ { adt $($tt:tt)* } {$($acc:tt)*}] => {
        attrs!(@ { $($tt)* } { $($acc)*, "derive", "repr" })
    };
    // attributes applicable to all linkable things aka functions/statics
    [@ { linkable $($tt:tt)* } {$($acc:tt)*}] => {
        attrs!(@ { $($tt)* } { $($acc)*, "export_name", "link_name", "link_section" })
    };
    // error fallback for nicer error message
    [@ { $ty:ident $($tt:tt)* } {$($acc:tt)*}] => {
        compile_error!(concat!("unknown attr subtype ", stringify!($ty)))
    };
    // general push down accumulation
    [@ { $lit:literal $($tt:tt)*} {$($acc:tt)*}] => {
        attrs!(@ { $($tt)* } { $($acc)*, $lit })
    };
    [@ {$($tt:tt)+} {$($tt2:tt)*}] => {
        compile_error!(concat!("Unexpected input ", stringify!($($tt)+)))
    };
    // final output construction
    [@ {} {$($tt:tt)*}] => { &[$($tt)*] as _ };
    // starting matcher
    [$($tt:tt),*] => {
        attrs!(@ { $($tt)* } { "allow", "cfg", "cfg_attr", "deny", "expect", "forbid", "warn" })
    };
}

// Maps the syntax kind of an annotated item to the builtin attributes valid on it.
// Names listed here must match a `key()` in `ATTRIBUTES` to actually be offered.
#[rustfmt::skip]
static KIND_TO_ATTRIBUTES: LazyLock<FxHashMap<SyntaxKind, &[&str]>> = LazyLock::new(|| {
    use SyntaxKind::*;
    [
        (
            SOURCE_FILE,
            attrs!(
                item,
                "crate_name", "feature", "no_implicit_prelude", "no_main", "no_std",
                "recursion_limit", "type_length_limit", "windows_subsystem"
            ),
        ),
        (MODULE, attrs!(item, "macro_use", "no_implicit_prelude", "path")),
        (ITEM_LIST, attrs!(item, "no_implicit_prelude")),
        (MACRO_RULES, attrs!(item, "macro_export", "macro_use")),
        (MACRO_DEF, attrs!(item)),
        (EXTERN_CRATE, attrs!(item, "macro_use", "no_link")),
        (USE, attrs!(item)),
        (TYPE_ALIAS, attrs!(item)),
        (STRUCT, attrs!(item, adt, "non_exhaustive")),
        (ENUM, attrs!(item, adt, "non_exhaustive")),
        (UNION, attrs!(item, adt)),
        (CONST, attrs!(item)),
        (
            FN,
            attrs!(
                item, linkable,
                "cold", "ignore", "inline", "must_use", "panic_handler", "proc_macro",
                "proc_macro_derive", "proc_macro_attribute", "should_panic", "target_feature",
                "test", "track_caller"
            ),
        ),
        (STATIC, attrs!(item, linkable, "global_allocator", "used")),
        (TRAIT, attrs!(item, "must_use")),
        (IMPL, attrs!(item, "automatically_derived")),
        (ASSOC_ITEM_LIST, attrs!(item)),
        (EXTERN_BLOCK, attrs!(item, "link")),
        (EXTERN_ITEM_LIST, attrs!(item, "link")),
        (MACRO_CALL, attrs!()),
        (SELF_PARAM, attrs!()),
        (PARAM, attrs!()),
        (RECORD_FIELD, attrs!()),
        (VARIANT, attrs!("non_exhaustive")),
        (TYPE_PARAM, attrs!()),
        (CONST_PARAM, attrs!()),
        (LIFETIME_PARAM, attrs!()),
        (LET_STMT, attrs!()),
        (EXPR_STMT, attrs!()),
        (LITERAL, attrs!()),
        (RECORD_EXPR_FIELD_LIST, attrs!()),
        (RECORD_EXPR_FIELD, attrs!()),
        (MATCH_ARM_LIST, attrs!()),
        (MATCH_ARM, attrs!()),
        (IDENT_PAT, attrs!()),
        (RECORD_PAT_FIELD, attrs!()),
    ]
    .into_iter()
    .collect()
});
// Attributes valid on expressions: just the common lint-control set from `attrs!()`.
const EXPR_ATTRIBUTES: &[&str] = attrs!();

/// <https://doc.rust-lang.org/reference/attributes.html#built-in-attributes-index>
// Keep these sorted for the binary search!
const ATTRIBUTES: &[AttrCompletion] = &[
    attr("allow(…)", Some("allow"), Some("allow(${0:lint})")),
    attr("automatically_derived", None, None),
    attr("cfg(…)", Some("cfg"), Some("cfg(${0:predicate})")),
    attr("cfg_attr(…)", Some("cfg_attr"), Some("cfg_attr(${1:predicate}, ${0:attr})")),
    attr("cold", None, None),
    attr(r#"crate_name = """#, Some("crate_name"), Some(r#"crate_name = "${0:crate_name}""#))
        .prefer_inner(),
    attr("deny(…)", Some("deny"), Some("deny(${0:lint})")),
    attr(r#"deprecated"#, Some("deprecated"), Some(r#"deprecated"#)),
    attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)),
    attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)),
    attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)),
    attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)),
    attr("expect(…)", Some("expect"), Some("expect(${0:lint})")),
    attr(
        r#"export_name = "…""#,
        Some("export_name"),
        Some(r#"export_name = "${0:exported_symbol_name}""#),
    ),
    attr("feature(…)", Some("feature"), Some("feature(${0:flag})")).prefer_inner(),
    attr("forbid(…)", Some("forbid"), Some("forbid(${0:lint})")),
    attr("global_allocator", None, None),
    attr(r#"ignore = "…""#, Some("ignore"), Some(r#"ignore = "${0:reason}""#)),
    attr("inline", Some("inline"), Some("inline")),
    attr("link", None, None),
    attr(r#"link_name = "…""#, Some("link_name"), Some(r#"link_name = "${0:symbol_name}""#)),
    attr(
        r#"link_section = "…""#,
        Some("link_section"),
        Some(r#"link_section = "${0:section_name}""#),
    ),
    attr("macro_export", None, None),
    attr("macro_use", None, None),
    attr(r#"must_use"#, Some("must_use"), Some(r#"must_use"#)),
    attr("no_implicit_prelude", None, None).prefer_inner(),
    attr("no_link", None, None).prefer_inner(),
    attr("no_main", None, None).prefer_inner(),
    attr("no_mangle", None, None),
    attr("no_std", None, None).prefer_inner(),
    attr("non_exhaustive", None, None),
    attr("panic_handler", None, None),
    attr(r#"path = "…""#, Some("path"), Some(r#"path ="${0:path}""#)),
    attr("proc_macro", None, None),
    attr("proc_macro_attribute", None, None),
    attr("proc_macro_derive(…)", Some("proc_macro_derive"), Some("proc_macro_derive(${0:Trait})")),
    attr(
        r#"recursion_limit = "…""#,
        Some("recursion_limit"),
        Some(r#"recursion_limit = "${0:128}""#),
    )
    .prefer_inner(),
    attr("repr(…)", Some("repr"), Some("repr(${0:C})")),
    attr("should_panic", Some("should_panic"), Some(r#"should_panic"#)),
    attr(
        r#"target_feature(enable = "…")"#,
        Some("target_feature"),
        Some(r#"target_feature(enable = "${0:feature}")"#),
    ),
    attr("test", None, None),
    attr("track_caller", None, None),
    attr("type_length_limit = …", Some("type_length_limit"), Some("type_length_limit = ${0:128}"))
        .prefer_inner(),
    attr("used", None, None),
    attr("warn(…)", Some("warn"), Some("warn(${0:lint})")),
    attr(
        r#"windows_subsystem = "…""#,
        Some("windows_subsystem"),
        Some(r#"windows_subsystem = "${0:subsystem}""#),
    )
    .prefer_inner(),
];

/// Splits the contents of a parenthesized token tree on commas and parses each
/// chunk as an expression, dropping chunks that fail to parse.
fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
    let r_paren = input.r_paren_token()?;
    // All tokens strictly between the delimiters: skip the `(`, stop at the `)`.
    let tokens = input
        .syntax()
        .children_with_tokens()
        .skip(1)
        .take_while(|it| it.as_token() != Some(&r_paren));

    let mut exprs = Vec::new();
    for (is_sep, group) in &tokens.chunk_by(|tok| tok.kind() == T![,]) {
        if is_sep {
            continue;
        }
        let source: String = group.map(|tok| tok.to_string()).collect();
        if let Some(expr) = syntax::hacks::parse_expr_from_str(&source, Edition::CURRENT) {
            exprs.push(expr);
        }
    }
    Some(exprs)
}

#[test]
fn attributes_are_sorted() {
    // `complete_attribute_path` binary-searches ATTRIBUTES by key, so the
    // table must stay strictly sorted.
    let keys: Vec<_> = ATTRIBUTES.iter().map(|attr| attr.key()).collect();
    for pair in keys.windows(2) {
        let (prev, next) = (pair[0], pair[1]);
        assert!(
            prev < next,
            r#"ATTRIBUTES array is not sorted, "{prev}" should come after "{next}""#
        );
    }
}
>941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011
//! An NFA-based parser, which is porting from rustc mbe parsing code
//!
//! See <https://github.com/rust-lang/rust/blob/70b18bc2cbac4712020019f5bf57c00905373205/compiler/rustc_expand/src/mbe/macro_parser.rs>
//! Here is a quick intro to how the parser works, copied from rustc:
//!
//! A 'position' is a dot in the middle of a matcher, usually represented as a
//! dot. For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`.
//!
//! The parser walks through the input a character at a time, maintaining a list
//! of threads consistent with the current position in the input string: `cur_items`.
//!
//! As it processes them, it fills up `eof_items` with threads that would be valid if
//! the macro invocation is now over, `bb_items` with threads that are waiting on
//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
//! on a particular token. Most of the logic concerns moving the · through the
//! repetitions indicated by Kleene stars. The rules for moving the · without
//! consuming any input are called epsilon transitions. It only advances or calls
//! out to the real Rust parser when no `cur_items` threads remain.
//!
//! Example:
//!
//! ```text, ignore
//! Start parsing a a a a b against [· a $( a )* a b].
//!
//! Remaining input: a a a a b
//! next: [· a $( a )* a b]
//!
//! - - - Advance over an a. - - -
//!
//! Remaining input: a a a b
//! cur: [a · $( a )* a b]
//! Descend/Skip (first item).
//! next: [a $( · a )* a b]  [a $( a )* · a b].
//!
//! - - - Advance over an a. - - -
//!
//! Remaining input: a a b
//! cur: [a $( a · )* a b]  [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
//!
//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
//! Remaining input: a b
//! cur: [a $( a · )* a b]  [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
//!
//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
//! Remaining input: b
//! cur: [a $( a · )* a b]  [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
//!
//! - - - Advance over a b. - - -
//!
//! Remaining input: ''
//! eof: [a $( a )* a b ·]
//! ```

use std::rc::Rc;

use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
use tt::{DelimSpan, Span};

use crate::{
    expander::{Binding, Bindings, ExpandResult, Fragment},
    parser::{MetaVarKind, Op, RepeatKind, Separator},
    tt_iter::TtIter,
    ExpandError, MetaTemplate, ValueResult,
};

impl<S: Span> Bindings<S> {
    // Bind `name` to an empty fragment: the metavariable matched an optional
    // position that was present but produced no tokens.
    fn push_optional(&mut self, name: &SmolStr) {
        self.inner.insert(name.clone(), Binding::Fragment(Fragment::Empty));
    }

    // Bind `name` to `Binding::Empty`: the metavariable did not match at all.
    fn push_empty(&mut self, name: &SmolStr) {
        self.inner.insert(name.clone(), Binding::Empty);
    }

    // Iterate over all bindings, ignoring their names.
    fn bindings(&self) -> impl Iterator<Item = &Binding<S>> {
        self.inner.values()
    }
}

/// The result of matching a macro invocation against a matcher.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct Match<S> {
    /// Metavariable bindings produced by the match.
    pub(super) bindings: Bindings<S>,
    /// We currently just keep the first error and count the rest to compare matches.
    pub(super) err: Option<ExpandError>,
    pub(super) err_count: usize,
    /// How many top-level token trees were left to match.
    pub(super) unmatched_tts: usize,
    /// The number of bound variables
    pub(super) bound_count: usize,
}

impl<S> Default for Match<S> {
    fn default() -> Self {
        Self {
            bindings: Default::default(),
            err: Default::default(),
            err_count: Default::default(),
            unmatched_tts: Default::default(),
            bound_count: Default::default(),
        }
    }
}

impl<S> Match<S> {
    /// Record an error: the first error is kept, all errors are counted.
    fn add_err(&mut self, err: ExpandError) {
        if self.err.is_none() {
            self.err = Some(err);
        }
        self.err_count += 1;
    }
}

/// Matching errors are added to the `Match`.
pub(super) fn match_<S: Span>(
    pattern: &MetaTemplate<S>,
    input: &tt::Subtree<S>,
    is_2021: bool,
) -> Match<S> {
    // Count bound variables recursively: every leaf binding counts as one.
    fn count_bound<'a, S: 'a>(bindings: impl Iterator<Item = &'a Binding<S>>) -> usize {
        bindings
            .map(|binding| match binding {
                Binding::Nested(nested) => count_bound(nested.iter()),
                Binding::Fragment(_) | Binding::Empty | Binding::Missing(_) => 1,
            })
            .sum()
    }

    let mut result = match_loop(pattern, input, is_2021);
    result.bound_count = count_bound(result.bindings.bindings());
    result
}

/// A deferred binding command recorded by [`BindingsBuilder`]; replayed into a
/// concrete [`Bindings`] by `build_inner`.
#[derive(Debug, Clone)]
enum BindingKind<S> {
    // Named metavariable that matched nothing at all.
    Empty(SmolStr),
    // Named optional metavariable that matched an empty fragment.
    Optional(SmolStr),
    // Named metavariable bound to a matched fragment.
    Fragment(SmolStr, Fragment<S>),
    // Named metavariable whose fragment is missing; the kind it was expected to be.
    Missing(SmolStr, MetaVarKind),
    // Nested repetition: indices into the builder's `nodes` and `nested` arenas.
    Nested(usize, usize),
}

/// Handle into a [`BindingsBuilder`]: index into `nodes` (field 0) and `nested` (field 1).
#[derive(Debug, Clone)]
struct BindingsIdx(usize, usize);

/// A link-list element: either a value, or a reference to the first `len`
/// entries of another arena row (sharing instead of copying long rows).
#[derive(Debug, Clone)]
enum LinkNode<T> {
    Node(T),
    Parent { idx: usize, len: usize },
}

/// Arena-based builder that records binding commands cheaply during matching and
/// materializes them into [`Bindings`] only for the winning match.
struct BindingsBuilder<S> {
    // Rows of binding commands; rows may reference prefixes of other rows.
    nodes: Vec<Vec<LinkNode<Rc<BindingKind<S>>>>>,
    // Rows of nested-repetition links into `nodes`.
    nested: Vec<Vec<LinkNode<usize>>>,
}

impl<S> Default for BindingsBuilder<S> {
    fn default() -> Self {
        Self { nodes: Default::default(), nested: Default::default() }
    }
}

impl<S: Span> BindingsBuilder<S> {
    /// Allocate a fresh, empty bindings handle.
    fn alloc(&mut self) -> BindingsIdx {
        let idx = self.nodes.len();
        self.nodes.push(Vec::new());
        let nidx = self.nested.len();
        self.nested.push(Vec::new());
        BindingsIdx(idx, nidx)
    }

    /// Cheaply fork a handle. Short rows (< 4 entries) are cloned outright;
    /// longer rows are shared via a `LinkNode::Parent` reference.
    fn copy(&mut self, bindings: &BindingsIdx) -> BindingsIdx {
        let idx = copy_parent(bindings.0, &mut self.nodes);
        let nidx = copy_parent(bindings.1, &mut self.nested);
        return BindingsIdx(idx, nidx);

        fn copy_parent<T>(idx: usize, target: &mut Vec<Vec<LinkNode<T>>>) -> usize
        where
            T: Clone,
        {
            let new_idx = target.len();
            let len = target[idx].len();
            if len < 4 {
                target.push(target[idx].clone())
            } else {
                // Record only (index, current length) so later growth of the
                // parent row is not observed by this fork.
                target.push(vec![LinkNode::Parent { idx, len }]);
            }
            new_idx
        }
    }

    /// Record that `var` matched nothing.
    fn push_empty(&mut self, idx: &mut BindingsIdx, var: &SmolStr) {
        self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Empty(var.clone()))));
    }

    /// Record that optional `var` matched an empty fragment.
    fn push_optional(&mut self, idx: &mut BindingsIdx, var: &SmolStr) {
        self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
    }

    /// Record that `var` matched `fragment`.
    fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment<S>) {
        self.nodes[idx.0]
            .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
    }

    /// Record that `var` is missing a fragment of the given `kind`.
    fn push_missing(&mut self, idx: &mut BindingsIdx, var: &SmolStr, kind: MetaVarKind) {
        self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Missing(var.clone(), kind))));
    }

    /// Attach one repetition iteration (`child`) under `parent`.
    fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) {
        let BindingsIdx(idx, nidx) = self.copy(child);
        self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx))));
    }

    /// Close the current repetition round: stash the current node row into the
    /// nested list and point the handle at a fresh row.
    fn push_default(&mut self, idx: &mut BindingsIdx) {
        self.nested[idx.1].push(LinkNode::Node(idx.0));
        let new_idx = self.nodes.len();
        self.nodes.push(Vec::new());
        idx.0 = new_idx;
    }

    /// Materialize the bindings reachable from `idx` into a concrete `Bindings`.
    fn build(self, idx: &BindingsIdx) -> Bindings<S> {
        self.build_inner(&self.nodes[idx.0])
    }

    fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind<S>>>]) -> Bindings<S> {
        let mut bindings = Bindings::default();
        // Flatten the link list (following Parent references) into plain commands.
        let mut nodes = Vec::new();
        self.collect_nodes(link_nodes, &mut nodes);

        for cmd in nodes {
            match cmd {
                BindingKind::Empty(name) => {
                    bindings.push_empty(name);
                }
                BindingKind::Optional(name) => {
                    bindings.push_optional(name);
                }
                BindingKind::Fragment(name, fragment) => {
                    bindings.inner.insert(name.clone(), Binding::Fragment(fragment.clone()));
                }
                BindingKind::Missing(name, kind) => {
                    bindings.inner.insert(name.clone(), Binding::Missing(*kind));
                }
                BindingKind::Nested(idx, nested_idx) => {
                    // Recursively build each repetition round, then merge each
                    // variable's per-round values into a `Binding::Nested` list.
                    let mut nested_nodes = Vec::new();
                    self.collect_nested(*idx, *nested_idx, &mut nested_nodes);

                    for (idx, iter) in nested_nodes.into_iter().enumerate() {
                        for (key, value) in &iter.inner {
                            let bindings = bindings
                                .inner
                                .entry(key.clone())
                                .or_insert_with(|| Binding::Nested(Vec::new()));

                            if let Binding::Nested(it) = bindings {
                                // insert empty nested bindings before this one
                                while it.len() < idx {
                                    it.push(Binding::Nested(Vec::new()));
                                }
                                it.push(value.clone());
                            }
                        }
                    }
                }
            }
        }

        bindings
    }

    // Resolve a `Parent { idx: id, len }` reference in `nested`, appending the
    // referenced node rows (recursing through further Parent links).
    fn collect_nested_ref<'a>(
        &'a self,
        id: usize,
        len: usize,
        nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind<S>>>]>,
    ) {
        self.nested[id].iter().take(len).for_each(|it| match it {
            LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),
            LinkNode::Parent { idx, len } => self.collect_nested_ref(*idx, *len, nested_refs),
        });
    }

    // Build the `Bindings` of every repetition round: all rounds recorded in
    // `nested[nested_idx]`, plus the still-open row at `nodes[idx]` last.
    fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings<S>>) {
        let last = &self.nodes[idx];
        let mut nested_refs: Vec<&[_]> = Vec::new();
        self.nested[nested_idx].iter().for_each(|it| match *it {
            LinkNode::Node(idx) => nested_refs.push(&self.nodes[idx]),
            LinkNode::Parent { idx, len } => self.collect_nested_ref(idx, len, &mut nested_refs),
        });
        nested_refs.push(last);
        nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter)));
    }

    // Resolve a `Parent { idx: id, len }` reference in `nodes`, appending the
    // first `len` commands of the referenced row.
    fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind<S>>) {
        self.nodes[id].iter().take(len).for_each(|it| match it {
            LinkNode::Node(it) => nodes.push(it),
            LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
        });
    }

    // Flatten a link-node row into a flat command list, following Parent links.
    fn collect_nodes<'a>(
        &'a self,
        link_nodes: &'a [LinkNode<Rc<BindingKind<S>>>],
        nodes: &mut Vec<&'a BindingKind<S>>,
    ) {
        link_nodes.iter().for_each(|it| match it {
            LinkNode::Node(it) => nodes.push(it),
            LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
        });
    }
}

/// One NFA thread: a position in the matcher plus the state needed to resume it.
#[derive(Debug, Clone)]
struct MatchState<'t, S> {
    /// The position of the "dot" in this matcher
    dot: OpDelimitedIter<'t, S>,

    /// Token subtree stack
    /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
    /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
    /// that where the bottom of the stack is the outermost matcher.
    stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>,

    /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
    /// before we enter the repetition.
    up: Option<Box<MatchState<'t, S>>>,

    /// The separator if we are in a repetition.
    sep: Option<Separator<S>>,

    /// The KleeneOp of this sequence if we are in a repetition.
    sep_kind: Option<RepeatKind>,

    /// Whether we already matched separator token.
    sep_matched: bool,

    /// Matched meta variables bindings
    bindings: BindingsIdx,

    /// Cached result of meta variable parsing
    meta_result: Option<(TtIter<'t, S>, ExpandResult<Option<Fragment<S>>>)>,

    /// Whether an error occurred in this state; it is propagated ("poisons") to the parent state.
    is_error: bool,
}

/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
/// produce more items in `next_items`, `eof_items`, and `bb_items`.
///
/// For more info about how this happens, see the module-level doc comments and the inline
/// comments of this function.
///
/// # Parameters
///
/// - `src`: the current token of the parser.
/// - `stack`: the "parent" frames of the token tree
/// - `res`: the match result to store errors
/// - `bindings_builder`: the builder collecting meta variable bindings for all in-flight items
/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
///   successful execution of this function.
/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
///   the function `parse`.
/// - `eof_items`: the set of items that would be valid if this was the EOF.
/// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `error_items`: the set of items in errors, used for error-resilient parsing
/// - `is_2021`: whether edition-2021 semantics apply (affects how `pat` fragments are parsed)
/// - `delim_span`: the delimiter span of the input, used to delimit repetition submatchers
#[inline]
fn match_loop_inner<'t, S: Span>(
    src: TtIter<'t, S>,
    stack: &[TtIter<'t, S>],
    res: &mut Match<S>,
    bindings_builder: &mut BindingsBuilder<S>,
    cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
    bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
    next_items: &mut Vec<MatchState<'t, S>>,
    eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
    error_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
    is_2021: bool,
    delim_span: tt::DelimSpan<S>,
) {
    // Items that are already in an error state are diverted into `error_items` instead of the
    // requested queue, so poisoned matchers don't compete with healthy ones but are still kept
    // around for error recovery.
    macro_rules! try_push {
        ($items: expr, $it:expr) => {
            if $it.is_error {
                error_items.push($it);
            } else {
                $items.push($it);
            }
        };
    }

    while let Some(mut item) = cur_items.pop() {
        // If we finished a nested (delimited) submatcher, pop back out to the enclosing
        // matcher frame(s) and step past the subtree we just completed.
        while item.dot.is_eof() {
            match item.stack.pop() {
                Some(frame) => {
                    item.dot = frame;
                    item.dot.next();
                }
                None => break,
            }
        }
        let op = match item.dot.peek() {
            None => {
                // We are at or past the end of the matcher of `item`.
                if let Some(up) = &item.up {
                    if !item.sep_matched {
                        // Get the `up` matcher
                        let mut new_pos = (**up).clone();
                        new_pos.bindings = bindings_builder.copy(&new_pos.bindings);
                        // Add matches from this repetition to the `matches` of `up`
                        bindings_builder.push_nested(&mut new_pos.bindings, &item.bindings);

                        // Move the "dot" past the repetition in `up`
                        new_pos.dot.next();
                        new_pos.is_error = new_pos.is_error || item.is_error;
                        cur_items.push(new_pos);
                    }

                    // Check if we need a separator.
                    if item.sep.is_some() && !item.sep_matched {
                        let sep = item.sep.as_ref().unwrap();
                        let mut fork = src.clone();
                        if fork.expect_separator(sep) {
                            // HACK: here we use `meta_result` to pass `TtIter` back to caller because
                            // it might have been advanced multiple times. `ValueResult` is
                            // insignificant.
                            item.meta_result = Some((fork, ValueResult::ok(None)));
                            item.dot.next();
                            item.sep_matched = true;
                            try_push!(next_items, item);
                        }
                    }
                    // We don't need a separator. Move the "dot" back to the beginning of the matcher
                    // and try to match again UNLESS we are only allowed to have _one_ repetition.
                    else if item.sep_kind != Some(RepeatKind::ZeroOrOne) {
                        item.dot = item.dot.reset();
                        item.sep_matched = false;
                        bindings_builder.push_default(&mut item.bindings);
                        cur_items.push(item);
                    }
                } else {
                    // If we are not in a repetition, then being at the end of a matcher means that we have
                    // reached the potential end of the input.
                    try_push!(eof_items, item);
                }
                continue;
            }
            Some(it) => it,
        };

        // We are in the middle of a matcher.
        match op {
            OpDelimited::Op(Op::Repeat { tokens, kind, separator }) => {
                if matches!(kind, RepeatKind::ZeroOrMore | RepeatKind::ZeroOrOne) {
                    // Zero repetitions is a valid match: fork an item that skips the whole
                    // repetition, binding every contained meta variable to "empty".
                    let mut new_item = item.clone();
                    new_item.bindings = bindings_builder.copy(&new_item.bindings);
                    new_item.dot.next();
                    collect_vars(
                        &mut |s| {
                            bindings_builder.push_empty(&mut new_item.bindings, &s);
                        },
                        tokens,
                    );
                    cur_items.push(new_item);
                }
                // Also try matching at least one repetition: descend into the repetition body
                // with `item` as the "parent" position to return to.
                cur_items.push(MatchState {
                    dot: tokens.iter_delimited(delim_span),
                    stack: Default::default(),
                    up: Some(Box::new(item)),
                    sep: separator.clone(),
                    sep_kind: Some(*kind),
                    sep_matched: false,
                    bindings: bindings_builder.alloc(),
                    meta_result: None,
                    is_error: false,
                })
            }
            OpDelimited::Op(Op::Subtree { tokens, delimiter }) => {
                if let Ok(subtree) = src.clone().expect_subtree() {
                    if subtree.delimiter.kind == delimiter.kind {
                        item.stack.push(item.dot);
                        item.dot = tokens.iter_delimited_with(*delimiter);
                        cur_items.push(item);
                    }
                }
            }
            OpDelimited::Op(Op::Var { kind, name, .. }) => {
                if let &Some(kind) = kind {
                    let mut fork = src.clone();
                    let match_res = match_meta_var(kind, &mut fork, is_2021, delim_span);
                    match match_res.err {
                        None => {
                            // Some meta variables are optional (e.g. vis)
                            if match_res.value.is_some() {
                                item.meta_result = Some((fork, match_res));
                                try_push!(bb_items, item);
                            } else {
                                bindings_builder.push_optional(&mut item.bindings, name);
                                item.dot.next();
                                cur_items.push(item);
                            }
                        }
                        Some(err) => {
                            // Record the error but still bind whatever was recovered so that
                            // expansion can proceed with partial results.
                            res.add_err(err);
                            match match_res.value {
                                Some(fragment) => bindings_builder.push_fragment(
                                    &mut item.bindings,
                                    name,
                                    fragment,
                                ),
                                None => {
                                    bindings_builder.push_missing(&mut item.bindings, name, kind)
                                }
                            }
                            item.is_error = true;
                            error_items.push(item);
                        }
                    }
                }
            }
            OpDelimited::Op(Op::Literal(lhs)) => {
                if let Ok(rhs) = src.clone().expect_leaf() {
                    if matches!(rhs, tt::Leaf::Literal(it) if it.text == lhs.text) {
                        item.dot.next();
                    } else {
                        res.add_err(ExpandError::UnexpectedToken);
                        item.is_error = true;
                    }
                } else {
                    res.add_err(ExpandError::binding_error(format!("expected literal: `{lhs}`")));
                    item.is_error = true;
                }
                try_push!(next_items, item);
            }
            OpDelimited::Op(Op::Ident(lhs)) => {
                if let Ok(rhs) = src.clone().expect_leaf() {
                    if matches!(rhs, tt::Leaf::Ident(it) if it.text == lhs.text) {
                        item.dot.next();
                    } else {
                        res.add_err(ExpandError::UnexpectedToken);
                        item.is_error = true;
                    }
                } else {
                    res.add_err(ExpandError::binding_error(format!("expected ident: `{lhs}`")));
                    item.is_error = true;
                }
                try_push!(next_items, item);
            }
            OpDelimited::Op(Op::Punct(lhs)) => {
                let mut fork = src.clone();
                let error = if let Ok(rhs) = fork.expect_glued_punct() {
                    let first_is_single_quote = rhs[0].char == '\'';
                    let lhs = lhs.iter().map(|it| it.char);
                    let rhs = rhs.iter().map(|it| it.char);
                    if lhs.clone().eq(rhs) {
                        // HACK: here we use `meta_result` to pass `TtIter` back to caller because
                        // it might have been advanced multiple times. `ValueResult` is
                        // insignificant.
                        item.meta_result = Some((fork, ValueResult::ok(None)));
                        item.dot.next();
                        next_items.push(item);
                        continue;
                    }

                    if first_is_single_quote {
                        // If the first punct token is a single quote, that's a part of a lifetime
                        // ident, not a punct.
                        ExpandError::UnexpectedToken
                    } else {
                        let lhs: SmolStr = lhs.collect();
                        ExpandError::binding_error(format!("expected punct: `{lhs}`"))
                    }
                } else {
                    ExpandError::UnexpectedToken
                };

                res.add_err(error);
                item.is_error = true;
                error_items.push(item);
            }
            OpDelimited::Op(
                Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. },
            ) => {
                // Meta variable expressions are only valid in the RHS (transcriber).
                stdx::never!("metavariable expression in lhs found");
            }
            OpDelimited::Open => {
                // Virtual opening delimiter: valid when the input also opens a subtree here.
                // `match_loop` descends `src` into the subtree when advancing.
                if matches!(src.peek_n(0), Some(tt::TokenTree::Subtree(..))) {
                    item.dot.next();
                    try_push!(next_items, item);
                }
            }
            OpDelimited::Close => {
                // Virtual closing delimiter: valid when the current input frame is exhausted
                // and we are actually inside a nested subtree.
                let is_delim_closed = src.peek_n(0).is_none() && !stack.is_empty();
                if is_delim_closed {
                    item.dot.next();
                    try_push!(next_items, item);
                }
            }
        }
    }
}

/// Matches the input `src` against the matcher `pattern`, driving the NFA-style matcher of
/// `match_loop_inner` token by token until the input is exhausted or no position can make
/// progress. Returns the accumulated `Match` with bindings and/or errors.
fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> {
    let span = src.delimiter.delim_span();
    let mut src = TtIter::new(src);
    // Frames of enclosing subtrees while we are descended into a nested token tree.
    let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new();
    let mut res = Match::default();
    // Bindings of the most promising errored/EOF item, used to produce a partial result
    // when matching ultimately fails (error recovery).
    let mut error_recover_item = None;

    let mut bindings_builder = BindingsBuilder::default();

    // Seed with a single matcher position at the start of the pattern.
    let mut cur_items = smallvec![MatchState {
        dot: pattern.iter_delimited(span),
        stack: Default::default(),
        up: None,
        sep: None,
        sep_kind: None,
        sep_matched: false,
        bindings: bindings_builder.alloc(),
        is_error: false,
        meta_result: None,
    }];

    let mut next_items = vec![];

    loop {
        let mut bb_items = SmallVec::new();
        let mut eof_items = SmallVec::new();
        let mut error_items = SmallVec::new();

        stdx::always!(next_items.is_empty());

        match_loop_inner(
            src.clone(),
            &stack,
            &mut res,
            &mut bindings_builder,
            &mut cur_items,
            &mut bb_items,
            &mut next_items,
            &mut eof_items,
            &mut error_items,
            is_2021,
            span,
        );
        stdx::always!(cur_items.is_empty());

        // Remember the best candidate for error recovery from this round.
        if !error_items.is_empty() {
            error_recover_item = error_items.pop().map(|it| it.bindings);
        } else if let [state, ..] = &*eof_items {
            error_recover_item = Some(state.bindings.clone());
        }

        // We need to do some post processing after the `match_loop_inner`.
        // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
        // either the parse is ambiguous (which should never happen) or there is a syntax error.
        if src.peek_n(0).is_none() && stack.is_empty() {
            if let [state] = &*eof_items {
                // remove all errors, because it is the correct answer !
                res = Match::default();
                res.bindings = bindings_builder.build(&state.bindings);
            } else {
                // Error recovery
                if let Some(item) = error_recover_item {
                    res.bindings = bindings_builder.build(&item);
                }
                res.add_err(ExpandError::UnexpectedToken);
            }
            return res;
        }

        // If there are no possible next positions AND we aren't waiting for the black-box parser,
        // then there is a syntax error.
        //
        // Another possibility is that we need to call out to parse some rust nonterminal
        // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong.
        let has_leftover_tokens = (bb_items.is_empty() && next_items.is_empty())
            || !(bb_items.is_empty() || next_items.is_empty())
            || bb_items.len() > 1;
        if has_leftover_tokens {
            // Count everything left in the current frame and in all enclosing frames as
            // unmatched, then bail out with whatever bindings we could recover.
            res.unmatched_tts += src.len();
            while let Some(it) = stack.pop() {
                src = it;
                res.unmatched_tts += src.len();
            }
            res.add_err(ExpandError::LeftoverTokens);

            if let Some(error_recover_item) = error_recover_item {
                res.bindings = bindings_builder.build(&error_recover_item);
            }
            return res;
        }
        // Dump all possible `next_items` into `cur_items` for the next iteration.
        else if !next_items.is_empty() {
            if let Some((iter, _)) = next_items[0].meta_result.take() {
                // We've matched a possibly "glued" punct. The matched punct (hence
                // `meta_result` also) must be the same for all items.
                // FIXME: If there are multiple items, it's definitely redundant (and it's hacky!
                // `meta_result` isn't supposed to be used this way).

                // We already bumped, so no need to call `.next()` like in the other branch.
                src = iter;
                for item in next_items.iter_mut() {
                    item.meta_result = None;
                }
            } else {
                match src.next() {
                    Some(tt::TokenTree::Subtree(subtree)) => {
                        // The matchers accepted an opening delimiter; descend into the subtree.
                        stack.push(src.clone());
                        src = TtIter::new(subtree);
                    }
                    None => {
                        // The matchers accepted a closing delimiter; pop back out.
                        if let Some(iter) = stack.pop() {
                            src = iter;
                        }
                    }
                    _ => (),
                }
            }
            // Now process the next token
            cur_items.extend(next_items.drain(..));
        }
        // Finally, we have the case where we need to call the black-box parser to get some
        // nonterminal.
        else {
            stdx::always!(bb_items.len() == 1);
            let mut item = bb_items.pop().unwrap();

            if let Some(OpDelimited::Op(Op::Var { name, .. })) = item.dot.peek() {
                let (iter, match_res) = item.meta_result.take().unwrap();
                match match_res.value {
                    Some(fragment) => {
                        bindings_builder.push_fragment(&mut item.bindings, name, fragment);
                    }
                    None if match_res.err.is_none() => {
                        bindings_builder.push_optional(&mut item.bindings, name);
                    }
                    None => {}
                }
                if let Some(err) = match_res.err {
                    res.add_err(err);
                }
                // The black-box parser may have consumed several tokens; resume from its iter.
                src = iter.clone();
                item.dot.next();
            } else {
                unreachable!()
            }
            cur_items.push(item);
        }
        stdx::always!(!cur_items.is_empty());
    }
}

/// Attempts to parse one meta variable fragment of the given `kind` (e.g. `$e:expr`) from the
/// front of `input`, delegating to the rust parser ("black box") for most fragment kinds.
///
/// Returns the matched fragment — `None` means an optional fragment (e.g. `vis`) matched
/// nothing — together with any recoverable parse error.
fn match_meta_var<S: Span>(
    kind: MetaVarKind,
    input: &mut TtIter<'_, S>,
    is_2021: bool,
    delim_span: DelimSpan<S>,
) -> ExpandResult<Option<Fragment<S>>> {
    let fragment = match kind {
        MetaVarKind::Path => {
            return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| {
                it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
            });
        }
        MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
        // In edition 2021+, `pat` matches top-level or-patterns (`a | b`); before, it behaves
        // like `pat_param`.
        MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
        MetaVarKind::Pat => parser::PrefixEntryPoint::Pat,
        MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
        MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt,
        MetaVarKind::Block => parser::PrefixEntryPoint::Block,
        MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem,
        MetaVarKind::Item => parser::PrefixEntryPoint::Item,
        MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
        MetaVarKind::Expr => {
            // `expr` should not match underscores, let expressions, or inline const. The latter
            // two are for [backwards compatibility][0].
            // HACK: Macro expansion should not be done using "rollback and try another alternative".
            // rustc [explicitly checks the next token][1].
            // [0]: https://github.com/rust-lang/rust/issues/86730
            // [1]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576
            match input.peek_n(0) {
                Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it)))
                    if it.text == "_" || it.text == "let" || it.text == "const" =>
                {
                    return ExpandResult::only_err(ExpandError::NoMatchingRule)
                }
                _ => {}
            };
            return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
                tt.map(|tt| match tt {
                    // Wrap a lone leaf in an invisible subtree so the fragment is uniform.
                    tt::TokenTree::Leaf(leaf) => tt::Subtree {
                        delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
                        token_trees: Box::new([leaf.into()]),
                    },
                    tt::TokenTree::Subtree(mut s) => {
                        // Parenthesize invisible-delimited expressions to preserve precedence.
                        if s.delimiter.kind == tt::DelimiterKind::Invisible {
                            s.delimiter.kind = tt::DelimiterKind::Parenthesis;
                        }
                        s
                    }
                })
                .map(Fragment::Expr)
            });
        }
        // These kinds are matched directly on the token trees, without the black-box parser.
        MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
            let tt_result = match kind {
                MetaVarKind::Ident => input
                    .expect_ident()
                    .map(|ident| tt::Leaf::from(ident.clone()).into())
                    .map_err(|()| ExpandError::binding_error("expected ident")),
                MetaVarKind::Tt => input
                    .expect_tt()
                    .map_err(|()| ExpandError::binding_error("expected token tree")),
                MetaVarKind::Lifetime => input
                    .expect_lifetime()
                    .map_err(|()| ExpandError::binding_error("expected lifetime")),
                MetaVarKind::Literal => {
                    // A literal may be preceded by a unary minus (e.g. `-1`); glue the two
                    // tokens into one invisible subtree if so.
                    let neg = input.eat_char('-');
                    input
                        .expect_literal()
                        .map(|literal| {
                            let lit = literal.clone();
                            match neg {
                                None => lit.into(),
                                Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
                                    delimiter: tt::Delimiter::invisible_spanned(*literal.span()),
                                    token_trees: Box::new([neg, lit.into()]),
                                }),
                            }
                        })
                        .map_err(|()| ExpandError::binding_error("expected literal"))
                }
                _ => unreachable!(),
            };
            return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
        }
    };
    input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
}

/// Invokes `collector_fun` with the name of every meta variable (`$name`) bound anywhere in
/// `pattern`, recursing into subtrees and repetitions.
fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate<S>) {
    for op in pattern.iter() {
        match op {
            Op::Var { name, .. } => collector_fun(name.clone()),
            // Both subtrees and repetitions may bind variables nested inside them.
            Op::Subtree { tokens, .. } | Op::Repeat { tokens, .. } => {
                collect_vars(collector_fun, tokens)
            }
            Op::Literal(_) | Op::Ident(_) | Op::Punct(_) => {}
            Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. } => {
                // Meta variable expressions are only valid in the RHS (transcriber).
                stdx::never!("metavariable expression in lhs found");
            }
        }
    }
}
impl<S: Span> MetaTemplate<S> {
    /// Iterates this template's ops wrapped in the given `delimiter`; a visible delimiter
    /// contributes virtual `Open`/`Close` positions around the ops.
    fn iter_delimited_with(&self, delimiter: tt::Delimiter<S>) -> OpDelimitedIter<'_, S> {
        OpDelimitedIter { delimited: delimiter, inner: &self.0, idx: 0 }
    }

    /// Iterates this template's ops with an invisible delimiter spanning `span`, i.e. without
    /// any virtual `Open`/`Close` positions.
    fn iter_delimited(&self, span: tt::DelimSpan<S>) -> OpDelimitedIter<'_, S> {
        let delimited = tt::Delimiter::invisible_delim_spanned(span);
        OpDelimitedIter { delimited, inner: &self.0, idx: 0 }
    }
}

/// A single matcher position within a delimited template: either a real op or one of the two
/// virtual tokens representing the surrounding delimiter itself.
#[derive(Debug, Clone, Copy)]
enum OpDelimited<'a, S> {
    /// A real op from the template.
    Op(&'a Op<S>),
    /// The virtual opening-delimiter position.
    Open,
    /// The virtual closing-delimiter position.
    Close,
}

/// Iterator over a template's ops that also yields virtual `Open`/`Close` positions when the
/// surrounding delimiter is visible (i.e. not `Invisible`).
#[derive(Debug, Clone, Copy)]
struct OpDelimitedIter<'a, S> {
    /// The template's ops.
    inner: &'a [Op<S>],
    /// The delimiter wrapping `inner`; an invisible kind yields no `Open`/`Close` positions.
    delimited: tt::Delimiter<S>,
    /// Current position; for visible delimiters, 0 is `Open` and `inner.len() + 1` is `Close`.
    idx: usize,
}

impl<'a, S: Span> OpDelimitedIter<'a, S> {
    /// Whether the iterator has run past every position, counting the two virtual
    /// `Open`/`Close` positions added for a visible delimiter.
    fn is_eof(&self) -> bool {
        let delim_extra = if self.delimited.kind == tt::DelimiterKind::Invisible { 0 } else { 2 };
        self.idx >= self.inner.len() + delim_extra
    }

    /// Returns the item at the current position without advancing. For a visible delimiter,
    /// position 0 is `Open` and position `inner.len() + 1` is `Close`.
    fn peek(&self) -> Option<OpDelimited<'a, S>> {
        if self.delimited.kind == tt::DelimiterKind::Invisible {
            return self.inner.get(self.idx).map(OpDelimited::Op);
        }
        if self.idx == 0 {
            Some(OpDelimited::Open)
        } else if self.idx == self.inner.len() + 1 {
            Some(OpDelimited::Close)
        } else {
            // Real ops are shifted by one to make room for the leading `Open`.
            self.inner.get(self.idx - 1).map(OpDelimited::Op)
        }
    }

    /// Returns a copy of this iterator rewound to the first position.
    fn reset(&self) -> Self {
        Self { delimited: self.delimited, inner: self.inner, idx: 0 }
    }
}

impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> {
    type Item = OpDelimited<'a, S>;

    fn next(&mut self) -> Option<Self::Item> {
        // Yield whatever is at the current position, then advance past it.
        let current = self.peek();
        self.idx += 1;
        current
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // A visible delimiter contributes two extra virtual positions (`Open` and `Close`).
        let extra = match self.delimited.kind {
            tt::DelimiterKind::Invisible => 0,
            _ => 2,
        };
        let remaining = (self.inner.len() + extra).saturating_sub(self.idx);
        (remaining, Some(remaining))
    }
}

impl<S: Span> TtIter<'_, S> {
    /// Returns `true` and consumes the upcoming tokens if they match `separator`.
    /// Matching happens on a fork that is only committed back to `self` on success,
    /// so on failure `self` is left untouched.
    fn expect_separator(&mut self, separator: &Separator<S>) -> bool {
        let mut fork = self.clone();
        let ok = match separator {
            Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
                Ok(rhs) => rhs.text == lhs.text,
                Err(_) => false,
            },
            // Note: a literal separator also accepts an identifier with the same text.
            Separator::Literal(lhs) => match fork.expect_literal() {
                Ok(rhs) => match rhs {
                    tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
                    tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
                    tt::Leaf::Punct(_) => false,
                },
                Err(_) => false,
            },
            Separator::Puncts(lhs) => match fork.expect_glued_punct() {
                Ok(rhs) => {
                    // Compare the (possibly multi-char) punct sequences character by character.
                    let lhs = lhs.iter().map(|it| it.char);
                    let rhs = rhs.iter().map(|it| it.char);
                    lhs.eq(rhs)
                }
                Err(_) => false,
            },
        };
        if ok {
            // Commit the fork's position only when the separator actually matched.
            *self = fork;
        }
        ok
    }

    /// Consumes one "token tree" for a `$x:tt` fragment: a `'` punct starts a lifetime,
    /// other puncts are glued maximally and wrapped in an invisible subtree, and anything
    /// else (ident, literal, subtree) is taken verbatim.
    fn expect_tt(&mut self) -> Result<tt::TokenTree<S>, ()> {
        if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) {
            if punct.char == '\'' {
                self.expect_lifetime()
            } else {
                let puncts = self.expect_glued_punct()?;
                // Wrap the glued puncts in an invisible subtree spanning first..last punct.
                let delimiter = tt::Delimiter {
                    open: puncts.first().unwrap().span,
                    close: puncts.last().unwrap().span,
                    kind: tt::DelimiterKind::Invisible,
                };
                let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
                Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees }))
            }
        } else {
            self.next().ok_or(()).cloned()
        }
    }

    /// Consumes a lifetime (`'ident`), returning it as an invisible subtree containing the
    /// quote punct and the identifier (or underscore).
    fn expect_lifetime(&mut self) -> Result<tt::TokenTree<S>, ()> {
        let punct = self.expect_single_punct()?;
        if punct.char != '\'' {
            return Err(());
        }
        let ident = self.expect_ident_or_underscore()?;

        Ok(tt::Subtree {
            delimiter: tt::Delimiter {
                open: punct.span,
                close: ident.span,
                kind: tt::DelimiterKind::Invisible,
            },
            token_trees: Box::new([
                tt::Leaf::Punct(*punct).into(),
                tt::Leaf::Ident(ident.clone()).into(),
            ]),
        }
        .into())
    }

    /// Consumes and returns the next token if it is the punct character `c`; otherwise
    /// leaves `self` untouched and returns `None`.
    fn eat_char(&mut self, c: char) -> Option<tt::TokenTree<S>> {
        let mut fork = self.clone();
        match fork.expect_char(c) {
            Ok(_) => {
                // Grab the matched token from `self`, then commit the fork's position.
                let tt = self.next().cloned();
                *self = fork;
                tt
            }
            Err(_) => None,
        }
    }
}