proc-macro-test: a crate exporting trivial procedural macros for testing.
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
//! Exports a few trivial procedural macros for testing.

#![feature(proc_macro_span, proc_macro_def_site)]
#![allow(clippy::all)]

use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};

/// Function-like macro that expands to exactly the tokens it receives.
///
/// The input stream is returned as-is, without re-walking or rebuilding the
/// trees (contrast with `fn_like_clone_tokens`, which deep-clones each tree).
#[proc_macro]
pub fn fn_like_noop(args: TokenStream) -> TokenStream {
    args
}

/// Function-like macro that always panics, embedding its input tokens in the
/// panic message so callers can observe how macro panics are reported.
#[proc_macro]
pub fn fn_like_panic(args: TokenStream) -> TokenStream {
    panic!("fn_like_panic!({args})");
}

/// Function-like macro that expands to a `compile_error!` invocation whose
/// message echoes the macro's input tokens.
#[proc_macro]
pub fn fn_like_error(args: TokenStream) -> TokenStream {
    let message = format!("fn_like_error!({args})");
    let expansion = format!("compile_error!(\"{message}\");");
    expansion.parse().unwrap()
}

#[proc_macro]
pub fn fn_like_clone_tokens(args: TokenStream) -> TokenStream {
    clone_stream(args)
}

/// Function-like macro that ignores its input and emits one literal of each
/// kind constructible through the `Literal` API.
#[proc_macro]
pub fn fn_like_mk_literals(_args: TokenStream) -> TokenStream {
    // as of 2022-07-21, there's no method on `Literal` to build a raw
    // string or a raw byte string
    let literals = [
        Literal::byte_string(b"byte_string"),
        Literal::character('c'),
        Literal::string("string"),
        Literal::string("-string"),
        Literal::c_string(c"cstring"),
        Literal::f64_suffixed(3.14),
        Literal::f64_suffixed(-3.14),
        Literal::f64_unsuffixed(3.14),
        Literal::f64_unsuffixed(-3.14),
        Literal::i64_suffixed(123),
        Literal::i64_suffixed(-123),
        Literal::i64_unsuffixed(123),
        Literal::i64_unsuffixed(-123),
    ];
    literals.into_iter().map(TokenTree::from).collect()
}

/// Function-like macro that ignores its input and emits one ordinary and one
/// raw identifier, both spanned at the call site.
#[proc_macro]
pub fn fn_like_mk_idents(_args: TokenStream) -> TokenStream {
    let site = Span::call_site();
    TokenStream::from_iter([
        TokenTree::Ident(Ident::new("standard", site)),
        TokenTree::Ident(Ident::new_raw("raw", site)),
    ])
}

/// Function-like macro that joins the spans of its first two tokens and emits
/// a raw identifier `r#joined` carrying the joined span.
///
/// Panics if fewer than two tokens are supplied or the spans cannot be joined
/// (e.g. they come from different files) — acceptable for a test fixture.
#[proc_macro]
pub fn fn_like_span_join(args: TokenStream) -> TokenStream {
    let mut tokens = args.into_iter();
    let first = tokens.next().unwrap();
    let second = tokens.next().unwrap();
    let joined = first.span().join(second.span()).unwrap();
    TokenStream::from(TokenTree::from(Ident::new_raw("joined", joined)))
}

/// Function-like macro exercising span manipulation: re-emits its first three
/// tokens with, respectively, a def-site span, a span resolved at the def
/// site, and the start-position span of the original.
#[proc_macro]
pub fn fn_like_span_ops(args: TokenStream) -> TokenStream {
    let mut tokens = args.into_iter();
    // Token 1: replace the span wholesale with the definition site.
    let mut def_sited = tokens.next().unwrap();
    def_sited.set_span(Span::def_site());
    // Token 2: keep the location but change hygiene resolution to def-site.
    let mut resolved = tokens.next().unwrap();
    resolved.set_span(resolved.span().resolved_at(Span::def_site()));
    // Token 3: collapse the span to its starting position.
    let mut started = tokens.next().unwrap();
    started.set_span(started.span().start());
    TokenStream::from_iter([def_sited, resolved, started])
}

/// Returns the line and column of the first token's span as two integer literals.
#[proc_macro]
pub fn fn_like_span_line_column(args: TokenStream) -> TokenStream {
    // Panics when called with no tokens — acceptable for a test fixture.
    let span = args.into_iter().next().unwrap().span();
    TokenStream::from_iter([
        TokenTree::Literal(Literal::usize_unsuffixed(span.line())),
        TokenTree::Literal(Literal::usize_unsuffixed(span.column())),
    ])
}

/// Attribute macro that leaves the annotated item untouched and discards the
/// attribute arguments.
#[proc_macro_attribute]
pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream {
    item
}

/// Attribute macro that always panics, embedding both the attribute arguments
/// and the annotated item in the panic message.
#[proc_macro_attribute]
pub fn attr_panic(args: TokenStream, item: TokenStream) -> TokenStream {
    panic!("#[attr_panic {args}] {item}");
}

/// Attribute macro that expands to a `compile_error!` invocation echoing the
/// attribute arguments and the annotated item.
#[proc_macro_attribute]
pub fn attr_error(args: TokenStream, item: TokenStream) -> TokenStream {
    let expansion = format!("compile_error!(\"#[attr_error({args})] {item}\");");
    expansion.parse().unwrap()
}

/// Derive macro that re-emits the item it was applied to, unchanged.
///
/// NOTE(review): a derive's output is normally *appended* after the item, so
/// this intentionally produces a duplicate definition — presumably what the
/// test exercises. Registers a `helper` attribute but never inspects it.
#[proc_macro_derive(DeriveReemit, attributes(helper))]
pub fn derive_reemit(item: TokenStream) -> TokenStream {
    item
}

/// Derive macro that expands to nothing.
#[proc_macro_derive(DeriveEmpty)]
pub fn derive_empty(_item: TokenStream) -> TokenStream {
    // `TokenStream::new()` is what `TokenStream::default()` produces.
    TokenStream::new()
}

/// Derive macro that always panics, embedding the annotated item in the
/// panic message.
#[proc_macro_derive(DerivePanic)]
pub fn derive_panic(item: TokenStream) -> TokenStream {
    panic!("#[derive(DerivePanic)] {item}");
}

/// Derive macro that expands to a `compile_error!` invocation echoing the
/// annotated item.
#[proc_macro_derive(DeriveError)]
pub fn derive_error(item: TokenStream) -> TokenStream {
    let expansion = format!("compile_error!(\"#[derive(DeriveError)] {item}\");");
    expansion.parse().unwrap()
}

/// Deep-clones a token stream by rebuilding every tree via [`clone_tree`].
fn clone_stream(ts: TokenStream) -> TokenStream {
    TokenStream::from_iter(ts.into_iter().map(clone_tree))
}

/// Rebuilds a single token tree through the public constructor APIs,
/// recursing into groups and carrying every original span across.
fn clone_tree(t: TokenTree) -> TokenTree {
    match t {
        TokenTree::Group(group) => {
            let mut rebuilt = Group::new(group.delimiter(), clone_stream(group.stream()));
            rebuilt.set_span(group.span());
            TokenTree::from(rebuilt)
        }
        TokenTree::Ident(ident) => {
            // Raw identifiers print with an `r#` prefix, which `Ident::new`
            // rejects — route them through `new_raw` instead.
            let text = ident.to_string();
            let rebuilt = match text.strip_prefix("r#") {
                Some(stripped) => Ident::new_raw(stripped, ident.span()),
                None => Ident::new(&text, ident.span()),
            };
            TokenTree::from(rebuilt)
        }
        TokenTree::Punct(punct) => {
            let mut rebuilt = Punct::new(punct.as_char(), punct.spacing());
            rebuilt.set_span(punct.span());
            TokenTree::from(rebuilt)
        }
        TokenTree::Literal(literal) => {
            // this goes through `literal_from_str` as of 2022-07-18, cf.
            // https://github.com/rust-lang/rust/commit/b34c79f8f1ef4d0149ad4bf77e1759c07a9a01a8
            let mut rebuilt: Literal = literal.to_string().parse().unwrap();
            rebuilt.set_span(literal.span());
            TokenTree::from(rebuilt)
        }
    }
}