Unnamed repository; edit this file 'description' to name the repository.
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
//! Mapping between `TokenId`s and the token's position in macro definitions or inputs.

use std::hash::Hash;

use stdx::never;
use syntax::TextRange;
use tt::Span;

// pub type HirFile = u32;
// pub type FileRange = (HirFile, TextRange);
// Option<MacroCallId>, LocalSyntaxContet
// pub type SyntaxContext = ();
// pub type LocalSyntaxContext = u32;

/// Maps absolute text ranges for the corresponding file to the relevant span data.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
// FIXME: Rename to SpanMap
pub struct TokenMap<S> {
    // FIXME: This needs to be sorted by (FileId, AstId)
    // Then we can do a binary search on the file id,
    // then a bin search on the ast id
    /// Flat list of `(range, span)` entries; currently unsorted, searched
    /// linearly by `span_for_range` / `ranges_with_span`.
    pub span_map: Vec<(TextRange, S)>,
    // span_map2: rustc_hash::FxHashMap<TextRange, usize>,
    /// Whether this map describes a real on-disk file (as opposed to a macro
    /// expansion). When `false`, a failed `span_for_range` lookup is treated
    /// as a bug (see the `never!` in `span_for_range`).
    pub real_file: bool,
}

impl<S> Default for TokenMap<S> {
    fn default() -> Self {
        Self { span_map: Vec::new(), real_file: true }
    }
}

impl<S: Span> TokenMap<S> {
    /// Drops excess capacity from the backing vector.
    pub(crate) fn shrink_to_fit(&mut self) {
        self.span_map.shrink_to_fit();
    }

    /// Records that the absolute text `range` carries `span`.
    pub(crate) fn insert(&mut self, range: TextRange, span: S) {
        self.span_map.push((range, span));
    }

    /// Yields every recorded range whose span equals `span`, in insertion order.
    pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
        self.span_map
            .iter()
            .filter(move |(_, s)| *s == span)
            .map(|&(range, _)| range)
    }

    // FIXME: Should be infallible
    /// Looks up the span whose recorded range overlaps `range` the most.
    pub fn span_for_range(&self, range: TextRange) -> Option<S> {
        // TODO FIXME: make this proper
        // Linear scan for the entry with the largest intersection; on ties the
        // later entry wins (same behavior as `Iterator::max_by_key`).
        let mut best = None;
        for (r, s) in self.span_map.iter() {
            if let Some(overlap) = r.intersect(range) {
                let take = match best {
                    Some((best_len, _)) => overlap.len() >= best_len,
                    None => true,
                };
                if take {
                    best = Some((overlap.len(), *s));
                }
            }
        }
        let found = best.map(|(_, span)| span);
        if found.is_none() && !self.real_file {
            // Ranges stemming from macro expansions must always be covered.
            never!("no span for range {:?} in {:#?}", range, self.span_map);
        }
        found
    }

    // pub fn ranges_by_token(
    //     &self,
    //     token_id: tt::TokenId,
    //     kind: SyntaxKind,
    // ) -> impl Iterator<Item = TextRange> + '_ {
    //     self.entries
    //         .iter()
    //         .filter(move |&&(tid, _)| tid == token_id)
    //         .filter_map(move |(_, range)| range.by_kind(kind))
    // }

    // pub(crate) fn remove_delim(&mut self, idx: usize) {
    //     // FIXME: This could be accidentally quadratic
    //     self.entries.remove(idx);
    // }

    // pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
    //     self.entries.iter().filter_map(|&(tid, tr)| match tr {
    //         TokenTextRange::Token(range) => Some((tid, range)),
    //         TokenTextRange::Delimiter(_) => None,
    //     })
    // }

    // pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
    //     self.entries.retain(|&(tid, _)| id(tid));
    // }
    // pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
    //     self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
    // }

    // pub fn first_range_by_token(
    //     &self,
    //     token_id: tt::TokenId,
    //     kind: SyntaxKind,
    // ) -> Option<TextRange> {
    //     self.ranges_by_token(token_id, kind).next()
    // }

    // pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
    //     self.entries.push((token_id, TokenTextRange::Token(relative_range)));
    // }

    // pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
    //     self.synthetic_entries.push((token_id, id));
    // }

    // pub(crate) fn insert_delim(
    //     &mut self,
    //     token_id: tt::TokenId,
    //     open_relative_range: TextRange,
    //     close_relative_range: TextRange,
    // ) -> usize {
    //     let res = self.entries.len();
    //     let cover = open_relative_range.cover(close_relative_range);

    //     self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
    //     res
    // }

    // pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
    //     let (_, token_text_range) = &mut self.entries[idx];
    //     if let TokenTextRange::Delimiter(dim) = token_text_range {
    //         let cover = dim.cover(close_relative_range);
    //         *token_text_range = TokenTextRange::Delimiter(cover);
    //     }
    // }
}