]> git.proxmox.com Git - rustc.git/blame - src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
New upstream version 1.65.0+dfsg1
[rustc.git] / src / tools / rust-analyzer / crates / rust-analyzer / src / semantic_tokens.rs
CommitLineData
064997fb
FG
1//! Semantic Tokens helpers
2
3use std::ops;
4
5use lsp_types::{
6 Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
7 SemanticTokensEdit,
8};
9
/// Generates, from a list of standard and custom token types:
/// - one `pub(crate)` `SemanticTokenType` constant per entry: standard ones
///   alias the `lsp_types` constant of the same name, custom ones are built
///   from the given string via `SemanticTokenType::new`, and
/// - `SUPPORTED_TYPES`, the full list (standard first, then custom) that the
///   server advertises in its capabilities; `type_index` below indexes into it.
macro_rules! define_semantic_token_types {
    (
        standard {
            $($standard:ident),*$(,)?
        }
        custom {
            $(($custom:ident, $string:literal)),*$(,)?
        }

    ) => {
        $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
        $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*

        pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
            $(SemanticTokenType::$standard,)*
            $($custom),*
        ];
    };
}
29
define_semantic_token_types![
    // Token types taken verbatim from the LSP specification.
    standard {
        COMMENT,
        DECORATOR,
        ENUM_MEMBER,
        ENUM,
        FUNCTION,
        INTERFACE,
        KEYWORD,
        MACRO,
        METHOD,
        NAMESPACE,
        NUMBER,
        OPERATOR,
        PARAMETER,
        PROPERTY,
        STRING,
        STRUCT,
        TYPE_PARAMETER,
        VARIABLE,
    }

    // rust-analyzer-specific token types, reported to clients by the given
    // string name (clients that don't know them fall back as they see fit).
    custom {
        (ANGLE, "angle"),
        (ARITHMETIC, "arithmetic"),
        (ATTRIBUTE, "attribute"),
        (ATTRIBUTE_BRACKET, "attributeBracket"),
        (BITWISE, "bitwise"),
        (BOOLEAN, "boolean"),
        (BRACE, "brace"),
        (BRACKET, "bracket"),
        (BUILTIN_ATTRIBUTE, "builtinAttribute"),
        (BUILTIN_TYPE, "builtinType"),
        (CHAR, "character"),
        (COLON, "colon"),
        (COMMA, "comma"),
        (COMPARISON, "comparison"),
        (CONST_PARAMETER, "constParameter"),
        (DERIVE, "derive"),
        (DERIVE_HELPER, "deriveHelper"),
        (DOT, "dot"),
        (ESCAPE_SEQUENCE, "escapeSequence"),
        (FORMAT_SPECIFIER, "formatSpecifier"),
        (GENERIC, "generic"),
        (LABEL, "label"),
        (LIFETIME, "lifetime"),
        (LOGICAL, "logical"),
        (MACRO_BANG, "macroBang"),
        (PARENTHESIS, "parenthesis"),
        (PUNCTUATION, "punctuation"),
        (SELF_KEYWORD, "selfKeyword"),
        (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
        (SEMICOLON, "semicolon"),
        (TYPE_ALIAS, "typeAlias"),
        (TOOL_MODULE, "toolModule"),
        (UNION, "union"),
        (UNRESOLVED_REFERENCE, "unresolvedReference"),
    }
];
89
/// Modifier counterpart of `define_semantic_token_types!`: generates one
/// `pub(crate)` `SemanticTokenModifier` constant per entry (standard ones
/// alias the `lsp_types` constant, custom ones wrap the given string) plus
/// `SUPPORTED_MODIFIERS`, whose element order fixes the bit positions used by
/// `ModifierSet` below — standard modifiers occupy the low bits.
macro_rules! define_semantic_token_modifiers {
    (
        standard {
            $($standard:ident),*$(,)?
        }
        custom {
            $(($custom:ident, $string:literal)),*$(,)?
        }

    ) => {

        $(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
        $(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*

        pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
            $(SemanticTokenModifier::$standard,)*
            $($custom),*
        ];
    };
}
110
define_semantic_token_modifiers![
    // Modifiers taken verbatim from the LSP specification.
    standard {
        DOCUMENTATION,
        DECLARATION,
        STATIC,
        DEFAULT_LIBRARY,
    }
    // rust-analyzer-specific modifiers, reported by the given string name.
    custom {
        (ASYNC, "async"),
        (ATTRIBUTE_MODIFIER, "attribute"),
        (CALLABLE, "callable"),
        (CONSTANT, "constant"),
        (CONSUMING, "consuming"),
        (CONTROL_FLOW, "controlFlow"),
        (CRATE_ROOT, "crateRoot"),
        (INJECTED, "injected"),
        (INTRA_DOC_LINK, "intraDocLink"),
        (LIBRARY, "library"),
        (MUTABLE, "mutable"),
        (PUBLIC, "public"),
        (REFERENCE, "reference"),
        (TRAIT_MODIFIER, "trait"),
        (UNSAFE, "unsafe"),
    }
];
136
/// A set of semantic token modifiers, encoded as a bitmask: bit `i` is set iff
/// `SUPPORTED_MODIFIERS[i]` is in the set (this is the encoding the LSP
/// `tokenModifiers` field expects). `Default` is the empty set.
#[derive(Default)]
pub(crate) struct ModifierSet(pub(crate) u32);
139
140impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
141 fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
142 let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
143 self.0 |= 1 << idx;
144 }
145}
146
/// Tokens are encoded relative to each other.
///
/// This is a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
pub(crate) struct SemanticTokensBuilder {
    id: String,               // becomes `result_id` of the built `SemanticTokens`
    prev_line: u32,           // absolute line of the last pushed token (delta base)
    prev_char: u32,           // absolute start character of the last pushed token
    data: Vec<SemanticToken>, // delta-encoded tokens accumulated so far
}
156
157impl SemanticTokensBuilder {
158 pub(crate) fn new(id: String) -> Self {
159 SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
160 }
161
162 /// Push a new token onto the builder
163 pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
164 let mut push_line = range.start.line as u32;
165 let mut push_char = range.start.character as u32;
166
167 if !self.data.is_empty() {
168 push_line -= self.prev_line;
169 if push_line == 0 {
170 push_char -= self.prev_char;
171 }
172 }
173
174 // A token cannot be multiline
175 let token_len = range.end.character - range.start.character;
176
177 let token = SemanticToken {
178 delta_line: push_line,
179 delta_start: push_char,
180 length: token_len as u32,
181 token_type: token_index,
182 token_modifiers_bitset: modifier_bitset,
183 };
184
185 self.data.push(token);
186
187 self.prev_line = range.start.line as u32;
188 self.prev_char = range.start.character as u32;
189 }
190
191 pub(crate) fn build(self) -> SemanticTokens {
192 SemanticTokens { result_id: Some(self.id), data: self.data }
193 }
194}
195
196pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
197 let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
198
199 let (_, old) = old.split_at(offset);
200 let (_, new) = new.split_at(offset);
201
202 let offset_from_end =
203 new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
204
205 let (old, _) = old.split_at(old.len() - offset_from_end);
206 let (new, _) = new.split_at(new.len() - offset_from_end);
207
208 if old.is_empty() && new.is_empty() {
209 vec![]
210 } else {
211 // The lsp data field is actually a byte-diff but we
212 // travel in tokens so `start` and `delete_count` are in multiples of the
213 // serialized size of `SemanticToken`.
214 vec![SemanticTokensEdit {
215 start: 5 * offset as u32,
216 delete_count: 5 * old.len() as u32,
217 data: Some(new.into()),
218 }]
219 }
220}
221
222pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
223 SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
224}
225
#[cfg(test)]
mod tests {
    use super::*;

    // Shorthand: builds a `SemanticToken` from its five raw fields
    // (delta_line, delta_start, length, token_type, token_modifiers_bitset).
    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
        SemanticToken {
            delta_line: t.0,
            delta_start: t.1,
            length: t.2,
            token_type: t.3,
            token_modifiers_bitset: t.4,
        }
    }

    // `start`/`delete_count` in the expected edits are counted in raw u32s:
    // 5 per token (see the comment in `diff_tokens`).

    #[test]
    fn test_diff_insert_at_end() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 10,
                delete_count: 0,
                data: Some(vec![from((11, 12, 13, 14, 15))])
            }
        );
    }

    #[test]
    fn test_diff_insert_at_beginning() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 0,
                delete_count: 0,
                data: Some(vec![from((11, 12, 13, 14, 15))])
            }
        );
    }

    #[test]
    fn test_diff_insert_in_middle() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [
            from((1, 2, 3, 4, 5)),
            from((10, 20, 30, 40, 50)),
            from((60, 70, 80, 90, 100)),
            from((6, 7, 8, 9, 10)),
        ];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 5,
                delete_count: 0,
                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
            }
        );
    }

    #[test]
    fn test_diff_remove_from_end() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_beginning() {
        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_middle() {
        let before = [
            from((1, 2, 3, 4, 5)),
            from((10, 20, 30, 40, 50)),
            from((60, 70, 80, 90, 100)),
            from((6, 7, 8, 9, 10)),
        ];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
    }
}