]>
Commit | Line | Data |
---|---|---|
29967ef6 | 1 | use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; |
f035d41b | 2 | use std::str::{self, FromStr}; |
e74abb32 XL |
3 | |
#[test]
fn idents() {
    // `Ident::new` accepts ordinary identifiers, keywords, and the bare
    // underscore; `to_string` reproduces the input spelling unchanged.
    for (input, expected) in &[("String", "String"), ("fn", "fn"), ("_", "_")] {
        assert_eq!(Ident::new(input, Span::call_site()).to_string(), *expected);
    }
}
13 | ||
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
    // `Ident::new_raw` prepends the `r#` raw-identifier prefix when printed.
    for (input, expected) in &[("String", "r#String"), ("fn", "r#fn"), ("_", "r#_")] {
        assert_eq!(Ident::new_raw(input, Span::call_site()).to_string(), *expected);
    }
}
24 | ||
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    // Constructing an identifier from the empty string must panic.
    let _ = Ident::new("", Span::call_site());
}
30 | ||
#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    // A purely numeric string is not an identifier; construction must panic.
    let _ = Ident::new("255", Span::call_site());
}
36 | ||
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    // `#` is not a legal identifier character; construction must panic.
    let _ = Ident::new("a#", Span::call_site());
}
42 | ||
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    // A raw prefix with nothing after it is rejected.
    let _ = Ident::new("r#", Span::call_site());
}
48 | ||
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    // A raw prefix does not make a numeric string a valid identifier.
    let _ = Ident::new("r#255", Span::call_site());
}
54 | ||
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    // Illegal characters after the raw prefix are still rejected.
    let _ = Ident::new("r#a#", Span::call_site());
}
60 | ||
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    // A lone apostrophe is not an identifier.
    let _ = Ident::new("'", Span::call_site());
}
66 | ||
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    // A lifetime sigil followed by digits is rejected.
    let _ = Ident::new("'255", Span::call_site());
}
72 | ||
#[test]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
    // Illegal characters inside a lifetime-like string are rejected.
    let _ = Ident::new("'a#", Span::call_site());
}
78 | ||
#[test]
fn literal_string() {
    // String literals print with surrounding quotes; interior double quotes
    // are backslash-escaped while single quotes are left alone.
    assert_eq!("\"foo\"", Literal::string("foo").to_string());
    assert_eq!("\"\\\"\"", Literal::string("\"").to_string());
    assert_eq!("\"didn't\"", Literal::string("didn't").to_string());
}
85 | ||
29967ef6 XL |
#[test]
fn literal_raw_string() {
    // A raw string literal containing a CR+LF pair must parse successfully.
    let source = "r\"\r\n\"";
    source.parse::<TokenStream>().unwrap();
}
90 | ||
e74abb32 XL |
#[test]
fn literal_character() {
    // Char literals print with surrounding single quotes; an interior single
    // quote is escaped while a double quote is not.
    assert_eq!("'x'", Literal::character('x').to_string());
    assert_eq!("'\\''", Literal::character('\'').to_string());
    assert_eq!("'\"'", Literal::character('"').to_string());
}
97 | ||
#[test]
fn literal_float() {
    // An unsuffixed float always prints a fractional part.
    assert_eq!("10.0", Literal::f32_unsuffixed(10.0).to_string());
}
102 | ||
#[test]
fn literal_suffix() {
    /// Number of top-level tokens the input parses into. A literal with a
    /// valid suffix stays a single token; an invalid suffix splits off
    /// extra tokens.
    fn token_count(source: &str) -> usize {
        source.parse::<TokenStream>().unwrap().into_iter().count()
    }

    assert_eq!(token_count("999u256"), 1);
    assert_eq!(token_count("999r#u256"), 3);
    assert_eq!(token_count("1."), 1);
    assert_eq!(token_count("1.f32"), 3);
    assert_eq!(token_count("1.0_0"), 1);
    assert_eq!(token_count("1._0"), 3);
    assert_eq!(token_count("1._m"), 3);
    assert_eq!(token_count("\"\"s"), 1);
    assert_eq!(token_count("r\"\"r"), 1);
    assert_eq!(token_count("b\"\"b"), 1);
    assert_eq!(token_count("br\"\"br"), 1);
    assert_eq!(token_count("r#\"\"#r"), 1);
    assert_eq!(token_count("'c'c"), 1);
    assert_eq!(token_count("b'b'b"), 1);
    assert_eq!(token_count("0E"), 1);
    assert_eq!(token_count("0o0A"), 1);
    assert_eq!(token_count("0E--0"), 4);
    assert_eq!(token_count("0.0ECMA"), 1);
}
128 | ||
#[test]
fn literal_iter_negative() {
    // A negative suffixed literal is emitted as two tokens: an alone `-`
    // punct followed by the (positive) magnitude literal.
    let tokens = TokenStream::from(TokenTree::Literal(Literal::i32_suffixed(-3)));
    let mut iter = tokens.into_iter();
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '-');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    match iter.next().unwrap() {
        TokenTree::Literal(literal) => assert_eq!(literal.to_string(), "3i32"),
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    // Exactly two tokens — nothing should remain.
    assert!(iter.next().is_none());
}
149 | ||
#[test]
fn roundtrip() {
    // Parse → print → re-parse → print must be stable: the second printing
    // equals the first.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // Fix: `s` is already an owned `String`; the previous
        // `s.to_string()` was a redundant allocation (clippy would flag it).
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        1x
        1u80
        1f320
        ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
188 | ||
#[test]
fn fail() {
    /// Assert that the input does NOT parse as a token stream.
    fn fail(src: &str) {
        if let Ok(s) = src.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", src, s);
        }
    }
    fail("' static");
    fail("r#1");
    fail("r#_");
    fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
    fail("\"\\u{999999}\""); // outside of valid range of char
    fail("\"\\u{_0}\""); // leading underscore
    fail("\"\\u{}\""); // empty
    fail("b\"\r\""); // bare carriage return in byte string
    fail("r\"\r\""); // bare carriage return in raw string
    fail("\"\\\r \""); // backslash carriage return
    fail("'aa'aa");
    fail("br##\"\"#");
    fail("\"\\\n\u{85}\r\"");
}
210 | ||
#[cfg(span_locations)]
#[test]
fn span_test() {
    // Each tuple is (start_line, start_col, end_line, end_col) for the
    // corresponding token, in source order. Doc comments expand to five
    // tokens (#, [...], doc, =, the string), all sharing the comment's span.
    // NOTE(review): interior indentation of the source string reconstructed
    // to match the expected column (4, 2) — confirm against upstream.
    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
235 | ||
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    // With no real source behind it, a call-site span reports line 1,
    // column 0 at both ends and an unreal "<unspecified>" source file.
    let span = Span::call_site();
    let start = span.start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = span.end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = span.source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}
250 | ||
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    // Two separately parsed streams live in different source files; joining
    // only succeeds for spans from the same file.
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // Bind the joined span once instead of unwrapping repeatedly.
    let joined1 = joined1.unwrap();
    let start = joined1.start();
    let end = joined1.end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    // The joined span stays in the original source file.
    assert_eq!(joined1.source_file(), source1[0].span().source_file());
}
288 | ||
#[test]
fn no_panic() {
    // Regression test: this byte sequence once caused a panic in the
    // parser. It must fail cleanly with an Err instead.
    let s = str::from_utf8(b"b\'\xc2\x86  \x00\x00\x00^\"").unwrap();
    assert!(s.parse::<TokenStream>().is_err());
}
294 | ||
#[test]
fn punct_before_comment() {
    // A punct immediately followed by a comment must have Alone spacing —
    // the comment is not a joinable token.
    let mut tokens = TokenStream::from_str("~// comment").unwrap().into_iter();
    match tokens.next().unwrap() {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '~');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}
306 | ||
29967ef6 XL |
#[test]
fn joint_last_token() {
    // This test verifies that we match the behavior of libproc_macro *not* in
    // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
    // behavior was temporarily broken.
    // See https://github.com/rust-lang/rust/issues/76399
    let stream = TokenStream::from(TokenTree::Punct(Punct::new(':', Spacing::Joint)));
    let punct = match stream.into_iter().next().unwrap() {
        TokenTree::Punct(punct) => punct,
        _ => unreachable!(),
    };
    // Joint spacing must survive the round trip through a TokenStream even
    // when the punct is the final token.
    assert_eq!(punct.spacing(), Spacing::Joint);
}
322 | ||
e74abb32 XL |
#[test]
fn raw_identifier() {
    // A raw identifier parses as a single Ident token whose display
    // retains the `r#` prefix.
    let mut tokens = TokenStream::from_str("r#dyn").unwrap().into_iter();
    match tokens.next().unwrap() {
        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tokens.next().is_none());
}
332 | ||
#[test]
fn test_debug_ident() {
    // The Debug representation of an Ident differs depending on whether
    // span locations are compiled in.
    let ident = Ident::new("proc_macro", Span::call_site());

    #[cfg(not(span_locations))]
    let expected = "Ident(proc_macro)";

    #[cfg(span_locations)]
    let expected = "Ident { sym: proc_macro }";

    let actual = format!("{:?}", ident);
    assert_eq!(expected, actual);
}
345 | ||
#[test]
fn test_debug_tokenstream() {
    // Pin the pretty `{:#?}` representation of a TokenStream. Four expected
    // strings cover the two cfg axes: span locations on/off, and the
    // trailing-comma formatting change in the standard formatter.
    // NOTE(review): interior indentation of the expected strings
    // reconstructed to standard `{:#?}` 4-space nesting — confirm upstream.
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    #[cfg(not(span_locations))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                char: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
";

    #[cfg(not(span_locations))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                char: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
";

    #[cfg(span_locations)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
";

    #[cfg(span_locations)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
";

    let actual = format!("{:#?}", tts);
    // Trailing-comma output identifies the newer formatter.
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}
449 | ||
#[test]
fn default_tokenstream_is_empty() {
    // `TokenStream::default()` must contain no tokens.
    let default_token_stream = TokenStream::default();
    assert!(default_token_stream.is_empty());
}
f035d41b XL |
456 | |
#[test]
fn tuple_indexing() {
    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
    // `tuple.0.0` currently lexes as: ident, dot, then a single `0.0`
    // float literal (not two integer indices).
    let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
    for expected in &["tuple", ".", "0.0"] {
        assert_eq!(*expected, tokens.next().unwrap().to_string());
    }
    assert!(tokens.next().is_none());
}
466 | ||
#[cfg(span_locations)]
#[test]
fn non_ascii_tokens() {
    // Span columns are measured in UTF-8 characters, so each ASCII input is
    // paired with a multibyte variant that must report identical spans.
    check_spans("// abc", &[]);
    check_spans("// ábc", &[]);
    check_spans("// abc x", &[]);
    check_spans("// ábc x", &[]);
    check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
    check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
    check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
    check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
    check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
    check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
    check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("'a'", &[(1, 0, 1, 3)]);
    check_spans("'á'", &[(1, 0, 1, 3)]);
    check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("abc", &[(1, 0, 1, 3)]);
    check_spans("ábc", &[(1, 0, 1, 3)]);
    check_spans("ábć", &[(1, 0, 1, 3)]);
    check_spans("abc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábć// foo", &[(1, 0, 1, 3)]);
    check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
    check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
}
505 | ||
#[cfg(span_locations)]
/// Parse `p` and assert that the tokens' spans match `lines`, a list of
/// (start_line, start_col, end_line, end_col) tuples consumed in order.
/// Panics if any expected ranges are left unconsumed.
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
    let ts = p.parse::<TokenStream>().unwrap();
    check_spans_internal(ts, &mut lines);
    assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
}
512 | ||
#[cfg(span_locations)]
/// Walk `ts` depth-first, popping one expected (start_line, start_col,
/// end_line, end_col) tuple off the front of `lines` per token and
/// asserting the token's span against it. Recurses into groups.
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
    for i in ts {
        if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
            // Advance the caller's slice past the consumed tuple.
            *lines = rest;

            let start = i.span().start();
            assert_eq!(start.line, sline, "sline did not match for {}", i);
            assert_eq!(start.column, scol, "scol did not match for {}", i);

            let end = i.span().end();
            assert_eq!(end.line, eline, "eline did not match for {}", i);
            assert_eq!(end.column, ecol, "ecol did not match for {}", i);

            if let TokenTree::Group(g) = i {
                // Fix: `Group::stream` already returns an owned TokenStream,
                // so the previous `.clone()` was a redundant deep copy.
                check_spans_internal(g.stream(), lines);
            }
        }
    }
}