// vendor/proc-macro2/tests/test.rs — test suite for the proc-macro2 crate
// (upstream version 1.58.1, as vendored in the rustc packaging).
1 #![allow(clippy::non_ascii_literal)]
2
3 use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
4 use std::panic;
5 use std::str::{self, FromStr};
6
#[test]
fn idents() {
    // `Ident::to_string` round-trips the exact source text, including
    // keywords ("fn") and the lone underscore.
    for &name in &["String", "fn", "_"] {
        assert_eq!(Ident::new(name, Span::call_site()).to_string(), name);
    }
}
16
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
    // `Ident::new_raw` prepends `r#` to the name, even for names that
    // would not have required raw-identifier syntax.
    for &name in &["String", "fn", "_"] {
        assert_eq!(
            Ident::new_raw(name, Span::call_site()).to_string(),
            format!("r#{}", name)
        );
    }
}
27
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    // Constructing an identifier from the empty string must panic.
    let _ = Ident::new("", Span::call_site());
}
33
#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    // A purely numeric name must be rejected at construction time.
    let _ = Ident::new("255", Span::call_site());
}
39
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    // `#` is not a valid identifier character.
    let _ = Ident::new("a#", Span::call_site());
}
45
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    // A bare `r#` prefix with no name following it must panic.
    let _ = Ident::new("r#", Span::call_site());
}
51
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    // The raw prefix does not make a numeric name acceptable.
    let _ = Ident::new("r#255", Span::call_site());
}
57
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    // An invalid character after the raw prefix still panics.
    let _ = Ident::new("r#a#", Span::call_site());
}
63
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    // A lone apostrophe (empty lifetime name) must panic.
    let _ = Ident::new("'", Span::call_site());
}
69
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    // A lifetime whose name is numeric must panic.
    let _ = Ident::new("'255", Span::call_site());
}
75
#[test]
fn lifetime_invalid() {
    // The exact panic text depends on the compiler version (Debug
    // formatting of `'` inside strings changed in 1.53), so catch the
    // panic and accept either rendering of the message.
    let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site()));
    let box_any = match result {
        Err(box_any) => box_any,
        Ok(_) => panic!("test did not panic as expected"),
    };
    let message = box_any.downcast_ref::<String>().unwrap();
    let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0
    let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 ..
    let matched = message == expected1 || message == expected2;
    assert!(
        matched,
        "panic message does not match expected string\n\
         \x20 panic message: `{:?}`\n\
         \x20expected message: `{:?}`",
        message,
        expected2,
    );
}
96
#[test]
fn literal_string() {
    // `Literal::string` quotes its input and escapes only what must be
    // escaped: `"` gains a backslash, `'` does not.
    let cases: &[(&str, &str)] = &[
        ("foo", "\"foo\""),
        ("\"", "\"\\\"\""),
        ("didn't", "\"didn't\""),
    ];
    for &(input, expected) in cases {
        assert_eq!(Literal::string(input).to_string(), expected);
    }
}
103
#[test]
fn literal_raw_string() {
    // A raw string literal containing a CRLF sequence must lex cleanly.
    let parsed = "r\"\r\n\"".parse::<TokenStream>();
    parsed.unwrap();
}
108
#[test]
fn literal_character() {
    // `Literal::character` quotes with apostrophes; only `'` itself needs
    // a backslash escape, `"` is emitted verbatim.
    let cases: &[(char, &str)] = &[('x', "'x'"), ('\'', "'\\''"), ('"', "'\"'")];
    for &(input, expected) in cases {
        assert_eq!(Literal::character(input).to_string(), expected);
    }
}
115
#[test]
fn literal_float() {
    // An unsuffixed float renders with an explicit decimal point even for
    // a whole-number value.
    let literal = Literal::f32_unsuffixed(10.0);
    assert_eq!(literal.to_string(), "10.0");
}
120
#[test]
fn literal_suffix() {
    // How many tokens each input lexes into: when the trailing identifier
    // is a valid literal suffix it is absorbed into a single literal
    // token; otherwise the input splits apart (e.g. `1.f32` lexes as the
    // three tokens `1` `.` `f32`).
    fn token_count(p: &str) -> usize {
        p.parse::<TokenStream>().unwrap().into_iter().count()
    }

    let cases: &[(&str, usize)] = &[
        ("999u256", 1),
        ("999r#u256", 3),
        ("1.", 1),
        ("1.f32", 3),
        ("1.0_0", 1),
        ("1._0", 3),
        ("1._m", 3),
        ("\"\"s", 1),
        ("r\"\"r", 1),
        ("b\"\"b", 1),
        ("br\"\"br", 1),
        ("r#\"\"#r", 1),
        ("'c'c", 1),
        ("b'b'b", 1),
        ("0E", 1),
        ("0o0A", 1),
        ("0E--0", 4),
        ("0.0ECMA", 1),
    ];
    for &(input, expected) in cases {
        assert_eq!(token_count(input), expected, "{:?}", input);
    }
}
146
#[test]
fn literal_iter_negative() {
    // A negative suffixed literal is represented as two tokens: a leading
    // `-` punct with Spacing::Alone, followed by the magnitude literal.
    let negative_literal = Literal::i32_suffixed(-3);
    let mut iter = TokenStream::from(TokenTree::Literal(negative_literal)).into_iter();

    let first = iter.next().unwrap();
    if let TokenTree::Punct(punct) = &first {
        assert_eq!(punct.as_char(), '-');
        assert_eq!(punct.spacing(), Spacing::Alone);
    } else {
        panic!("unexpected token {:?}", first);
    }

    let second = iter.next().unwrap();
    if let TokenTree::Literal(literal) = &second {
        assert_eq!(literal.to_string(), "3i32");
    } else {
        panic!("unexpected token {:?}", second);
    }

    assert!(iter.next().is_none());
}
167
#[test]
fn literal_parse() {
    // Inputs that form exactly one literal token; a leading `-` directly
    // attached to a numeric literal is accepted.
    let ok: &[&str] = &[
        "1", "-1", "-1u12", "1.0", "-1.0", "-1.0f12", "'a'", "\"\n\"",
    ];
    // Rejected: multiple tokens, surrounding whitespace or comments, or a
    // `-` that is not immediately followed by a numeric literal.
    let err: &[&str] = &[
        "0 1",
        " 0",
        "0 ",
        "/* comment */0",
        "0/* comment */",
        "0// comment",
        "- 1",
        "- 1.0",
        "-\"\"",
    ];
    for input in ok {
        assert!(input.parse::<Literal>().is_ok(), "{:?}", input);
    }
    for input in err {
        assert!(input.parse::<Literal>().is_err(), "{:?}", input);
    }
}
188
#[test]
fn roundtrip() {
    // `to_string` must be a fixed point: parsing a string, printing the
    // resulting stream, and re-parsing that printed form must yield the
    // same printed output.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    // Numeric literals, including ones with unrecognized suffixes (1x,
    // 1u80, 1f320) and an integer wider than u128 -- all must survive the
    // round trip unchanged.
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        1x
        1u80
        1f320
        ",
    );
    // Lifetimes and underscore-separated unicode escapes in char/string
    // literals.
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
227
#[test]
fn fail() {
    // Every input below must be rejected by the lexer.
    fn assert_rejected(p: &str) {
        if let Ok(s) = p.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", p, s);
        }
    }
    assert_rejected("' static"); // space between quote and lifetime name
    assert_rejected("r#1"); // raw identifier cannot be a number
    assert_rejected("r#_"); // raw identifier cannot be a lone underscore
    assert_rejected("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
    assert_rejected("\"\\u{999999}\""); // outside of valid range of char
    assert_rejected("\"\\u{_0}\""); // leading underscore
    assert_rejected("\"\\u{}\""); // empty
    assert_rejected("b\"\r\""); // bare carriage return in byte string
    assert_rejected("r\"\r\""); // bare carriage return in raw string
    assert_rejected("\"\\\r \""); // backslash carriage return
    assert_rejected("'aa'aa"); // invalid char literal
    assert_rejected("br##\"\"#"); // unterminated raw byte string
    assert_rejected("\"\\\n\u{85}\r\""); // bare carriage return after escaped newline
}
249
#[cfg(span_locations)]
#[test]
fn span_test() {
    // Verifies line/column spans for a snippet containing a doc comment
    // (which lexes as five tokens -- `#`, `[...]`, `doc`, `=`, string --
    // all sharing the comment's span), two top-level tokens, and a braced
    // group whose contents are checked recursively.
    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
274
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
fn default_span_impl() {}

#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    // With no real source behind it, a call-site span reports line 1,
    // column 0 for both endpoints, and its source file is the
    // "<unspecified>" placeholder marked as not real.
    let span = Span::call_site();
    assert_eq!(span.start().line, 1);
    assert_eq!(span.start().column, 0);
    assert_eq!(span.end().line, 1);
    assert_eq!(span.end().column, 0);
    let source_file = span.source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}
289
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    // Collect the top-level tokens of a two-line source snippet.
    fn tokens(source: &str) -> Vec<TokenTree> {
        source.parse::<TokenStream>().unwrap().into_iter().collect()
    }
    let source1 = tokens("aaa\nbbb");
    let source2 = tokens("ccc\nddd");

    // Tokens from separate parses live in distinct source files; tokens
    // from the same parse share one.
    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    // `join` succeeds only for spans within a single source file.
    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // The joined span covers from the start of `aaa` (1:0) through the
    // end of `bbb` (2:3), and stays in the original source file.
    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}
327
#[test]
fn no_panic() {
    // A garbled byte-char-like input must surface as a parse error, never
    // as a panic inside the lexer.
    let input = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    assert!(input.parse::<TokenStream>().is_err());
}
333
#[test]
fn punct_before_comment() {
    // A punct immediately followed by a line comment gets Alone spacing:
    // the comment does not count as an adjacent joinable token.
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    let first = tts.next().unwrap();
    if let TokenTree::Punct(tt) = &first {
        assert_eq!(tt.as_char(), '~');
        assert_eq!(tt.spacing(), Spacing::Alone);
    } else {
        panic!("wrong token {:?}", first);
    }
}
345
#[test]
fn joint_last_token() {
    // This test verifies that we match the behavior of libproc_macro *not* in
    // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
    // behavior was temporarily broken.
    // See https://github.com/rust-lang/rust/issues/76399
    //
    // Joint spacing on the final punct of a stream must be preserved
    // through a round trip into and out of a TokenStream.
    let stream = TokenStream::from(TokenTree::Punct(Punct::new(':', Spacing::Joint)));
    match stream.into_iter().next().unwrap() {
        TokenTree::Punct(punct) => assert_eq!(punct.spacing(), Spacing::Joint),
        _ => unreachable!(),
    }
}
361
#[test]
fn raw_identifier() {
    // `r#dyn` lexes as a single raw-identifier token that prints back
    // with its `r#` prefix intact.
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    let token = tts.next().unwrap();
    match &token {
        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tts.next().is_none());
}
371
#[test]
fn test_debug_ident() {
    // The Debug representation of an Ident differs depending on whether
    // span-locations support is compiled in.
    let ident = Ident::new("proc_macro", Span::call_site());
    let expected = if cfg!(span_locations) {
        "Ident { sym: proc_macro }"
    } else {
        "Ident(proc_macro)"
    };
    assert_eq!(expected, format!("{:?}", ident));
}
384
#[test]
fn test_debug_tokenstream() {
    // Pretty ({:#?}) Debug output of a parsed stream. The standard
    // library's pretty-printer changed across rustc versions to emit
    // trailing commas, so both the old and new layouts are accepted; the
    // `ends_with(",\n]")` check at the bottom picks which one to compare
    // against. With span_locations enabled the output also carries byte
    // ranges for every token.
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    // Modern layout: trailing comma after every field, no spans.
    #[cfg(not(span_locations))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                char: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
";

    // Older layout: no trailing commas, no spans.
    #[cfg(not(span_locations))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                char: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
";

    // Modern layout with byte-range spans for each token and group.
    #[cfg(span_locations)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
";

    // Older layout with spans, no trailing commas.
    #[cfg(span_locations)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
";

    let actual = format!("{:#?}", tts);
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}
488
#[test]
fn default_tokenstream_is_empty() {
    // `Default` must construct a stream with no tokens.
    let stream = <TokenStream as Default>::default();
    assert!(stream.is_empty());
}
495
#[test]
fn tuple_indexing() {
    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
    // `tuple.0.0` currently lexes as three tokens: the identifier, one
    // dot, and the float-shaped literal `0.0`.
    let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
    for &expected in &["tuple", ".", "0.0"] {
        assert_eq!(expected, tokens.next().unwrap().to_string());
    }
    assert!(tokens.next().is_none());
}
505
#[cfg(span_locations)]
#[test]
fn non_ascii_tokens() {
    // Columns are measured in characters, not bytes: each ASCII case is
    // paired with a multibyte variant that must report identical spans.
    // Plain comments produce no tokens; inner doc comments (`//!`,
    // `/*! */`) produce three tokens that all share the comment's span.
    check_spans("// abc", &[]);
    check_spans("// ábc", &[]);
    check_spans("// abc x", &[]);
    check_spans("// ábc x", &[]);
    check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
    check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
    check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
    check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
    check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
    check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
    check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("'a'", &[(1, 0, 1, 3)]);
    check_spans("'á'", &[(1, 0, 1, 3)]);
    check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("abc", &[(1, 0, 1, 3)]);
    check_spans("ábc", &[(1, 0, 1, 3)]);
    check_spans("ábć", &[(1, 0, 1, 3)]);
    check_spans("abc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábć// foo", &[(1, 0, 1, 3)]);
    check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
    check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
}
544
#[cfg(span_locations)]
// Parse `p` and verify each token's span against the expected
// (start_line, start_col, end_line, end_col) tuples, consumed in order.
// Panics if any tuple goes unconsumed.
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
    let stream = p.parse::<TokenStream>().unwrap();
    check_spans_internal(stream, &mut lines);
    assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
}
551
#[cfg(span_locations)]
// Depth-first walk of the stream, consuming one expected
// (start_line, start_col, end_line, end_col) tuple per token. Tokens
// beyond the expected list are silently skipped; leftover tuples are
// reported by the caller (check_spans).
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
    for i in ts {
        if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
            *lines = rest;

            let start = i.span().start();
            assert_eq!(start.line, sline, "sline did not match for {}", i);
            assert_eq!(start.column, scol, "scol did not match for {}", i);

            let end = i.span().end();
            assert_eq!(end.line, eline, "eline did not match for {}", i);
            assert_eq!(end.column, ecol, "ecol did not match for {}", i);

            // Groups contribute their contents recursively. Group::stream()
            // returns an owned TokenStream, so the `.clone()` the original
            // called on it was redundant and has been dropped.
            if let TokenTree::Group(g) = i {
                check_spans_internal(g.stream(), lines);
            }
        }
    }
}