1 #![allow(clippy::non_ascii_literal)]
3 use proc_macro2
::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}
;
5 use std
::str::{self, FromStr}
;
// `Ident::new` accepts normal identifiers, keywords, and `_`, and prints
// them back verbatim.
#[test]
fn idents() {
    assert_eq!(
        Ident::new("String", Span::call_site()).to_string(),
        "String"
    );
    assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
    assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
}
// `Ident::new_raw` produces `r#`-prefixed identifiers, including for
// keywords and `_`.
#[cfg(procmacro2_semver_exempt)]
#[test]
fn raw_idents() {
    assert_eq!(
        Ident::new_raw("String", Span::call_site()).to_string(),
        "r#String" // NOTE(review): expected value not visible in garbled source; matches upstream — confirm
    );
    assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
    assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
}
// An empty identifier is rejected with a panic.
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    Ident::new("", Span::call_site());
}
// A purely numeric identifier is rejected with a panic.
#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    Ident::new("255", Span::call_site());
}
// An identifier containing an invalid character is rejected with a panic.
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    Ident::new("a#", Span::call_site());
}
// A raw-identifier prefix with nothing after it is rejected.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    Ident::new("r#", Span::call_site());
}
// A raw identifier whose body is a number is rejected.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    Ident::new("r#255", Span::call_site());
}
// A raw identifier containing an invalid character is rejected.
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    Ident::new("r#a#", Span::call_site());
}
// A bare lifetime sigil with no name is rejected.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    Ident::new("'", Span::call_site());
}
// A lifetime whose body is a number is rejected.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    Ident::new("'255", Span::call_site());
}
// A lifetime containing an invalid character panics; the exact panic message
// changed at rustc 1.53.0 (the `'` stopped being escaped in Debug output),
// so both historical spellings are accepted.
#[test]
fn lifetime_invalid() {
    let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site()));
    match result {
        Err(box_any) => {
            let message = box_any.downcast_ref::<String>().unwrap();
            let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0
            let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 ..
            assert!(
                message == expected1 || message == expected2,
                "panic message does not match expected string\n\
                 \x20   panic message: `{:?}`\n\
                 \x20expected message: `{:?}`",
                message,
                expected2
            );
        }
        Ok(_) => panic!("test did not panic as expected"),
    }
}
// String literals are quoted and escaped on display; `'` needs no escaping.
#[test]
fn literal_string() {
    assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
    assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
    assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
}
// A raw string containing a bare CR+LF must still parse as a TokenStream.
#[test]
fn literal_raw_string() {
    "r\"\r\n\"".parse::<TokenStream>().unwrap();
}
// Character literals display with single quotes; `'` is escaped, `"` is not.
#[test]
fn literal_character() {
    assert_eq!(Literal::character('x').to_string(), "'x'");
    assert_eq!(Literal::character('\'').to_string(), "'\\''");
    assert_eq!(Literal::character('"').to_string(), "'\"'");
}
118 assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
// Literal suffixes must be absorbed into a single token; invalid suffix
// positions split into multiple tokens instead.
#[test]
fn literal_suffix() {
    // Number of tokens `p` parses into.
    fn token_count(p: &str) -> usize {
        p.parse::<TokenStream>().unwrap().into_iter().count()
    }

    assert_eq!(token_count("999u256"), 1);
    assert_eq!(token_count("999r#u256"), 3);
    assert_eq!(token_count("1."), 1);
    assert_eq!(token_count("1.f32"), 3);
    assert_eq!(token_count("1.0_0"), 1);
    assert_eq!(token_count("1._0"), 3);
    assert_eq!(token_count("1._m"), 3);
    assert_eq!(token_count("\"\"s"), 1);
    assert_eq!(token_count("r\"\"r"), 1);
    assert_eq!(token_count("b\"\"b"), 1);
    assert_eq!(token_count("br\"\"br"), 1);
    assert_eq!(token_count("r#\"\"#r"), 1);
    assert_eq!(token_count("'c'c"), 1);
    assert_eq!(token_count("b'b'b"), 1);
    assert_eq!(token_count("0E"), 1);
    assert_eq!(token_count("0o0A"), 1);
    assert_eq!(token_count("0E--0"), 4);
    assert_eq!(token_count("0.0ECMA"), 1);
}
// A negative suffixed literal iterates as two tokens: a `-` punct (Alone)
// followed by the positive literal.
#[test]
fn literal_iter_negative() {
    let negative_literal = Literal::i32_suffixed(-3);
    let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
    let mut iter = tokens.into_iter();
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '-');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    match iter.next().unwrap() {
        TokenTree::Literal(literal) => {
            assert_eq!(literal.to_string(), "3i32");
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    assert!(iter.next().is_none());
}
// `str::parse::<Literal>` accepts exactly one literal (optionally with a
// leading `-` for numbers) and rejects surrounding whitespace, comments,
// extra tokens, and `-` applied to non-numeric literals.
#[test]
fn literal_parse() {
    assert!("1".parse::<Literal>().is_ok());
    assert!("-1".parse::<Literal>().is_ok());
    assert!("-1u12".parse::<Literal>().is_ok());
    assert!("1.0".parse::<Literal>().is_ok());
    assert!("-1.0".parse::<Literal>().is_ok());
    assert!("-1.0f12".parse::<Literal>().is_ok());
    assert!("'a'".parse::<Literal>().is_ok());
    assert!("\"\n\"".parse::<Literal>().is_ok());
    assert!("0 1".parse::<Literal>().is_err());
    assert!(" 0".parse::<Literal>().is_err());
    assert!("0 ".parse::<Literal>().is_err());
    assert!("/* comment */0".parse::<Literal>().is_err());
    assert!("0/* comment */".parse::<Literal>().is_err());
    assert!("0// comment".parse::<Literal>().is_err());
    assert!("- 1".parse::<Literal>().is_err());
    assert!("- 1.0".parse::<Literal>().is_err());
    assert!("-\"\"".parse::<Literal>().is_err());
}
// NOTE(review): this region of the file was garbled and lines are missing;
// reconstructed from the visible fragments. Additional `roundtrip(...)`
// inputs from the original are likely lost — confirm against upstream.
#[test]
fn roundtrip() {
    // Parse `p`, print it, re-parse the printed form, and assert printing is
    // a fixed point (second print equals the first).
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }

    roundtrip("0xffffffffffffffffffffffffffffffff");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
// NOTE(review): this region of the file was garbled and lines are missing;
// reconstructed from the visible fragments. Additional `fail(...)` inputs
// from the original are likely lost — confirm against upstream.
#[test]
fn fail() {
    // Assert that `p` does NOT parse as a TokenStream.
    fn fail(p: &str) {
        if let Ok(s) = p.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", p, s);
        }
    }

    fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
    fail("\"\\u{999999}\""); // outside of valid range of char
    fail("\"\\u{_0}\""); // leading underscore
    fail("\"\\u{}\""); // empty
    fail("b\"\r\""); // bare carriage return in byte string
    fail("r\"\r\""); // bare carriage return in raw string
    fail("\"\\\r \""); // backslash carriage return
    fail("\"\\\n\u{85}\r\"");
}
// NOTE(review): this region was garbled; the parsed source string and the
// first two expected span rows were reconstructed from the surviving span
// comments — confirm against upstream.
#[cfg(span_locations)]
#[test]
fn span_test() {
    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
// A call-site span with no real source reports line 1, column 0 for both
// ends, and an "<unspecified>" non-real source file.
#[cfg(procmacro2_semver_exempt)]
#[test]
fn default_span() {
    let start = Span::call_site().start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = Span::call_site().end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}
// `Span::join` succeeds only for spans from the same source file, and the
// joined span covers both inputs while keeping that source file.
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    // Separate parses get distinct source files; tokens of one parse share one.
    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // "aaa\nbbb" spans from line 1 col 0 through line 2 col 3.
    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}
// A malformed byte-character fragment must return a parse error rather than
// panic the lexer.
#[test]
fn no_panic() {
    let s = str::from_utf8(b"b\'\xc2\x86  \x00\x00\x00^\"").unwrap();
    assert!(s.parse::<TokenStream>().is_err());
}
// A punct immediately followed by a comment must have Alone spacing (the
// comment is not a joinable token).
#[test]
fn punct_before_comment() {
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '~');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}
// A Joint punct stays Joint even as the last token of a stream.
#[test]
fn joint_last_token() {
    // This test verifies that we match the behavior of libproc_macro *not* in
    // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
    // behavior was temporarily broken.
    // See https://github.com/rust-lang/rust/issues/76399

    let joint_punct = Punct::new(':', Spacing::Joint);
    let stream = TokenStream::from(TokenTree::Punct(joint_punct));
    let punct = match stream.into_iter().next().unwrap() {
        TokenTree::Punct(punct) => punct,
        // The stream was built from a single Punct, so no other variant occurs.
        _ => unreachable!(),
    };
    assert_eq!(punct.spacing(), Spacing::Joint);
}
// `r#dyn` parses as a single raw Ident that displays with its `r#` prefix.
#[test]
fn raw_identifier() {
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tts.next().is_none());
}
// The Debug form of Ident differs depending on whether span locations are
// compiled in.
#[test]
fn test_debug_ident() {
    let ident = Ident::new("proc_macro", Span::call_site());

    #[cfg(not(span_locations))]
    let expected = "Ident(proc_macro)";

    #[cfg(span_locations)]
    let expected = "Ident { sym: proc_macro }";

    assert_eq!(expected, format!("{:?}", ident));
}
// NOTE(review): this function is garbled in the source and most of its
// expected-Debug-output string literals (the bodies of `expected` and
// `expected_before_trailing_commas`, for both the span_locations and
// not(span_locations) configurations) are missing entirely. The surviving
// fragments are kept byte-identical below; reconstruct from upstream
// proc-macro2 tests/test.rs before attempting to compile.
// Visible intent: Debug-format the token stream of "[a + 1]" and compare it
// against a pre-rendered multi-line string, accepting either the
// trailing-comma or pre-trailing-comma rustc formatting.
386 fn test_debug_tokenstream() {
387 let tts
= TokenStream
::from_str("[a + 1]").unwrap();
389 #[cfg(not(span_locations))]
394 stream: TokenStream [
410 #[cfg(not(span_locations))]
411 let expected_before_trailing_commas
= "\
415 stream: TokenStream [
431 #[cfg(span_locations)]
436 stream: TokenStream [
456 #[cfg(span_locations)]
457 let expected_before_trailing_commas
= "\
461 stream: TokenStream [
481 let actual
= format
!("{:#?}", tts
);
482 if actual
.ends_with(",\n]") {
483 assert_eq
!(expected
, actual
);
485 assert_eq
!(expected_before_trailing_commas
, actual
);
// `TokenStream`'s `Default` impl produces an empty stream.
#[test]
fn default_tokenstream_is_empty() {
    let default_token_stream = <TokenStream as Default>::default();

    assert!(default_token_stream.is_empty());
}
// `tuple.0.0` currently lexes as ident, dot, then a single `0.0` literal.
#[test]
fn tuple_indexing() {
    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
    let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
    assert_eq!("tuple", tokens.next().unwrap().to_string());
    assert_eq!(".", tokens.next().unwrap().to_string());
    assert_eq!("0.0", tokens.next().unwrap().to_string());
    assert!(tokens.next().is_none());
}
// Span columns are counted in characters, not bytes: multi-byte characters
// in comments, strings, and idents must not shift the reported spans.
#[cfg(span_locations)]
#[test]
fn non_ascii_tokens() {
    check_spans("// abc", &[]);
    check_spans("// ábc", &[]);
    check_spans("// abc x", &[]);
    check_spans("// ábc x", &[]);
    check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
    check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
    check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
    check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
    check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
    check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
    check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("'a'", &[(1, 0, 1, 3)]);
    check_spans("'á'", &[(1, 0, 1, 3)]);
    check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("abc", &[(1, 0, 1, 3)]);
    check_spans("ábc", &[(1, 0, 1, 3)]);
    check_spans("ábć", &[(1, 0, 1, 3)]);
    check_spans("abc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábć// foo", &[(1, 0, 1, 3)]);
    check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
    check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
}
// Parse `p` and assert that the spans of its tokens consume exactly the
// expected `(start_line, start_col, end_line, end_col)` ranges, in order.
#[cfg(span_locations)]
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
    let ts = p.parse::<TokenStream>().unwrap();
    check_spans_internal(ts, &mut lines);
    assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
}
552 #[cfg(span_locations)]
553 fn check_spans_internal(ts
: TokenStream
, lines
: &mut &[(usize, usize, usize, usize)]) {
555 if let Some((&(sline
, scol
, eline
, ecol
), rest
)) = lines
.split_first() {
558 let start
= i
.span().start();
559 assert_eq
!(start
.line
, sline
, "sline did not match for {}", i
);
560 assert_eq
!(start
.column
, scol
, "scol did not match for {}", i
);
562 let end
= i
.span().end();
563 assert_eq
!(end
.line
, eline
, "eline did not match for {}", i
);
564 assert_eq
!(end
.column
, ecol
, "ecol did not match for {}", i
);
566 if let TokenTree
::Group(g
) = i
{
567 check_spans_internal(g
.stream().clone(), lines
);