use crate::lexer::UnmatchedDelim;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use item::FnParseMode;
pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
pub use path::PathStyle;

use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
use rustc_ast::util::case::Case;
use rustc_ast::AttrId;
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AttrStyle, Const, DelimArgs, Extern};
use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutability, StrLit};
use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Ordering;
use rustc_errors::PResult;
use rustc_errors::{
    Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{Span, DUMMY_SP};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use std::ops::Range;
use std::{cmp, mem, slice};
use thin_vec::ThinVec;

use crate::errors::{
    IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
};

bitflags::bitflags! {
    struct Restrictions: u8 {
        const STMT_EXPR         = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR        = 1 << 2;
        const ALLOW_LET         = 1 << 3;
    }
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes
#[derive(Clone, Copy, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}

#[derive(Debug, Eq, PartialEq)]
pub enum TrailingToken {
    None,
    Semi,
    Gt,
    /// If the trailing token is a comma, then capture it
    /// Otherwise, ignore the trailing token
    MaybeComma,
}

/// Like `maybe_whole_expr`, but for things other than expressions.
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            if let token::$constructor(x) = &**nt {
                let $x = x.clone();
                $p.bump();
                return Ok($e);
            }
        }
    };
}
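
// Example use (see `parse_visibility` below): if the current token is already an
// interpolated `NtVis` nonterminal, return it directly instead of re-parsing:
//
//     maybe_whole!(self, NtVis, |x| x.into_inner());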

/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.may_recover()
            && $self.look_ahead(1, |t| t == &token::ModSep)
            && let token::Interpolated(nt) = &$self.token.kind
            && let token::NtTy(ty) = &**nt
        {
            let ty = ty.clone();
            $self.bump();
            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
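
// Invoked as `maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery)` by
// callers that may encounter an interpolated type followed by `::`.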

#[derive(Clone, Copy)]
pub enum Recovery {
    Allowed,
    Forbidden,
}

#[derive(Clone)]
pub struct Parser<'a> {
    pub sess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token.
    pub token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    pub capture_cfg: bool,
    restrictions: Restrictions,
    expected_tokens: Vec<TokenType>,
    // Important: This must only be advanced from `bump` to ensure that
    // `token_cursor.num_next_calls` is updated properly.
    token_cursor: TokenCursor,
    desugar_doc_comments: bool,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u32,
    max_angle_bracket_count: u32,
    last_unexpected_token_span: Option<Span>,
    /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
    /// looked like it could have been a mistyped path or literal (`Option:Some(42)`).
    pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
    capture_state: CaptureState,
    /// This allows us to recover when the user forgets to add braces around
    /// multiple statements in the closure body.
    pub current_closure: Option<ClosureSpans>,
    /// Whether the parser is allowed to do recovery.
    /// This is disabled when parsing macro arguments, see #103534
    pub recovery: Recovery,
}

// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
// nonterminals. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);

/// Stores span information about a closure.
#[derive(Clone)]
pub struct ClosureSpans {
    pub whole_closure: Span,
    pub closing_pipe: Span,
    pub body: Span,
}

/// Indicates a range of tokens that should be replaced by
/// the tokens in the provided vector. This is used in two
/// places during token collection:
///
/// 1. During the parsing of an AST node that may have a `#[derive]`
/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`.
/// In this case, we use a `ReplaceRange` to replace the entire inner AST node
/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
/// on an `AttrTokenStream`.
///
/// 2. When we parse an inner attribute while collecting tokens. We
/// remove inner attributes from the token stream entirely, and
/// instead track them through the `attrs` field on the AST node.
/// This allows us to easily manipulate them (for example, removing
/// the first macro inner attribute to invoke a proc-macro).
/// When we create a `TokenStream`, the inner attributes get inserted
/// into the proper place in the token stream.
pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);

/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
/// we will never need an `AttrTokenStream`.
#[derive(Copy, Clone)]
pub enum Capturing {
    /// We aren't performing any capturing - this is the default mode.
    No,
    /// We are capturing tokens.
    Yes,
}

#[derive(Clone)]
struct CaptureState {
    capturing: Capturing,
    replace_ranges: Vec<ReplaceRange>,
    inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
}

/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
/// use this type to emit them as a linear sequence. But a linear sequence is
/// what the parser expects, for the most part.
#[derive(Clone)]
struct TokenCursor {
    // Cursor for the current (innermost) token stream. The delimiters for this
    // token stream are found in `self.stack.last()`; when that is `None` then
    // we are in the outermost token stream which never has delimiters.
    tree_cursor: TokenTreeCursor,

    // Token streams surrounding the current one. The delimiters for stack[n]'s
    // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
    // because it's the outermost token stream which never has delimiters.
    stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,

    desugar_doc_comments: bool,

    // Counts the number of calls to `{,inlined_}next`.
    num_next_calls: usize,

    // During parsing, we may sometimes need to 'unglue' a
    // glued token into two component tokens
    // (e.g. '>>' into '>' and '>'), so that the parser
    // can consume them one at a time. This process
    // bypasses the normal capturing mechanism
    // (e.g. `num_next_calls` will not be incremented),
    // since the 'unglued' tokens do not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming both unglued tokens,
    // then this is not an issue - we'll end up
    // capturing the single 'glued' token.
    //
    // However, in certain circumstances, we may
    // want to capture just the first 'unglued' token.
    // For example, capturing the `Vec<u8>`
    // in `Option<Vec<u8>>` requires us to unglue
    // the trailing `>>` token. The `break_last_token`
    // field is used to track this token - it gets
    // appended to the captured stream when
    // we evaluate a `LazyAttrTokenStream`.
    break_last_token: bool,
}

impl TokenCursor {
    fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
        self.inlined_next(desugar_doc_comments)
    }

    /// This always-inlined version should only be used on hot code paths.
    #[inline(always)]
    fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
        loop {
            // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
            // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
            // removed.
            if let Some(tree) = self.tree_cursor.next_ref() {
                match tree {
                    &TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
                        (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
                            let desugared = self.desugar(attr_style, data, span);
                            self.tree_cursor.replace_prev_and_rewind(desugared);
                            // Continue to get the first token of the desugared doc comment.
                        }
                        _ => {
                            debug_assert!(!matches!(
                                token.kind,
                                token::OpenDelim(_) | token::CloseDelim(_)
                            ));
                            return (token.clone(), spacing);
                        }
                    },
                    &TokenTree::Delimited(sp, delim, ref tts) => {
                        let trees = tts.clone().into_trees();
                        self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
                        if delim != Delimiter::Invisible {
                            return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
                        }
                        // No open delimiter to return; continue on to the next iteration.
                    }
                }
            } else if let Some((tree_cursor, delim, span)) = self.stack.pop() {
                // We have exhausted this token stream. Move back to its parent token stream.
                self.tree_cursor = tree_cursor;
                if delim != Delimiter::Invisible {
                    return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
                }
                // No close delimiter to return; continue on to the next iteration.
            } else {
                // We have exhausted the outermost token stream.
                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
            }
        }
    }

    // Desugar a doc comment into something like `#[doc = r"foo"]`.
    fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
        // required to wrap the text. E.g.
        // - `abc d` is wrapped as `r"abc d"` (num_of_hashes = 0)
        // - `abc "d"` is wrapped as `r#"abc "d""#` (num_of_hashes = 1)
        // - `abc "##d##"` is wrapped as `r###"abc ##"d"##"###` (num_of_hashes = 3)
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in data.as_str().chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = cmp::max(num_of_hashes, count);
        }

        // `/// foo` becomes `doc = r"foo"`.
        let delim_span = DelimSpan::from_single(span);
        let body = TokenTree::Delimited(
            delim_span,
            Delimiter::Bracket,
            [
                TokenTree::token_alone(token::Ident(sym::doc, false), span),
                TokenTree::token_alone(token::Eq, span),
                TokenTree::token_alone(
                    TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
                    span,
                ),
            ]
            .into_iter()
            .collect::<TokenStream>(),
        );

        if attr_style == AttrStyle::Inner {
            vec![
                TokenTree::token_alone(token::Pound, span),
                TokenTree::token_alone(token::Not, span),
                body,
            ]
        } else {
            vec![TokenTree::token_alone(token::Pound, span), body]
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
enum TokenType {
    Token(TokenKind),
    Keyword(Symbol),
    Operator,
    Lifetime,
    Ident,
    Path,
    Type,
    Const,
}

impl TokenType {
    fn to_string(&self) -> String {
        match self {
            TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
            TokenType::Keyword(kw) => format!("`{}`", kw),
            TokenType::Operator => "an operator".to_string(),
            TokenType::Lifetime => "lifetime".to_string(),
            TokenType::Ident => "identifier".to_string(),
            TokenType::Path => "path".to_string(),
            TokenType::Type => "type".to_string(),
            TokenType::Const => "a const expression".to_string(),
        }
    }
}

#[derive(Copy, Clone, Debug)]
enum TokenExpectType {
    Expect,
    NoExpect,
}

/// A sequence separator.
struct SeqSep {
    /// The separator token.
    sep: Option<TokenKind>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}

impl SeqSep {
    fn trailing_allowed(t: TokenKind) -> SeqSep {
        SeqSep { sep: Some(t), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}
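
// For example, `SeqSep::trailing_allowed(token::Comma)` (as used by
// `parse_delim_comma_seq` below) describes a comma-separated list that may end with a
// trailing comma.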

pub enum FollowedByType {
    Yes,
    No,
}

#[derive(Clone, Copy, PartialEq, Eq)]
pub enum TokenDescription {
    ReservedIdentifier,
    Keyword,
    ReservedKeyword,
    DocComment,
}

impl TokenDescription {
    pub fn from_token(token: &Token) -> Option<Self> {
        match token.kind {
            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
            token::DocComment(..) => Some(TokenDescription::DocComment),
            _ => None,
        }
    }
}

pub(super) fn token_descr(token: &Token) -> String {
    let name = pprust::token_to_string(token).to_string();

    let kind = TokenDescription::from_token(token).map(|kind| match kind {
        TokenDescription::ReservedIdentifier => "reserved identifier",
        TokenDescription::Keyword => "keyword",
        TokenDescription::ReservedKeyword => "reserved keyword",
        TokenDescription::DocComment => "doc comment",
    });

    if let Some(kind) = kind { format!("{} `{}`", kind, name) } else { format!("`{}`", name) }
}

impl<'a> Parser<'a> {
    pub fn new(
        sess: &'a ParseSess,
        tokens: TokenStream,
        desugar_doc_comments: bool,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            sess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
                tree_cursor: tokens.into_trees(),
                stack: Vec::new(),
                num_next_calls: 0,
                desugar_doc_comments,
                break_last_token: false,
            },
            desugar_doc_comments,
            unmatched_angle_bracket_count: 0,
            max_angle_bracket_count: 0,
            last_unexpected_token_span: None,
            last_type_ascription: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                replace_ranges: Vec::new(),
                inner_attr_ranges: Default::default(),
            },
            current_closure: None,
            recovery: Recovery::Allowed,
        };

        // Make parser point to the first token.
        parser.bump();

        parser
    }

    pub fn recovery(mut self, recovery: Recovery) -> Self {
        self.recovery = recovery;
        self
    }

    /// Whether the parser is allowed to recover from broken code.
    ///
    /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
    /// is not allowed. All recovery done by the parser must be gated behind this check.
    ///
    /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
    /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
    fn may_recover(&self) -> bool {
        matches!(self.recovery, Recovery::Allowed)
    }

    pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // We can get `Ok(true)` from `recover_closing_delimiter`
            // which is called in `expected_one_of_not_found`.
            Ok(_) => FatalError.raise(),
        }
    }

    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
        if self.expected_tokens.is_empty() {
            if self.token == *t {
                self.bump();
                Ok(false)
            } else {
                self.unexpected_try_recover(t)
            }
        } else {
            self.expect_one_of(slice::from_ref(t), &[])
        }
    }

    /// Expect next token to be edible or inedible token. If edible,
    /// then consume it; if inedible, then return without consuming
    /// anything. Signal a fatal error if next token is unexpected.
    pub fn expect_one_of(
        &mut self,
        edible: &[TokenKind],
        inedible: &[TokenKind],
    ) -> PResult<'a, bool /* recovered */> {
        if edible.contains(&self.token.kind) {
            self.bump();
            Ok(false)
        } else if inedible.contains(&self.token.kind) {
            // leave it in the input
            Ok(false)
        } else if self.last_unexpected_token_span == Some(self.token.span) {
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
        }
    }

    // Public for rustfmt usage.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        let (ident, is_raw) = self.ident_or_err(recover)?;

        if !is_raw && ident.is_reserved() {
            let mut err = self.expected_ident_found_err();

    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> {
        let result = self.token.ident().ok_or_else(|| self.expected_ident_found(recover));

        let (ident, is_raw) = match result {
            Ok(ident) => ident,
            Err(err) => match err {
                Err(err) => return Err(err),

    /// Checks if the next token is `tok`, and returns `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
    /// encountered.
    fn check(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.token == *tok;
        if !is_present {
            self.expected_tokens.push(TokenType::Token(tok.clone()));
        }
        is_present
    }

    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }

    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
    ///
    /// The main purpose of this function is to reduce the cluttering of the suggestions list
    /// which using the normal eat method could introduce in some cases.
    pub fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check_noexpect(tok);
        if is_present {
            self.bump()
        }
        is_present
    }

    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
    pub fn eat(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check(tok);
        if is_present {
            self.bump()
        }
        is_present
    }

    /// If the next token is the given keyword, returns `true` without eating it.
    /// An expectation is also added for diagnostics purposes.
    fn check_keyword(&mut self, kw: Symbol) -> bool {
        self.expected_tokens.push(TokenType::Keyword(kw));
        self.token.is_keyword(kw)
    }

    fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
        if self.check_keyword(kw) {
            return true;
        }

        if case == Case::Insensitive
            && let Some((ident, /* is_raw */ false)) = self.token.ident()
            && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
            true
        } else {
            false
        }
    }

    /// If the next token is the given keyword, eats it and returns `true`.
    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
    // Public for rustfmt usage.
    pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
        if self.check_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    /// Eats a keyword, optionally ignoring the case.
    /// If the case differs (and is ignored) an error is issued.
    /// This is useful for recovery.
    fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
        if self.eat_keyword(kw) {
            return true;
        }

        if case == Case::Insensitive
            && let Some((ident, /* is_raw */ false)) = self.token.ident()
            && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
            self
                .struct_span_err(ident.span, format!("keyword `{kw}` is written in a wrong case"))
                .span_suggestion(
                    ident.span,
                    "write it in the correct case",
                    kw,
                    Applicability::MachineApplicable
                )
                .emit();

            self.bump();
            true
        } else {
            false
        }
    }

    fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        if self.token.is_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    /// If the given word is not a keyword, signals an error.
    /// If the next token is not the given word, signals an error.
    /// Otherwise, eats it.
    fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
        if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
    }

    /// Is the given keyword `kw` followed by a non-reserved identifier?
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
    }

    fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
        if ok {
            true
        } else {
            self.expected_tokens.push(typ);
            false
        }
    }

    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }

    fn check_const_closure(&self) -> bool {
        self.is_keyword_ahead(0, &[kw::Const])
            && self.look_ahead(1, |t| match &t.kind {
                // async closures do not work with const closures, so we do not parse that here.
                token::Ident(kw::Move | kw::Static, _) | token::OrOr | token::BinOp(token::Or) => {
                    true
                }
                _ => false,
            })
    }

    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match &t.kind {
                token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
                token::OpenDelim(Delimiter::Brace) => true,
                _ => false,
            })
    }

    /// Checks to see if the next token is either `+` or `+=`.
    /// Otherwise returns `false`.
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(
            self.token.is_like_plus(),
            TokenType::Token(token::BinOp(token::Plus)),
        )
    }

    /// Eats the expected token if it's present possibly breaking
    /// compound tokens like multi-character operators in process.
    /// Returns `true` if the token was eaten.
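    // For example, when parsing `Option<Vec<u8>>` the final `>>` is a single glued token
    // that gets broken into two `>` tokens here, so that each closing angle bracket can be
    // consumed on its own (see also `break_last_token` on `TokenCursor` above).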
    fn break_and_eat(&mut self, expected: TokenKind) -> bool {
        if self.token.kind == expected {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op() {
            Some((first, second)) if first == expected => {
                let first_span = self.sess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `bump` will set this field to `None`
                self.token_cursor.break_last_token = true;
                // Use the spacing of the glued token as the spacing
                // of the unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_tokens.push(TokenType::Token(expected));
                false
            }
        }
    }

    /// Eats `+` possibly breaking tokens like `+=` in process.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(token::BinOp(token::Plus))
    }

    /// Eats `&` possibly breaking tokens like `&&` in process.
    /// Signals an error if `&` is not eaten.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `|` possibly breaking tokens like `||` in process.
    /// Signals an error if `|` was not eaten.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `<` possibly breaking tokens like `<<` in process.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(token::Lt);
        if ate {
            // See doc comment for `unmatched_angle_bracket_count`.
            self.unmatched_angle_bracket_count += 1;
            self.max_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    /// Eats `<` possibly breaking tokens like `<<` in process.
    /// Signals an error if `<` was not eaten.
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    /// Eats `>` possibly breaking tokens like `>>` in process.
    /// Signals an error if `>` was not eaten.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::Gt) {
            // See doc comment for `unmatched_angle_bracket_count`.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }

    fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
        kets.iter().any(|k| match expect {
            TokenExpectType::Expect => self.check(k),
            TokenExpectType::NoExpect => self.token == **k,
        })
    }

    fn parse_seq_to_before_tokens<T>(
        &mut self,
        kets: &[&TokenKind],
        sep: SeqSep,
        expect: TokenExpectType,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
        let mut first = true;
        let mut recovered = false;
        let mut trailing = false;
        let mut v = ThinVec::new();

        while !self.expect_any_with_type(kets, expect) {
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(t) = &sep.sep {
                if first {
                    first = false;
                } else {
                    match self.expect(t) {
                        Ok(false) => {
                            self.current_closure.take();
                        }
                        Ok(true) => {
                            self.current_closure.take();
                            recovered = true;
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(t);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token.kind == TokenKind::Semi => {
                                    // Finding a semicolon instead of a comma
                                    // after a closure body indicates that the
                                    // closure body may be a block but the user
                                    // forgot to put braces around its
                                    // statements.
                                    self.recover_missing_braces_around_closure_body(

                            // Attempt to keep parsing if it was a similar separator.
                            if let Some(tokens) = t.similar_tokens() {
                                if tokens.contains(&self.token.kind) {

                            // If this was a missing `@` in a binding pattern
                            // bail with a suggestion
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token.kind == token::DotDot {
                                    "if you meant to bind the contents of \
                                    the rest of the array pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)

                                expect_err
                                    .span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),

                                        Applicability::MaybeIncorrect,

                            // Attempt to keep parsing if it was an omitted separator.

                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    .span_suggestion_short(

                                        &format!("missing `{}`", token_str),

                                        Applicability::MaybeIncorrect,

                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    for xx in &e.children {
                                        // propagate the help message from sub error 'e' to main error 'expect_err'
                                        expect_err.children.push(xx.clone());
                                    }

            if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {

        Ok((v, trailing, recovered))
    }

    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: DiagnosticBuilder<'_, ErrorGuaranteed>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        while self.eat(&TokenKind::Semi) {
            self.parse_stmt_without_recovery(false, ForceCollect::Yes).unwrap_or_else(|e| {

        expect_err.set_primary_message(
            "closure bodies that contain statements must be surrounded by braces",
        );

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        expect_err.set_span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,

    /// Parses a sequence, not including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_before_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, bool, bool)> {
        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
    }

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
        if !recovered {
            self.eat(ket);
        }
        Ok((val, trailing))
    }

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_unspanned_seq<T>(
        &mut self,
        bra: &TokenKind,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, bool)> {
        self.expect(bra)?;
        self.parse_seq_to_end(ket, sep, f)
    }

    fn parse_delim_comma_seq<T>(
        &mut self,
        delim: Delimiter,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, bool)> {
        self.parse_unspanned_seq(
            &token::OpenDelim(delim),
            &token::CloseDelim(delim),
            SeqSep::trailing_allowed(token::Comma),
            f,
        )
    }

    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (ThinVec<T>, bool)> {
        self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
    }

    /// Advance the parser by one token using provided token as the next one.
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }

    /// This always-inlined version should only be used on hot code paths.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // Update the current and previous tokens.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Diagnostics.
        self.expected_tokens.clear();
    }

    /// Advance the parser by one token.
    pub fn bump(&mut self) {
        // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
        // than `.0`/`.1` access.
        let mut next = self.token_cursor.inlined_next(self.desugar_doc_comments);
        self.token_cursor.num_next_calls += 1;
        // We've retrieved a token from the underlying
        // cursor, so we no longer need to worry about
        // an unglued token. See `break_and_eat` for more details
        self.token_cursor.break_last_token = false;
        if next.0.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
        ));
        self.inlined_bump_with(next)
    }

    /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
    /// When `dist == 0` then the current token is looked at.
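    // For example, `self.look_ahead(1, |t| t == &token::ModSep)` (as in
    // `maybe_recover_from_interpolated_ty_qpath!` above) inspects the token just after the
    // current one without consuming anything.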
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        let tree_cursor = &self.token_cursor.tree_cursor;
        if let Some(&(_, delim, span)) = self.token_cursor.stack.last()
            && delim != Delimiter::Invisible
        {
            let all_normal = (0..dist).all(|i| {
                let token = tree_cursor.look_ahead(i);
                !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
            });
            if all_normal {
                return match tree_cursor.look_ahead(dist - 1) {
                    Some(tree) => match tree {
                        TokenTree::Token(token, _) => looker(token),
                        TokenTree::Delimited(dspan, delim, _) => {
                            looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                        }
                    },
                    None => looker(&Token::new(token::CloseDelim(delim), span.close)),
                };
            }
        }

        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next(/* desugar_doc_comments */ false).0;
            if matches!(
                token.kind,
                token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
            ) {
                continue;
            }
            i += 1;
        }
        return looker(&token);
    }

    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
    fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }

    /// Parses asyncness: `async` or nothing.
    fn parse_asyncness(&mut self, case: Case) -> Async {
        if self.eat_keyword_case(kw::Async, case) {
            let span = self.prev_token.uninterpolated_span();
            Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
        } else {
            Async::No
        }
    }

    /// Parses unsafety: `unsafe` or nothing.
    fn parse_unsafety(&mut self, case: Case) -> Unsafe {
        if self.eat_keyword_case(kw::Unsafe, case) {
            Unsafe::Yes(self.prev_token.uninterpolated_span())
        } else {
            Unsafe::No
        }
    }

    /// Parses constness: `const` or nothing.
    fn parse_constness(&mut self, case: Case) -> Const {
        self.parse_constness_(case, false)
    }

    /// Parses constness for closures (case sensitive, feature-gated)
    fn parse_closure_constness(&mut self) -> Const {
        let constness = self.parse_constness_(Case::Sensitive, true);
        if let Const::Yes(span) = constness {
            self.sess.gated_spans.gate(sym::const_closures, span);
        }
        constness
    }

    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
        // Avoid parsing const blocks and const closures as const items.
        if (self.check_const_closure() == is_closure)
            && self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
            && self.eat_keyword_case(kw::Const, case)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }

    /// Parses inline const expressions.
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
        if pat {
            self.sess.gated_spans.gate(sym::inline_const_pat, span);
        } else {
            self.sess.gated_spans.gate(sym::inline_const, span);
        }
        self.eat_keyword(kw::Const);
        let (attrs, blk) = self.parse_inner_attrs_and_block()?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
    }

    /// Parses mutability (`mut` or nothing).
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
    }

    /// Possibly parses mutability (`const` or `mut`).
    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(kw::Mut) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(kw::Const) {
            Some(Mutability::Not)
        } else {
            None
        }
    }

    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            if let Some(suffix) = suffix {
                self.expect_no_tuple_index_suffix(self.token.span, suffix);
            }
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }

    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
        if let Some(args) = self.parse_delim_args_inner() { Ok(P(args)) } else { self.unexpected() }
    }

    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
        Ok(if let Some(args) = self.parse_delim_args_inner() {
            AttrArgs::Delimited(args)
        } else {
            if self.eat(&token::Eq) {
                let eq_span = self.prev_token.span;
                AttrArgs::Eq(eq_span, AttrArgsEq::Ast(self.parse_expr_force_collect()?))
            } else {
                AttrArgs::Empty
            }
        })
    }

    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
        let delimited = self.check(&token::OpenDelim(Delimiter::Parenthesis))
            || self.check(&token::OpenDelim(Delimiter::Bracket))
            || self.check(&token::OpenDelim(Delimiter::Brace));

        delimited.then(|| {
            // We've confirmed above that there is a delimiter so unwrapping is OK.
            let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else { unreachable!() };

            DelimArgs { dspan, delim: MacDelimiter::from_token(delim).unwrap(), tokens }
        })
    }

    fn parse_or_use_outer_attributes(
        &mut self,
        already_parsed_attrs: Option<AttrWrapper>,
    ) -> PResult<'a, AttrWrapper> {
        if let Some(attrs) = already_parsed_attrs {
            Ok(attrs)
        } else {
            self.parse_outer_attributes()
        }
    }

    /// Parses a single token tree from the input.
    pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                // Grab the tokens within the delimiters.
                let tree_cursor = &self.token_cursor.tree_cursor;
                let stream = tree_cursor.stream.clone();
                let (_, delim, span) = *self.token_cursor.stack.last().unwrap();

                // Advance the token cursor through the entire delimited
                // sequence. After getting the `OpenDelim` we are *within* the
                // delimited sequence, i.e. at depth `d`. After getting the
                // matching `CloseDelim` we are *after* the delimited sequence,
                // i.e. at depth `d - 1`.
                let target_depth = self.token_cursor.stack.len() - 1;
                loop {
                    // Advance one token at a time, so `TokenCursor::next()`
                    // can capture these tokens if necessary.
                    self.bump();
                    if self.token_cursor.stack.len() == target_depth {
                        debug_assert!(matches!(self.token.kind, token::CloseDelim(_)));
                        break;
                    }
                }

                // Consume close delimiter
                self.bump();
                TokenTree::Delimited(span, delim, stream)
            }
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                self.bump();
                TokenTree::Token(self.prev_token.clone(), Spacing::Alone)
            }
        }
    }

    /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
        let mut tts = Vec::new();
        while self.token != token::Eof {
            tts.push(self.parse_token_tree());
        }
        Ok(tts)
    }

    pub fn parse_tokens(&mut self) -> TokenStream {
        let mut result = Vec::new();
        loop {
            match self.token.kind {
                token::Eof | token::CloseDelim(..) => break,
                _ => result.push(self.parse_token_tree()),
            }
        }
        TokenStream::new(result)
    }

    /// Evaluates the closure with restrictions in place.
    ///
    /// After the closure is evaluated, restrictions are reset.
    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = self.restrictions;
        self.restrictions = res;
        let res = f(self);
        self.restrictions = old;
        res
    }

    /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
    /// for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        maybe_whole!(self, NtVis, |x| x.into_inner());

        if !self.eat_keyword(kw::Pub) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: false,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
                let vis = VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                    shorthand: true,
                };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }

    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
        self.bump(); // `(`
        let path = self.parse_path(PathStyle::Mod)?;
        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`

        let path_str = pprust::path_to_string(&path);
        self.sess.emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });

        Ok(())
    }

    /// Parses `extern string_literal?`.
    fn parse_extern(&mut self, case: Case) -> Extern {
        if self.eat_keyword_case(kw::Extern, case) {
            let mut extern_span = self.prev_token.span;
            let abi = self.parse_abi();
            if let Some(abi) = abi {
                extern_span = extern_span.to(abi.span);
            }
            Extern::from_abi(abi, extern_span)
        } else {
            Extern::None
        }
    }

    /// Parses a string literal as an ABI spec.
    fn parse_abi(&mut self) -> Option<StrLit> {
        match self.parse_str_lit() {
            Ok(str_lit) => Some(str_lit),
            Err(Some(lit)) => match lit.kind {
                ast::LitKind::Err => None,
                _ => {
                    self.sess.emit_err(NonStringAbiLiteral { span: lit.span });
                    None
                }
            },
            Err(None) => None,
        }
    }

    pub fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
        // `ForceCollect::Yes`
        self.collect_tokens_trailing_token(
            AttrWrapper::empty(),
            ForceCollect::Yes,
            |this, _attrs| Ok((f(this)?, TrailingToken::None)),
        )
    }

    fn is_import_coupler(&mut self) -> bool {
        self.check(&token::ModSep)
            && self.look_ahead(1, |t| {
                *t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star)
            })
    }

    pub fn clear_expected_tokens(&mut self) {
        self.expected_tokens.clear();
    }

    pub fn approx_token_stream_pos(&self) -> usize {
        self.token_cursor.num_next_calls
    }
}

pub(crate) fn make_unclosed_delims_error(
    unmatched: UnmatchedDelim,
    sess: &ParseSess,
) -> Option<DiagnosticBuilder<'_, ErrorGuaranteed>> {
    // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
    // `unmatched_delims` only for error recovery in the `Parser`.
    let found_delim = unmatched.found_delim?;
    let mut spans = vec![unmatched.found_span];
    if let Some(sp) = unmatched.unclosed_span {
        spans.push(sp);
    };
    let err = MismatchedClosingDelimiter {
        spans,
        delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
        unmatched: unmatched.found_span,
        opening_candidate: unmatched.candidate_span,
        unclosed: unmatched.unclosed_span,
    }
    .into_diagnostic(&sess.span_diagnostic);
    Some(err)
}

pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedDelim>, sess: &ParseSess) {
    let _ = sess.reached_eof.fetch_or(
        unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none()),
        Ordering::Relaxed,
    );
    for unmatched in unclosed_delims.drain(..) {
        if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
            e.emit();
        }
    }
}

/// A helper struct used when building an `AttrTokenStream` from
/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
/// is then 'parsed' to build up an `AttrTokenStream` with nested
/// `AttrTokenTree::Delimited` tokens.
#[derive(Debug, Clone)]
pub enum FlatToken {
    /// A token - this holds both delimiter (e.g. '{' and '}')
    /// and non-delimiter tokens
    Token(Token),
    /// Holds the `AttributesData` for an AST node. The
    /// `AttributesData` is inserted directly into the
    /// constructed `AttrTokenStream` as
    /// an `AttrTokenTree::Attributes`.
    AttrTarget(AttributesData),
    /// A special 'empty' token that is ignored during the conversion
    /// to an `AttrTokenStream`. This is used to simplify the
    /// handling of replace ranges.
    Empty,
}