// vendor/rustc-ap-rustc_parse/src/parser/mod.rs (upstream version 1.52.1+dfsg1)

pub mod attr;
mod diagnostics;
mod expr;
mod generics;
mod item;
mod nonterminal;
mod pat;
mod path;
mod stmt;
mod ty;

use crate::lexer::UnmatchedBrace;
pub use diagnostics::AttemptLocalParseRecovery;
use diagnostics::Error;
pub use path::PathStyle;

use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, HasTokens};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
use rustc_ast::{Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::PResult;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{Span, DUMMY_SP};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use tracing::debug;

use std::{cmp, mem, slice};

bitflags::bitflags! {
    struct Restrictions: u8 {
        const STMT_EXPR = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR = 1 << 2;
    }
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes
pub enum ForceCollect {
    Yes,
    No,
}

pub enum TrailingToken {
    None,
    Semi,
}

/// Like `maybe_whole_expr`, but for things other than expressions.
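///
/// A usage sketch (mirroring callers elsewhere in this crate): short-circuit
/// type parsing when the current token is already an interpolated `NtTy`:
///
/// ```ignore (illustrative sketch, not a compiled doctest)
/// // Inside a `PResult`-returning parser method:
/// maybe_whole!(self, NtTy, |x| x);
/// ```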
#[macro_export]
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            if let token::$constructor(x) = &**nt {
                let $x = x.clone();
                $p.bump();
                return Ok($e);
            }
        }
    };
}

/// If the next tokens are ill-formed `$ty::`, recover them as `<$ty>::`.
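///
/// For example (sketch): with `$ty` an interpolated type coming from a macro
/// expansion, a stray `$ty::AssocItem` is recovered as `<$ty>::AssocItem`.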
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
            if let token::Interpolated(nt) = &$self.token.kind {
                if let token::NtTy(ty) = &**nt {
                    let ty = ty.clone();
                    $self.bump();
                    return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
                }
            }
        }
    };
}

#[derive(Clone)]
pub struct Parser<'a> {
    pub sess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token
    pub token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    restrictions: Restrictions,
    expected_tokens: Vec<TokenType>,
    // Important: This must only be advanced from `next_tok`
    // to ensure that `token_cursor.num_next_calls` is updated properly
    token_cursor: TokenCursor,
    desugar_doc_comments: bool,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u32,
    max_angle_bracket_count: u32,
    /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
    /// it gets removed from here. Every entry left at the end gets emitted as an independent
    /// error.
    pub(super) unclosed_delims: Vec<UnmatchedBrace>,
    last_unexpected_token_span: Option<Span>,
    /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
    /// looked like it could have been a mistyped path or literal `Option:Some(42)`.
    pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
}

impl<'a> Drop for Parser<'a> {
    fn drop(&mut self) {
        emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
    }
}

#[derive(Clone)]
struct TokenCursor {
    frame: TokenCursorFrame,
    stack: Vec<TokenCursorFrame>,
    desugar_doc_comments: bool,
    // Counts the number of calls to `next` or `next_desugared`,
    // depending on whether `desugar_doc_comments` is set.
    num_next_calls: usize,
    // During parsing, we may sometimes need to 'unglue' a
    // glued token into two component tokens
    // (e.g. '>>' into '>' and '>'), so that the parser
    // can consume them one at a time. This process
    // bypasses the normal capturing mechanism
    // (e.g. `num_next_calls` will not be incremented),
    // since the 'unglued' tokens do not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming both unglued tokens,
    // then this is not an issue - we'll end up
    // capturing the single 'glued' token.
    //
    // However, in certain circumstances, we may
    // want to capture just the first 'unglued' token.
    // For example, capturing the `Vec<u8>`
    // in `Option<Vec<u8>>` requires us to unglue
    // the trailing `>>` token. The `append_unglued_token`
    // field is used to track this token - it gets
    // appended to the captured stream when
    // we evaluate a `LazyTokenStream`.
    append_unglued_token: Option<TreeAndSpacing>,
}

#[derive(Clone)]
struct TokenCursorFrame {
    delim: token::DelimToken,
    span: DelimSpan,
    open_delim: bool,
    tree_cursor: tokenstream::Cursor,
    close_delim: bool,
}

impl TokenCursorFrame {
    fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
        TokenCursorFrame {
            delim,
            span,
            open_delim: delim == token::NoDelim,
            tree_cursor: tts.into_trees(),
            close_delim: delim == token::NoDelim,
        }
    }
}

impl TokenCursor {
    fn next(&mut self) -> (Token, Spacing) {
        loop {
            let (tree, spacing) = if !self.frame.open_delim {
                self.frame.open_delim = true;
                TokenTree::open_tt(self.frame.span, self.frame.delim).into()
            } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() {
                tree
            } else if !self.frame.close_delim {
                self.frame.close_delim = true;
                TokenTree::close_tt(self.frame.span, self.frame.delim).into()
            } else if let Some(frame) = self.stack.pop() {
                self.frame = frame;
                continue;
            } else {
                (TokenTree::Token(Token::new(token::Eof, DUMMY_SP)), Spacing::Alone)
            };

            match tree {
                TokenTree::Token(token) => {
                    return (token, spacing);
                }
                TokenTree::Delimited(sp, delim, tts) => {
                    let frame = TokenCursorFrame::new(sp, delim, tts);
                    self.stack.push(mem::replace(&mut self.frame, frame));
                }
            }
        }
    }

    fn next_desugared(&mut self) -> (Token, Spacing) {
        let (data, attr_style, sp) = match self.next() {
            (Token { kind: token::DocComment(_, attr_style, data), span }, _) => {
                (data, attr_style, span)
            }
            tok => return tok,
        };

        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
        // required to wrap the text.
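        //
        // For example (illustrative): doc text containing `"#` drives the count
        // to 2 (the `"` starts a run, the `#` extends it), so the text must be
        // wrapped as `r##"..."##`; text with no quotes needs zero hashes.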
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in data.as_str().chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = cmp::max(num_of_hashes, count);
        }

        let delim_span = DelimSpan::from_single(sp);
        let body = TokenTree::Delimited(
            delim_span,
            token::Bracket,
            [
                TokenTree::token(token::Ident(sym::doc, false), sp),
                TokenTree::token(token::Eq, sp),
                TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp),
            ]
            .iter()
            .cloned()
            .collect::<TokenStream>(),
        );

        self.stack.push(mem::replace(
            &mut self.frame,
            TokenCursorFrame::new(
                delim_span,
                token::NoDelim,
                if attr_style == AttrStyle::Inner {
                    [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
                        .iter()
                        .cloned()
                        .collect::<TokenStream>()
                } else {
                    [TokenTree::token(token::Pound, sp), body]
                        .iter()
                        .cloned()
                        .collect::<TokenStream>()
                },
            ),
        ));

        self.next()
    }
}

#[derive(Debug, Clone, PartialEq)]
enum TokenType {
    Token(TokenKind),
    Keyword(Symbol),
    Operator,
    Lifetime,
    Ident,
    Path,
    Type,
    Const,
}

impl TokenType {
    fn to_string(&self) -> String {
        match *self {
            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
            TokenType::Keyword(kw) => format!("`{}`", kw),
            TokenType::Operator => "an operator".to_string(),
            TokenType::Lifetime => "lifetime".to_string(),
            TokenType::Ident => "identifier".to_string(),
            TokenType::Path => "path".to_string(),
            TokenType::Type => "type".to_string(),
            TokenType::Const => "a const expression".to_string(),
        }
    }
}

#[derive(Copy, Clone, Debug)]
enum TokenExpectType {
    Expect,
    NoExpect,
}

/// A sequence separator.
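///
/// For example (a sketch of the common case used by `parse_delim_comma_seq`
/// below), `SeqSep::trailing_allowed(token::Comma)` describes a comma-separated
/// sequence such as `(a, b, c,)`, where a trailing comma is accepted.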
struct SeqSep {
    /// The separator token.
    sep: Option<TokenKind>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}

impl SeqSep {
    fn trailing_allowed(t: TokenKind) -> SeqSep {
        SeqSep { sep: Some(t), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}

pub enum FollowedByType {
    Yes,
    No,
}

fn token_descr_opt(token: &Token) -> Option<&'static str> {
    Some(match token.kind {
        _ if token.is_special_ident() => "reserved identifier",
        _ if token.is_used_keyword() => "keyword",
        _ if token.is_unused_keyword() => "reserved keyword",
        token::DocComment(..) => "doc comment",
        _ => return None,
    })
}

pub(super) fn token_descr(token: &Token) -> String {
    let token_str = pprust::token_to_string(token);
    match token_descr_opt(token) {
        Some(prefix) => format!("{} `{}`", prefix, token_str),
        _ => format!("`{}`", token_str),
    }
}

impl<'a> Parser<'a> {
    pub fn new(
        sess: &'a ParseSess,
        tokens: TokenStream,
        desugar_doc_comments: bool,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            sess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            restrictions: Restrictions::empty(),
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
                frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens),
                stack: Vec::new(),
                num_next_calls: 0,
                desugar_doc_comments,
                append_unglued_token: None,
            },
            desugar_doc_comments,
            unmatched_angle_bracket_count: 0,
            max_angle_bracket_count: 0,
            unclosed_delims: Vec::new(),
            last_unexpected_token_span: None,
            last_type_ascription: None,
            subparser_name,
        };

        // Make parser point to the first token.
        parser.bump();

        parser
    }

    fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) {
        let (mut next, spacing) = if self.desugar_doc_comments {
            self.token_cursor.next_desugared()
        } else {
            self.token_cursor.next()
        };
        self.token_cursor.num_next_calls += 1;
        // We've retrieved a token from the underlying
        // cursor, so we no longer need to worry about
        // an unglued token. See `break_and_eat` for more details.
        self.token_cursor.append_unglued_token = None;
        if next.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            next.span = fallback_span.with_ctxt(next.span.ctxt());
        }
        (next, spacing)
    }

    pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // We can get `Ok(true)` from `recover_closing_delimiter`
            // which is called in `expected_one_of_not_found`.
            Ok(_) => FatalError.raise(),
        }
    }

    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
        if self.expected_tokens.is_empty() {
            if self.token == *t {
                self.bump();
                Ok(false)
            } else {
                self.unexpected_try_recover(t)
            }
        } else {
            self.expect_one_of(slice::from_ref(t), &[])
        }
    }

    /// Expects the next token to be an edible or inedible token. If edible,
    /// consumes it; if inedible, returns without consuming
    /// anything. Signals a fatal error if the next token is unexpected.
    pub fn expect_one_of(
        &mut self,
        edible: &[TokenKind],
        inedible: &[TokenKind],
    ) -> PResult<'a, bool /* recovered */> {
        if edible.contains(&self.token.kind) {
            self.bump();
            Ok(false)
        } else if inedible.contains(&self.token.kind) {
            // leave it in the input
            Ok(false)
        } else if self.last_unexpected_token_span == Some(self.token.span) {
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
        }
    }

    // Public for rustfmt usage.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        match self.token.ident() {
            Some((ident, is_raw)) => {
                if !is_raw && ident.is_reserved() {
                    let mut err = self.expected_ident_found();
                    if recover {
                        err.emit();
                    } else {
                        return Err(err);
                    }
                }
                self.bump();
                Ok(ident)
            }
            _ => Err(match self.prev_token.kind {
                TokenKind::DocComment(..) => {
                    self.span_fatal_err(self.prev_token.span, Error::UselessDocComment)
                }
                _ => self.expected_ident_found(),
            }),
        }
    }

    /// Checks if the next token is `tok`, and returns `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
    /// encountered.
    fn check(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.token == *tok;
        if !is_present {
            self.expected_tokens.push(TokenType::Token(tok.clone()));
        }
        is_present
    }

    /// Consumes a token `tok` if it exists. Returns whether the given token was present.
    pub fn eat(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check(tok);
        if is_present {
            self.bump()
        }
        is_present
    }

    /// If the next token is the given keyword, returns `true` without eating it.
    /// An expectation is also added for diagnostics purposes.
    fn check_keyword(&mut self, kw: Symbol) -> bool {
        self.expected_tokens.push(TokenType::Keyword(kw));
        self.token.is_keyword(kw)
    }

    /// If the next token is the given keyword, eats it and returns `true`.
    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
    // Public for rustfmt usage.
    pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
        if self.check_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        if self.token.is_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    /// If the given word is not a keyword, signals an error.
    /// If the next token is not the given word, signals an error.
    /// Otherwise, eats it.
    fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
        if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
    }

    /// Is the given keyword `kw` followed by a non-reserved identifier?
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
    }

    fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
        if ok {
            true
        } else {
            self.expected_tokens.push(typ);
            false
        }
    }

    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }

    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match t.kind {
                token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
                token::OpenDelim(DelimToken::Brace) => true,
                _ => false,
            })
    }

    /// Checks whether the next token is either `+` or `+=`,
    /// returning `false` otherwise.
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(
            self.token.is_like_plus(),
            TokenType::Token(token::BinOp(token::Plus)),
        )
    }

    /// Eats the expected token if it's present, possibly breaking
    /// compound tokens like multi-character operators in the process.
    /// Returns `true` if the token was eaten.
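    ///
    /// For example (sketch): when the current token is `>>` and `>` is
    /// expected, `>>` is broken into two `>` tokens; the first is eaten and
    /// the second becomes the current token (see `expect_gt` below).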
    fn break_and_eat(&mut self, expected: TokenKind) -> bool {
        if self.token.kind == expected {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op() {
            Some((first, second)) if first == expected => {
                let first_span = self.sess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `next_tok` will set this field to `None`
                self.token_cursor.append_unglued_token =
                    Some((TokenTree::Token(self.token.clone()), Spacing::Alone));
                // Use the spacing of the glued token as the spacing
                // of the unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_tokens.push(TokenType::Token(expected));
                false
            }
        }
    }

    /// Eats `+` possibly breaking tokens like `+=` in the process.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(token::BinOp(token::Plus))
    }

    /// Eats `&` possibly breaking tokens like `&&` in the process.
    /// Signals an error if `&` is not eaten.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `|` possibly breaking tokens like `||` in the process.
    /// Signals an error if `|` was not eaten.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `<` possibly breaking tokens like `<<` in the process.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(token::Lt);
        if ate {
            // See doc comment for `unmatched_angle_bracket_count`.
            self.unmatched_angle_bracket_count += 1;
            self.max_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    /// Eats `<` possibly breaking tokens like `<<` in the process.
    /// Signals an error if `<` was not eaten.
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    /// Eats `>` possibly breaking tokens like `>>` in the process.
    /// Signals an error if `>` was not eaten.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::Gt) {
            // See doc comment for `unmatched_angle_bracket_count`.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }

    fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
        kets.iter().any(|k| match expect {
            TokenExpectType::Expect => self.check(k),
            TokenExpectType::NoExpect => self.token == **k,
        })
    }

    fn parse_seq_to_before_tokens<T>(
        &mut self,
        kets: &[&TokenKind],
        sep: SeqSep,
        expect: TokenExpectType,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
        let mut first = true;
        let mut recovered = false;
        let mut trailing = false;
        let mut v = vec![];
        while !self.expect_any_with_type(kets, expect) {
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(ref t) = sep.sep {
                if first {
                    first = false;
                } else {
                    match self.expect(t) {
                        Ok(false) => {}
                        Ok(true) => {
                            recovered = true;
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(t);

                            // Attempt to keep parsing if it was a similar separator.
                            if let Some(ref tokens) = t.similar_tokens() {
                                if tokens.contains(&self.token.kind) {
                                    self.bump();
                                }
                            }

                            // If this was a missing `@` in a binding pattern
                            // bail with a suggestion
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token.kind == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of \
                                    the rest of the array pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        &msg,
                                        " @ ".to_string(),
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .span_suggestion_short(
                                            sp,
                                            &format!("missing `{}`", token_str),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(mut e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    expect_err.emit();

                                    e.cancel();
                                    break;
                                }
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
                trailing = true;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }

    /// Parses a sequence, not including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
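    ///
    /// For example (sketch): with `ket` `)` and
    /// `SeqSep::trailing_allowed(token::Comma)`, this parses the `a, b, c` of
    /// `(a, b, c)` (the `(` having already been consumed), stopping before `)`.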
    fn parse_seq_to_before_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool, bool)> {
        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
    }

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
        if !recovered {
            self.eat(ket);
        }
        Ok((val, trailing))
    }

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_unspanned_seq<T>(
        &mut self,
        bra: &TokenKind,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.expect(bra)?;
        self.parse_seq_to_end(ket, sep, f)
    }

    fn parse_delim_comma_seq<T>(
        &mut self,
        delim: DelimToken,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.parse_unspanned_seq(
            &token::OpenDelim(delim),
            &token::CloseDelim(delim),
            SeqSep::trailing_allowed(token::Comma),
            f,
        )
    }

    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.parse_delim_comma_seq(token::Paren, f)
    }

    /// Advance the parser by one token, using the provided token as the next one.
    fn bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // Bumping after EOF is a bad sign, usually an infinite loop.
        if self.prev_token.kind == TokenKind::Eof {
            let msg = "attempted to bump the parser past EOF (may be stuck in a loop)";
            self.span_bug(self.token.span, msg);
        }

        // Update the current and previous tokens.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Diagnostics.
        self.expected_tokens.clear();
    }

    /// Advance the parser by one token.
    pub fn bump(&mut self) {
        let next_token = self.next_tok(self.token.span);
        self.bump_with(next_token);
    }

    /// Looks ahead `dist` tokens past `self.token` and gives `looker` access to that token.
    /// When `dist == 0`, the current token is looked at.
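    ///
    /// For example (mirroring a use above): `self.look_ahead(1, |t| t == &token::ModSep)`
    /// checks whether the token after the current one is `::`, consuming nothing.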
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        let frame = &self.token_cursor.frame;
        match frame.tree_cursor.look_ahead(dist - 1) {
            Some(tree) => match tree {
                TokenTree::Token(token) => looker(token),
                TokenTree::Delimited(dspan, delim, _) => {
                    looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                }
            },
            None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
        }
    }

    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
    fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }

    /// Parses asyncness: `async` or nothing.
    fn parse_asyncness(&mut self) -> Async {
        if self.eat_keyword(kw::Async) {
            let span = self.prev_token.uninterpolated_span();
            Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
        } else {
            Async::No
        }
    }

    /// Parses unsafety: `unsafe` or nothing.
    fn parse_unsafety(&mut self) -> Unsafe {
        if self.eat_keyword(kw::Unsafe) {
            Unsafe::Yes(self.prev_token.uninterpolated_span())
        } else {
            Unsafe::No
        }
    }

    /// Parses constness: `const` or nothing.
    fn parse_constness(&mut self) -> Const {
        // Avoid parsing const blocks as const items.
        if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace))
            && self.eat_keyword(kw::Const)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }

    /// Parses inline const expressions.
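    ///
    /// An inline const expression looks like `const { 1 + 2 }`; it is
    /// feature-gated (`inline_const`) and parsed as an `AnonConst` wrapping
    /// the block.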
    fn parse_const_block(&mut self, span: Span) -> PResult<'a, P<Expr>> {
        self.sess.gated_spans.gate(sym::inline_const, span);
        self.eat_keyword(kw::Const);
        let blk = self.parse_block()?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::new()))
    }

    /// Parses mutability (`mut` or nothing).
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
    }

    /// Possibly parses mutability (`const` or `mut`).
    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(kw::Mut) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(kw::Const) {
            Some(Mutability::Not)
        } else {
            None
        }
    }

    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            self.expect_no_suffix(self.token.span, "a tuple index", suffix);
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(false)
        }
    }

    fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
        self.parse_mac_args_common(true).map(P)
    }

    fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
        self.parse_mac_args_common(false)
    }

    fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
        Ok(
            if self.check(&token::OpenDelim(DelimToken::Paren))
                || self.check(&token::OpenDelim(DelimToken::Bracket))
                || self.check(&token::OpenDelim(DelimToken::Brace))
            {
                match self.parse_token_tree() {
                    TokenTree::Delimited(dspan, delim, tokens) =>
                    // We've confirmed above that there is a delimiter so unwrapping is OK.
                    {
                        MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
                    }
                    _ => unreachable!(),
                }
            } else if !delimited_only {
                if self.eat(&token::Eq) {
                    let eq_span = self.prev_token.span;
                    let mut is_interpolated_expr = false;
                    if let token::Interpolated(nt) = &self.token.kind {
                        if let token::NtExpr(..) = **nt {
                            is_interpolated_expr = true;
                        }
                    }

                    // Collect tokens because they are used during lowering to HIR.
                    let expr = self.collect_tokens(|this| this.parse_expr())?;
                    let span = expr.span;

                    match &expr.kind {
                        // Not gated to support things like `doc = $expr` that work on stable.
                        _ if is_interpolated_expr => {}
                        ExprKind::Lit(lit) if lit.kind.is_unsuffixed() => {}
                        _ => self.sess.gated_spans.gate(sym::extended_key_value_attributes, span),
                    }

                    let token_kind = token::Interpolated(Lrc::new(token::NtExpr(expr)));
                    MacArgs::Eq(eq_span, Token::new(token_kind, span))
                } else {
                    MacArgs::Empty
                }
            } else {
                return self.unexpected();
            },
        )
    }

    fn parse_or_use_outer_attributes(
        &mut self,
        already_parsed_attrs: Option<AttrVec>,
    ) -> PResult<'a, AttrVec> {
        if let Some(attrs) = already_parsed_attrs {
            Ok(attrs)
        } else {
            self.parse_outer_attributes().map(|a| a.into())
        }
    }

    /// Parses a single token tree from the input.
    pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                let depth = self.token_cursor.stack.len();

                // We keep advancing the token cursor until we hit
                // the matching `CloseDelim` token.
                while !(depth == self.token_cursor.stack.len()
                    && matches!(self.token.kind, token::CloseDelim(_)))
                {
                    // Advance one token at a time, so `TokenCursor::next()`
                    // can capture these tokens if necessary.
                    self.bump();
                }
                // We are still inside the frame corresponding
                // to the delimited stream we captured, so grab
                // the tokens from this frame.
                let frame = &self.token_cursor.frame;
                let stream = frame.tree_cursor.stream.clone();
                let span = frame.span;
                let delim = frame.delim;
                // Consume close delimiter
                self.bump();
                TokenTree::Delimited(span, delim, stream)
            }
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                self.bump();
                TokenTree::Token(self.prev_token.clone())
            }
        }
    }

    /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
        let mut tts = Vec::new();
        while self.token != token::Eof {
            tts.push(self.parse_token_tree());
        }
        Ok(tts)
    }

    pub fn parse_tokens(&mut self) -> TokenStream {
        let mut result = Vec::new();
        loop {
            match self.token.kind {
                token::Eof | token::CloseDelim(..) => break,
                _ => result.push(self.parse_token_tree().into()),
            }
        }
        TokenStream::new(result)
    }

    /// Evaluates the closure with restrictions in place.
    ///
    /// After the closure is evaluated, restrictions are reset.
    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = self.restrictions;
        self.restrictions = res;
        let res = f(self);
        self.restrictions = old;
        res
    }

    fn is_crate_vis(&self) -> bool {
        self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
    }

    /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
    /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
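    ///
    /// For example (sketch), each of the following parses as a visibility here:
    /// `pub`, `crate`, `pub(crate)`, `pub(self)`, `pub(super)`, and
    /// `pub(in path::to::module)`.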
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        maybe_whole!(self, NtVis, |x| x);

        self.expected_tokens.push(TokenType::Keyword(kw::Crate));
        if self.is_crate_vis() {
            self.bump(); // `crate`
            self.sess.gated_spans.gate(sym::crate_visibility_modifier, self.prev_token.span);
            return Ok(Visibility {
                span: self.prev_token.span,
                kind: VisibilityKind::Crate(CrateSugar::JustCrate),
                tokens: None,
            });
        }

        if !self.eat_keyword(kw::Pub) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(&token::OpenDelim(token::Paren)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::Crate]) && self.look_ahead(2, |t| t != &token::ModSep)
            // account for `pub(crate::foo)`
            {
                // Parse `pub(crate)`.
                self.bump(); // `(`
                self.bump(); // `crate`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Crate(CrateSugar::PubCrate);
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren))
                && self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
            {
                // Parse `pub(self)` or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }

    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
        self.bump(); // `(`
        let path = self.parse_path(PathStyle::Mod)?;
        self.expect(&token::CloseDelim(token::Paren))?; // `)`

        let msg = "incorrect visibility restriction";
        let suggestion = r##"some possible visibility restrictions are:
`pub(crate)`: visible only on the current crate
`pub(super)`: visible only in the current module's parent
`pub(in path::to::module)`: visible only on the specified path"##;

        let path_str = pprust::path_to_string(&path);

        struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
            .help(suggestion)
            .span_suggestion(
                path.span,
                &format!("make this visible only to module `{}` with `in`", path_str),
                format!("in {}", path_str),
                Applicability::MachineApplicable,
            )
            .emit();

        Ok(())
    }

    /// Parses `extern string_literal?`.
    fn parse_extern(&mut self) -> PResult<'a, Extern> {
        Ok(if self.eat_keyword(kw::Extern) {
            Extern::from_abi(self.parse_abi())
        } else {
            Extern::None
        })
    }

    /// Parses a string literal as an ABI spec.
    fn parse_abi(&mut self) -> Option<StrLit> {
        match self.parse_str_lit() {
            Ok(str_lit) => Some(str_lit),
            Err(Some(lit)) => match lit.kind {
                ast::LitKind::Err(_) => None,
                _ => {
                    self.struct_span_err(lit.span, "non-string ABI literal")
                        .span_suggestion(
                            lit.span,
                            "specify the ABI with a string literal",
                            "\"C\"".to_string(),
                            Applicability::MaybeIncorrect,
                        )
                        .emit();
                    None
                }
            },
            Err(None) => None,
        }
    }

    pub fn collect_tokens<R: HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
    }

    /// Records all tokens consumed by the provided callback,
    /// including the current token. These tokens are collected
    /// into a `LazyTokenStream`, and returned along with the result
    /// of the callback.
    ///
    /// Note: If your callback consumes an opening delimiter
    /// (including the case where you call `collect_tokens`
    /// when the current token is an opening delimiter),
    /// you must also consume the corresponding closing delimiter.
    ///
    /// That is, you can consume
    /// `something ([{ }])` or `([{}])`, but not `([{}]`.
    ///
    /// This restriction shouldn't be an issue in practice,
    /// since this function is used to record the tokens for
    /// a parsed AST item, which always has matching delimiters.
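    ///
    /// A minimal usage sketch (hypothetical caller; `TrailingToken::None`
    /// signals that no extra trailing token needs capturing):
    ///
    /// ```ignore (illustrative sketch, not a compiled doctest)
    /// let expr = parser.collect_tokens_trailing_token(|this| {
    ///     Ok((this.parse_expr()?, TrailingToken::None))
    /// })?;
    /// // `expr` now carries a `LazyTokenStream` covering exactly the consumed tokens.
    /// ```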
    pub fn collect_tokens_trailing_token<R: HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
    ) -> PResult<'a, R> {
        let start_token = (self.token.clone(), self.token_spacing);
        let cursor_snapshot = self.token_cursor.clone();

        let (mut ret, trailing_token) = f(self)?;

        // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
        // and `num_calls`, we can reconstruct the `TokenStream` seen
        // by the callback. This allows us to avoid producing a `TokenStream`
        // if it is never needed - for example, a captured `macro_rules!`
        // argument that is never passed to a proc macro.
        // In practice token stream creation happens rarely compared to
        // calls to `collect_tokens` (see some statistics in #78736),
        // so we are doing as little up-front work as possible.
        //
        // This also makes `Parser` very cheap to clone, since
        // there is no intermediate collection buffer to clone.
        #[derive(Clone)]
        struct LazyTokenStreamImpl {
            start_token: (Token, Spacing),
            cursor_snapshot: TokenCursor,
            num_calls: usize,
            desugar_doc_comments: bool,
            append_unglued_token: Option<TreeAndSpacing>,
        }
        impl CreateTokenStream for LazyTokenStreamImpl {
            fn create_token_stream(&self) -> TokenStream {
                // The token produced by the final call to `next` or `next_desugared`
                // was not actually consumed by the callback. The combination
                // of chaining the initial token and using `take` produces the desired
                // result - we produce an empty `TokenStream` if no calls were made,
                // and omit the final token otherwise.
                let mut cursor_snapshot = self.cursor_snapshot.clone();
                let tokens = std::iter::once(self.start_token.clone())
                    .chain((0..self.num_calls).map(|_| {
                        if self.desugar_doc_comments {
                            cursor_snapshot.next_desugared()
                        } else {
                            cursor_snapshot.next()
                        }
                    }))
                    .take(self.num_calls);

                make_token_stream(tokens, self.append_unglued_token.clone())
            }
        }

        let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
        match trailing_token {
            TrailingToken::None => {}
            TrailingToken::Semi => {
                assert_eq!(self.token.kind, token::Semi);
                num_calls += 1;
            }
        }

        let lazy_impl = LazyTokenStreamImpl {
            start_token,
            num_calls,
            cursor_snapshot,
            desugar_doc_comments: self.desugar_doc_comments,
            append_unglued_token: self.token_cursor.append_unglued_token.clone(),
        };
        ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
        Ok(ret)
    }

    /// `::{` or `::*`
    fn is_import_coupler(&mut self) -> bool {
        self.check(&token::ModSep)
            && self.look_ahead(1, |t| {
                *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star)
            })
    }

    pub fn clear_expected_tokens(&mut self) {
        self.expected_tokens.clear();
    }
}

crate fn make_unclosed_delims_error(
    unmatched: UnmatchedBrace,
    sess: &ParseSess,
) -> Option<DiagnosticBuilder<'_>> {
    // `None` here means an `Eof` was found. We already emit those errors elsewhere; we add them to
    // `unmatched_braces` only for error recovery in the `Parser`.
    let found_delim = unmatched.found_delim?;
    let mut err = sess.span_diagnostic.struct_span_err(
        unmatched.found_span,
        &format!(
            "mismatched closing delimiter: `{}`",
            pprust::token_kind_to_string(&token::CloseDelim(found_delim)),
        ),
    );
    err.span_label(unmatched.found_span, "mismatched closing delimiter");
    if let Some(sp) = unmatched.candidate_span {
        err.span_label(sp, "closing delimiter possibly meant for this");
    }
    if let Some(sp) = unmatched.unclosed_span {
        err.span_label(sp, "unclosed delimiter");
    }
    Some(err)
}

pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
    *sess.reached_eof.borrow_mut() |=
        unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
    for unmatched in unclosed_delims.drain(..) {
        if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
            e.emit();
        }
    }
}

/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
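///
/// For example (sketch): the flat sequence `(`, `a`, `,`, `b`, `)` becomes a
/// single `TokenTree::Delimited` whose inner stream holds `a`, `,`, and `b`.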
fn make_token_stream(
    tokens: impl Iterator<Item = (Token, Spacing)>,
    append_unglued_token: Option<TreeAndSpacing>,
) -> TokenStream {
    #[derive(Debug)]
    struct FrameData {
        open: Span,
        inner: Vec<(TokenTree, Spacing)>,
    }
    let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
    for (token, spacing) in tokens {
        match token {
            Token { kind: TokenKind::OpenDelim(_), span } => {
                stack.push(FrameData { open: span, inner: vec![] });
            }
            Token { kind: TokenKind::CloseDelim(delim), span } => {
                let frame_data = stack.pop().expect("Token stack was empty!");
                let dspan = DelimSpan::from_pair(frame_data.open, span);
                let stream = TokenStream::new(frame_data.inner);
                let delimited = TokenTree::Delimited(dspan, delim, stream);
                stack
                    .last_mut()
                    .unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
                    .inner
                    .push((delimited, Spacing::Alone));
            }
            token => {
                stack
                    .last_mut()
                    .expect("Bottom token frame is missing!")
                    .inner
                    .push((TokenTree::Token(token), spacing));
            }
        }
    }
    let mut final_buf = stack.pop().expect("Missing final buf!");
    final_buf.inner.extend(append_unglued_token);
    assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
    TokenStream::new(final_buf.inner)
}

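/// Collects tokens for the AST node produced by `$f` only when necessary:
/// either when collection is forced via `ForceCollect::Yes`, or when the
/// node's attributes may later require its tokens (per
/// `attr::maybe_needs_tokens`); otherwise `$f` runs without token capture.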
#[macro_export]
macro_rules! maybe_collect_tokens {
    ($self:ident, $force_collect:expr, $attrs:expr, $f:expr) => {
        if matches!($force_collect, ForceCollect::Yes)
            || $crate::parser::attr::maybe_needs_tokens($attrs)
        {
            $self.collect_tokens_trailing_token($f)
        } else {
            Ok($f($self)?.0)
        }
    };
}