]> git.proxmox.com Git - rustc.git/blame_incremental - compiler/rustc_parse/src/parser/mod.rs
Merge tag 'debian/1.52.1+dfsg1-1_exp2' into proxmox/buster
[rustc.git] / compiler / rustc_parse / src / parser / mod.rs
... / ...
CommitLineData
1pub mod attr;
2mod attr_wrapper;
3mod diagnostics;
4mod expr;
5mod generics;
6mod item;
7mod nonterminal;
8mod pat;
9mod path;
10mod stmt;
11mod ty;
12
13use crate::lexer::UnmatchedBrace;
14pub use attr_wrapper::AttrWrapper;
15pub use diagnostics::AttemptLocalParseRecovery;
16use diagnostics::Error;
17pub use pat::{GateOr, RecoverComma};
18pub use path::PathStyle;
19
20use rustc_ast::ptr::P;
21use rustc_ast::token::{self, DelimToken, Token, TokenKind};
22use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
23use rustc_ast::tokenstream::{TokenStream, TokenTree, TreeAndSpacing};
24use rustc_ast::DUMMY_NODE_ID;
25use rustc_ast::{self as ast, AnonConst, AstLike, AttrStyle, AttrVec, Const, CrateSugar, Extern};
26use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
27use rustc_ast::{Visibility, VisibilityKind};
28use rustc_ast_pretty::pprust;
29use rustc_data_structures::sync::Lrc;
30use rustc_errors::PResult;
31use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError};
32use rustc_session::parse::ParseSess;
33use rustc_span::source_map::{Span, DUMMY_SP};
34use rustc_span::symbol::{kw, sym, Ident, Symbol};
35use tracing::debug;
36
37use std::{cmp, mem, slice};
38
bitflags::bitflags! {
    /// Restrictions applied while parsing an expression, used to resolve
    /// grammar ambiguities (e.g. struct literals are forbidden after `if`).
    struct Restrictions: u8 {
        /// Parsing an expression in statement position (affects `{`-handling).
        const STMT_EXPR = 1 << 0;
        /// Struct literals are not allowed here (e.g. in a `match` scrutinee).
        const NO_STRUCT_LITERAL = 1 << 1;
        /// Parsing inside a const generic argument position.
        const CONST_EXPR = 1 << 2;
    }
}
46
/// How error recovery should treat a `;` when skipping tokens.
#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    /// Stop skipping when a `;` is found.
    Break,
    /// Skip over `;` tokens.
    Ignore,
    /// Stop skipping when a `,` is found (used inside match arms).
    Comma,
}
53
/// How error recovery should treat a closing brace when skipping tokens.
#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    /// Stop skipping at a closing brace.
    Break,
    /// Skip over closing braces.
    Ignore,
}
59
/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes
pub enum ForceCollect {
    /// Always collect tokens for this node.
    Yes,
    /// Only collect tokens when attributes require it.
    No,
}
66
/// Describes whether a trailing token after a parsed node should be
/// included in the node's collected token stream.
pub enum TrailingToken {
    /// No trailing token to capture.
    None,
    /// Capture a trailing `;`.
    Semi,
    /// If the trailing token is a comma, then capture it
    /// Otherwise, ignore the trailing token
    MaybeComma,
}
74
/// Like `maybe_whole_expr`, but for things other than expressions.
///
/// If the current token is an interpolated (already-parsed) nonterminal of
/// the given kind, consumes it and early-returns `Ok` with the result of
/// applying `$e` to the cloned payload bound as `$x`.
#[macro_export]
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            if let token::$constructor(x) = &**nt {
                // Clone the nonterminal payload before bumping so the
                // borrow of `$p.token` ends first.
                let $x = x.clone();
                $p.bump();
                return Ok($e);
            }
        }
    };
}
88
/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
///
/// Only fires when `$allow_qpath_recovery` is true and the token after the
/// interpolated type is `::`; otherwise parsing continues normally.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
            if let token::Interpolated(nt) = &$self.token.kind {
                if let token::NtTy(ty) = &**nt {
                    let ty = ty.clone();
                    $self.bump();
                    return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
                }
            }
        }
    };
}
104
/// The Rust parser: consumes a `TokenStream` and produces AST nodes.
#[derive(Clone)]
pub struct Parser<'a> {
    /// Parse session, used for interning, diagnostics, and feature gating.
    pub sess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token
    pub token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    /// Active grammar restrictions (see `Restrictions`).
    restrictions: Restrictions,
    /// Tokens that would have been accepted at this point, for diagnostics.
    expected_tokens: Vec<TokenType>,
    // Important: This must only be advanced from `next_tok`
    // to ensure that `token_cursor.num_next_calls` is updated properly
    token_cursor: TokenCursor,
    /// Whether doc comments are rewritten into `#[doc = "..."]` attributes.
    desugar_doc_comments: bool,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u32,
    /// High-water mark of `unmatched_angle_bracket_count` (for recovery).
    max_angle_bracket_count: u32,
    /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
    /// it gets removed from here. Every entry left at the end gets emitted as an independent
    /// error.
    pub(super) unclosed_delims: Vec<UnmatchedBrace>,
    /// Span of the last token we failed on, to avoid emitting duplicate fatal errors.
    last_unexpected_token_span: Option<Span>,
    /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
    /// looked like it could have been a mistyped path or literal `Option:Some(42)`).
    pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
}
138
impl<'a> Drop for Parser<'a> {
    /// Emit any diagnostics for unclosed delimiters that were never used
    /// for recovery, so they are not silently lost when the parser dies.
    fn drop(&mut self) {
        emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
    }
}
144
/// Iterator-like cursor over a `TokenStream`, maintaining a stack of
/// frames so that delimited groups can be entered and exited.
#[derive(Clone)]
struct TokenCursor {
    /// The frame currently being walked.
    frame: TokenCursorFrame,
    /// Enclosing frames, pushed when descending into a delimited group.
    stack: Vec<TokenCursorFrame>,
    /// Whether doc comments are desugared into `#[doc]` attributes.
    desugar_doc_comments: bool,
    // Counts the number of calls to `next` or `next_desugared`,
    // depending on whether `desugar_doc_comments` is set.
    num_next_calls: usize,
    // During parsing, we may sometimes need to 'unglue' a
    // glued token into two component tokens
    // (e.g. '>>' into '>' and '>'), so that the parser
    // can consume them one at a time. This process
    // bypasses the normal capturing mechanism
    // (e.g. `num_next_calls` will not be incremented),
    // since the 'unglued' tokens do not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming both unglued tokens,
    // then this is not an issue - we'll end up
    // capturing the single 'glued' token.
    //
    // However, in certain circumstances, we may
    // want to capture just the first 'unglued' token.
    // For example, capturing the `Vec<u8>`
    // in `Option<Vec<u8>>` requires us to unglue
    // the trailing `>>` token. The `append_unglued_token`
    // field is used to track this token - it gets
    // appended to the captured stream when
    // we evaluate a `LazyTokenStream`
    append_unglued_token: Option<TreeAndSpacing>,
}
176
/// One level of `TokenCursor` traversal: a delimited stream plus flags
/// recording whether its open/close delimiters have been yielded yet.
#[derive(Clone)]
struct TokenCursorFrame {
    /// The delimiter surrounding this stream (`NoDelim` at top level).
    delim: token::DelimToken,
    /// Spans of the open and close delimiters.
    span: DelimSpan,
    /// `true` once the open delimiter token has been produced.
    open_delim: bool,
    /// Cursor into the stream's token trees.
    tree_cursor: tokenstream::Cursor,
    /// `true` once the close delimiter token has been produced.
    close_delim: bool,
}
185
impl TokenCursorFrame {
    /// Creates a frame for `tts` delimited by `delim` at `span`.
    ///
    /// For `NoDelim` streams there are no delimiter tokens to emit, so both
    /// flags start out `true`.
    fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
        TokenCursorFrame {
            delim,
            span,
            open_delim: delim == token::NoDelim,
            tree_cursor: tts.into_trees(),
            close_delim: delim == token::NoDelim,
        }
    }
}
197
impl TokenCursor {
    /// Returns the next token and its spacing, flattening delimited groups:
    /// an open delimiter token is yielded on entry, the inner tokens follow,
    /// and the close delimiter token is yielded on exit. Yields `Eof` forever
    /// once the outermost frame is exhausted.
    fn next(&mut self) -> (Token, Spacing) {
        loop {
            let (tree, spacing) = if !self.frame.open_delim {
                // First visit of this frame: emit its opening delimiter.
                self.frame.open_delim = true;
                TokenTree::open_tt(self.frame.span, self.frame.delim).into()
            } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() {
                tree
            } else if !self.frame.close_delim {
                // Stream exhausted: emit the closing delimiter once.
                self.frame.close_delim = true;
                TokenTree::close_tt(self.frame.span, self.frame.delim).into()
            } else if let Some(frame) = self.stack.pop() {
                // Pop back out to the enclosing frame and keep going.
                self.frame = frame;
                continue;
            } else {
                (TokenTree::Token(Token::new(token::Eof, DUMMY_SP)), Spacing::Alone)
            };

            match tree {
                TokenTree::Token(token) => {
                    return (token, spacing);
                }
                TokenTree::Delimited(sp, delim, tts) => {
                    // Descend into the group; the open delimiter is yielded
                    // on the next loop iteration.
                    let frame = TokenCursorFrame::new(sp, delim, tts);
                    self.stack.push(mem::replace(&mut self.frame, frame));
                }
            }
        }
    }

    /// Like `next`, but rewrites a doc comment token into the equivalent
    /// `#[doc = r"..."]` (or `#![doc = ...]` for inner comments) attribute
    /// token sequence and yields those tokens instead.
    fn next_desugared(&mut self) -> (Token, Spacing) {
        let (data, attr_style, sp) = match self.next() {
            (Token { kind: token::DocComment(_, attr_style, data), span }, _) => {
                (data, attr_style, span)
            }
            // Not a doc comment: pass it through untouched.
            tok => return tok,
        };

        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
        // required to wrap the text.
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in data.as_str().chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = cmp::max(num_of_hashes, count);
        }

        // Build the bracketed `[doc = r###"..."###]` part of the attribute.
        let delim_span = DelimSpan::from_single(sp);
        let body = TokenTree::Delimited(
            delim_span,
            token::Bracket,
            [
                TokenTree::token(token::Ident(sym::doc, false), sp),
                TokenTree::token(token::Eq, sp),
                TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp),
            ]
            .iter()
            .cloned()
            .collect::<TokenStream>(),
        );

        // Push a synthetic `NoDelim` frame containing `#` (and `!` for inner
        // attributes) followed by the bracketed body, then resume from it.
        self.stack.push(mem::replace(
            &mut self.frame,
            TokenCursorFrame::new(
                delim_span,
                token::NoDelim,
                if attr_style == AttrStyle::Inner {
                    [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
                        .iter()
                        .cloned()
                        .collect::<TokenStream>()
                } else {
                    [TokenTree::token(token::Pound, sp), body]
                        .iter()
                        .cloned()
                        .collect::<TokenStream>()
                },
            ),
        ));

        self.next()
    }
}
285
/// A kind of token (or token class) the parser expected to see,
/// recorded for "expected X, found Y" diagnostics.
#[derive(Debug, Clone, PartialEq)]
enum TokenType {
    /// A specific concrete token.
    Token(TokenKind),
    /// A specific keyword.
    Keyword(Symbol),
    Operator,
    Lifetime,
    Ident,
    Path,
    Type,
    Const,
}
297
298impl TokenType {
299 fn to_string(&self) -> String {
300 match *self {
301 TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
302 TokenType::Keyword(kw) => format!("`{}`", kw),
303 TokenType::Operator => "an operator".to_string(),
304 TokenType::Lifetime => "lifetime".to_string(),
305 TokenType::Ident => "identifier".to_string(),
306 TokenType::Path => "path".to_string(),
307 TokenType::Type => "type".to_string(),
308 TokenType::Const => "a const expression".to_string(),
309 }
310 }
311}
312
/// Whether a token checked during sequence parsing should also be pushed
/// onto `expected_tokens` for diagnostics.
#[derive(Copy, Clone, Debug)]
enum TokenExpectType {
    /// Record the token as expected (via `Parser::check`).
    Expect,
    /// Just compare, without recording an expectation.
    NoExpect,
}
318
/// A sequence separator.
struct SeqSep {
    /// The separator token.
    sep: Option<TokenKind>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}
326
327impl SeqSep {
328 fn trailing_allowed(t: TokenKind) -> SeqSep {
329 SeqSep { sep: Some(t), trailing_sep_allowed: true }
330 }
331
332 fn none() -> SeqSep {
333 SeqSep { sep: None, trailing_sep_allowed: false }
334 }
335}
336
/// Whether the thing being parsed is followed by a type annotation
/// (e.g. a const item's `: Ty`), used when parsing visibility.
pub enum FollowedByType {
    Yes,
    No,
}
341
342fn token_descr_opt(token: &Token) -> Option<&'static str> {
343 Some(match token.kind {
344 _ if token.is_special_ident() => "reserved identifier",
345 _ if token.is_used_keyword() => "keyword",
346 _ if token.is_unused_keyword() => "reserved keyword",
347 token::DocComment(..) => "doc comment",
348 _ => return None,
349 })
350}
351
352pub(super) fn token_descr(token: &Token) -> String {
353 let token_str = pprust::token_to_string(token);
354 match token_descr_opt(token) {
355 Some(prefix) => format!("{} `{}`", prefix, token_str),
356 _ => format!("`{}`", token_str),
357 }
358}
359
360impl<'a> Parser<'a> {
    /// Creates a parser over `tokens`.
    ///
    /// `desugar_doc_comments` rewrites doc comments into `#[doc]` attributes
    /// as tokens are produced; `subparser_name` is set when this parser is
    /// parsing the output of a macro rather than source text.
    pub fn new(
        sess: &'a ParseSess,
        tokens: TokenStream,
        desugar_doc_comments: bool,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            sess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            restrictions: Restrictions::empty(),
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
                // The whole stream lives in a single undelimited root frame.
                frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens),
                stack: Vec::new(),
                num_next_calls: 0,
                desugar_doc_comments,
                append_unglued_token: None,
            },
            desugar_doc_comments,
            unmatched_angle_bracket_count: 0,
            max_angle_bracket_count: 0,
            unclosed_delims: Vec::new(),
            last_unexpected_token_span: None,
            last_type_ascription: None,
            subparser_name,
        };

        // Make parser point to the first token.
        parser.bump();

        parser
    }
395
    /// Fetches the next token (and its spacing) from the token cursor.
    ///
    /// This is the only place the cursor may be advanced, so that
    /// `num_next_calls` stays accurate for token collection. Dummy spans are
    /// replaced by `fallback_span` for better diagnostics.
    fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) {
        let (mut next, spacing) = if self.desugar_doc_comments {
            self.token_cursor.next_desugared()
        } else {
            self.token_cursor.next()
        };
        self.token_cursor.num_next_calls += 1;
        // We've retrieved a token from the underlying
        // cursor, so we no longer need to worry about
        // an unglued token. See `break_and_eat` for more details
        self.token_cursor.append_unglued_token = None;
        if next.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            next.span = fallback_span.with_ctxt(next.span.ctxt());
        }
        (next, spacing)
    }
413
    /// Reports the current token as unexpected and fails.
    ///
    /// Diverges via `Err` or `FatalError`; never returns normally.
    pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // We can get `Ok(true)` from `recover_closing_delimiter`
            // which is called in `expected_one_of_not_found`.
            Ok(_) => FatalError.raise(),
        }
    }
422
    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    ///
    /// Returns `Ok(true)` if the parser recovered rather than finding `t`
    /// literally.
    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
        if self.expected_tokens.is_empty() {
            if self.token == *t {
                self.bump();
                Ok(false)
            } else {
                self.unexpected_try_recover(t)
            }
        } else {
            // Other tokens were also acceptable here; route through
            // `expect_one_of` so the diagnostic can mention all of them.
            self.expect_one_of(slice::from_ref(t), &[])
        }
    }
436
    /// Expect next token to be edible or inedible token. If edible,
    /// then consume it; if inedible, then return without consuming
    /// anything. Signal a fatal error if next token is unexpected.
    pub fn expect_one_of(
        &mut self,
        edible: &[TokenKind],
        inedible: &[TokenKind],
    ) -> PResult<'a, bool /* recovered */> {
        if edible.contains(&self.token.kind) {
            self.bump();
            Ok(false)
        } else if inedible.contains(&self.token.kind) {
            // leave it in the input
            Ok(false)
        } else if self.last_unexpected_token_span == Some(self.token.span) {
            // We already errored on this exact token; don't emit a duplicate.
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
        }
    }
457
    /// Parses an identifier, recovering (with an emitted error) on reserved
    /// identifiers.
    // Public for rustfmt usage.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }
462
    /// Parses an identifier.
    ///
    /// A non-raw reserved identifier is an error; when `recover` is true the
    /// error is emitted and the identifier is accepted anyway, otherwise the
    /// error is returned to the caller.
    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        match self.token.ident() {
            Some((ident, is_raw)) => {
                if !is_raw && ident.is_reserved() {
                    let mut err = self.expected_ident_found();
                    if recover {
                        err.emit();
                    } else {
                        return Err(err);
                    }
                }
                self.bump();
                Ok(ident)
            }
            _ => Err(match self.prev_token.kind {
                // A doc comment right before a non-identifier is almost
                // certainly misplaced; give the dedicated error for it.
                TokenKind::DocComment(..) => {
                    self.span_fatal_err(self.prev_token.span, Error::UselessDocComment)
                }
                _ => self.expected_ident_found(),
            }),
        }
    }
485
486 /// Checks if the next token is `tok`, and returns `true` if so.
487 ///
488 /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
489 /// encountered.
490 fn check(&mut self, tok: &TokenKind) -> bool {
491 let is_present = self.token == *tok;
492 if !is_present {
493 self.expected_tokens.push(TokenType::Token(tok.clone()));
494 }
495 is_present
496 }
497
498 /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
499 pub fn eat(&mut self, tok: &TokenKind) -> bool {
500 let is_present = self.check(tok);
501 if is_present {
502 self.bump()
503 }
504 is_present
505 }
506
    /// If the next token is the given keyword, returns `true` without eating it.
    /// An expectation is also added for diagnostics purposes.
    fn check_keyword(&mut self, kw: Symbol) -> bool {
        // Unconditionally recorded: even a hit may matter for later errors.
        self.expected_tokens.push(TokenType::Keyword(kw));
        self.token.is_keyword(kw)
    }
513
514 /// If the next token is the given keyword, eats it and returns `true`.
515 /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
516 // Public for rustfmt usage.
517 pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
518 if self.check_keyword(kw) {
519 self.bump();
520 true
521 } else {
522 false
523 }
524 }
525
526 fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
527 if self.token.is_keyword(kw) {
528 self.bump();
529 true
530 } else {
531 false
532 }
533 }
534
535 /// If the given word is not a keyword, signals an error.
536 /// If the next token is not the given word, signals an error.
537 /// Otherwise, eats it.
538 fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
539 if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
540 }
541
    /// Is the given keyword `kw` followed by a non-reserved identifier?
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
    }
546
547 fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
548 if ok {
549 true
550 } else {
551 self.expected_tokens.push(typ);
552 false
553 }
554 }
555
    /// Checks whether the current token is an identifier, recording the
    /// expectation otherwise.
    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }
559
    /// Checks whether the current token can start a path, recording the
    /// expectation otherwise.
    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }
563
    /// Checks whether the current token can begin a type, recording the
    /// expectation otherwise.
    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }
567
    /// Checks whether the current token can begin a const generic argument,
    /// recording the expectation otherwise.
    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }
571
    /// Checks whether an inline const expression (`const { ... }`) starts
    /// `dist` tokens ahead: the `const` keyword followed by a block (either
    /// a literal `{` or an interpolated block nonterminal).
    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match t.kind {
                token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
                token::OpenDelim(DelimToken::Brace) => true,
                _ => false,
            })
    }
580
    /// Checks to see if the next token is either `+` or `+=`.
    /// Otherwise returns `false`.
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(
            self.token.is_like_plus(),
            // Diagnostics always report a plain `+` expectation.
            TokenType::Token(token::BinOp(token::Plus)),
        )
    }
589
    /// Eats the expected token if it's present possibly breaking
    /// compound tokens like multi-character operators in process.
    /// Returns `true` if the token was eaten.
    fn break_and_eat(&mut self, expected: TokenKind) -> bool {
        if self.token.kind == expected {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op() {
            Some((first, second)) if first == expected => {
                // Split the glued token's span: the first half covers its
                // first character, the second half the rest.
                let first_span = self.sess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `next_tok` will set this field to `None`
                self.token_cursor.append_unglued_token =
                    Some((TokenTree::Token(self.token.clone()), Spacing::Alone));
                // Use the spacing of the glued token as the spacing
                // of the unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                // Neither the token itself nor a splittable prefix matched.
                self.expected_tokens.push(TokenType::Token(expected));
                false
            }
        }
    }
622
    /// Eats `+` possibly breaking tokens like `+=` in process.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(token::BinOp(token::Plus))
    }
627
    /// Eats `&` possibly breaking tokens like `&&` in process.
    /// Signals an error if `&` is not eaten.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
    }
633
    /// Eats `|` possibly breaking tokens like `||` in process.
    /// Signals an error if `|` was not eaten.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
    }
639
    /// Eats `<` possibly breaking tokens like `<<` in process.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(token::Lt);
        if ate {
            // See doc comment for `unmatched_angle_bracket_count`.
            self.unmatched_angle_bracket_count += 1;
            self.max_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }
651
    /// Eats `<` possibly breaking tokens like `<<` in process.
    /// Signals an error if `<` was not eaten.
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }
657
    /// Eats `>` possibly breaking tokens like `>>` in process.
    /// Signals an error if `>` was not eaten.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::Gt) {
            // See doc comment for `unmatched_angle_bracket_count`.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }
672
673 fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
674 kets.iter().any(|k| match expect {
675 TokenExpectType::Expect => self.check(k),
676 TokenExpectType::NoExpect => self.token == **k,
677 })
678 }
679
    /// Parses a `sep`-separated sequence of `f`-items, stopping before (not
    /// consuming) any token in `kets`.
    ///
    /// Returns the parsed items plus whether a trailing separator was seen
    /// and whether error recovery took place. Contains several recovery
    /// paths for missing, similar, or misplaced separators.
    fn parse_seq_to_before_tokens<T>(
        &mut self,
        kets: &[&TokenKind],
        sep: SeqSep,
        expect: TokenExpectType,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
        let mut first = true;
        let mut recovered = false;
        let mut trailing = false;
        let mut v = vec![];
        while !self.expect_any_with_type(kets, expect) {
            // An unexpected closing delimiter or EOF also ends the sequence.
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(ref t) = sep.sep {
                if first {
                    // No separator is required before the first element.
                    first = false;
                } else {
                    match self.expect(t) {
                        Ok(false) => {}
                        Ok(true) => {
                            recovered = true;
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(t);

                            // Attempt to keep parsing if it was a similar separator.
                            if let Some(ref tokens) = t.similar_tokens() {
                                if tokens.contains(&self.token.kind) {
                                    self.bump();
                                }
                            }

                            // If this was a missing `@` in a binding pattern
                            // bail with a suggestion
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token.kind == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of \
                                    the rest of the array pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        &msg,
                                        " @ ".to_string(),
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .span_suggestion_short(
                                            sp,
                                            &format!("missing `{}`", token_str),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(mut e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    expect_err.emit();

                                    e.cancel();
                                    break;
                                }
                            }
                        }
                    }
                }
            }
            // A terminator right after a separator means a trailing separator.
            if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
                trailing = true;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }
777
    /// Parses a sequence, not including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_before_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool, bool)> {
        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
    }
789
    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
        if !recovered {
            // On recovery the closing delimiter was already handled.
            self.eat(ket);
        }
        Ok((val, trailing))
    }
805
    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    ///
    /// Also expects and consumes the opening delimiter `bra` first.
    fn parse_unspanned_seq<T>(
        &mut self,
        bra: &TokenKind,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.expect(bra)?;
        self.parse_seq_to_end(ket, sep, f)
    }
819
    /// Parses a comma-separated sequence (trailing comma allowed) wrapped in
    /// the given delimiter pair.
    fn parse_delim_comma_seq<T>(
        &mut self,
        delim: DelimToken,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.parse_unspanned_seq(
            &token::OpenDelim(delim),
            &token::CloseDelim(delim),
            SeqSep::trailing_allowed(token::Comma),
            f,
        )
    }
832
    /// Parses a parenthesized, comma-separated sequence (trailing comma
    /// allowed).
    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.parse_delim_comma_seq(token::Paren, f)
    }
839
    /// Advance the parser by one token using provided token as the next one.
    fn bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // Bumping after EOF is a bad sign, usually an infinite loop.
        if self.prev_token.kind == TokenKind::Eof {
            let msg = "attempted to bump the parser past EOF (may be stuck in a loop)";
            self.span_bug(self.token.span, msg);
        }

        // Update the current and previous tokens.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Diagnostics.
        self.expected_tokens.clear();
    }
855
    /// Advance the parser by one token.
    pub fn bump(&mut self) {
        // The current token's span is the fallback for dummy-spanned tokens.
        let next_token = self.next_tok(self.token.span);
        self.bump_with(next_token);
    }
861
    /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
    /// When `dist == 0` then the current token is looked at.
    ///
    /// NOTE(review): for `dist >= 1` this peeks into the current frame's tree
    /// cursor, so a delimited group counts as a single tree and running off
    /// the frame's end yields its close delimiter.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        let frame = &self.token_cursor.frame;
        match frame.tree_cursor.look_ahead(dist - 1) {
            Some(tree) => match tree {
                TokenTree::Token(token) => looker(token),
                // A whole group is seen as its opening delimiter token.
                TokenTree::Delimited(dspan, delim, _) => {
                    looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                }
            },
            None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
        }
    }
880
    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
    fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }
885
    /// Parses asyncness: `async` or nothing.
    fn parse_asyncness(&mut self) -> Async {
        if self.eat_keyword(kw::Async) {
            let span = self.prev_token.uninterpolated_span();
            // Node ids are assigned later, during AST lowering.
            Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
        } else {
            Async::No
        }
    }
895
896 /// Parses unsafety: `unsafe` or nothing.
897 fn parse_unsafety(&mut self) -> Unsafe {
898 if self.eat_keyword(kw::Unsafe) {
899 Unsafe::Yes(self.prev_token.uninterpolated_span())
900 } else {
901 Unsafe::No
902 }
903 }
904
    /// Parses constness: `const` or nothing.
    fn parse_constness(&mut self) -> Const {
        // Avoid const blocks to be parsed as const items
        if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace))
            && self.eat_keyword(kw::Const)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }
916
    /// Parses inline const expressions.
    ///
    /// `span` should start at the `const` keyword; it is extended to cover
    /// the whole `const { ... }` expression. The feature gate is recorded
    /// unconditionally.
    fn parse_const_block(&mut self, span: Span) -> PResult<'a, P<Expr>> {
        self.sess.gated_spans.gate(sym::inline_const, span);
        self.eat_keyword(kw::Const);
        let blk = self.parse_block()?;
        // Wrap the block in an anonymous const, like array length expressions.
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::new()))
    }
929
    /// Parses mutability (`mut` or nothing).
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
    }
934
935 /// Possibly parses mutability (`const` or `mut`).
936 fn parse_const_or_mut(&mut self) -> Option<Mutability> {
937 if self.eat_keyword(kw::Mut) {
938 Some(Mutability::Mut)
939 } else if self.eat_keyword(kw::Const) {
940 Some(Mutability::Not)
941 } else {
942 None
943 }
944 }
945
    /// Parses a field name: either an identifier or an unsuffixed integer
    /// literal (a tuple index such as `0` in `x.0`).
    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            // Tuple indices must not carry a suffix (`x.0u8` is an error).
            self.expect_no_suffix(self.token.span, "a tuple index", suffix);
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }
956
    /// Parses macro-call arguments; delimited form is required
    /// (`foo!(...)`, `foo![...]`, `foo! { ... }`) — wait, see
    /// `parse_mac_args_common(true)`: `=`-form is rejected here.
    fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
        self.parse_mac_args_common(true).map(P)
    }
960
    /// Parses attribute arguments: delimited form, `= expr`, or nothing.
    fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
        self.parse_mac_args_common(false)
    }
964
    /// Shared implementation for macro and attribute arguments.
    ///
    /// Accepts a delimited token tree always; when `delimited_only` is false
    /// it additionally accepts `= <expr>` (attribute key/value form) or
    /// nothing at all.
    fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
        Ok(
            if self.check(&token::OpenDelim(DelimToken::Paren))
                || self.check(&token::OpenDelim(DelimToken::Bracket))
                || self.check(&token::OpenDelim(DelimToken::Brace))
            {
                match self.parse_token_tree() {
                    TokenTree::Delimited(dspan, delim, tokens) =>
                    // We've confirmed above that there is a delimiter so unwrapping is OK.
                    {
                        MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
                    }
                    _ => unreachable!(),
                }
            } else if !delimited_only {
                if self.eat(&token::Eq) {
                    let eq_span = self.prev_token.span;
                    let mut is_interpolated_expr = false;
                    if let token::Interpolated(nt) = &self.token.kind {
                        if let token::NtExpr(..) = **nt {
                            is_interpolated_expr = true;
                        }
                    }

                    // Collect tokens because they are used during lowering to HIR.
                    let expr = self.collect_tokens_no_attrs(|this| this.parse_expr())?;
                    let span = expr.span;

                    match &expr.kind {
                        // Not gated to support things like `doc = $expr` that work on stable.
                        _ if is_interpolated_expr => {}
                        ExprKind::Lit(lit) if lit.kind.is_unsuffixed() => {}
                        _ => self.sess.gated_spans.gate(sym::extended_key_value_attributes, span),
                    }

                    // Re-wrap the parsed expression as a single interpolated token.
                    let token_kind = token::Interpolated(Lrc::new(token::NtExpr(expr)));
                    MacArgs::Eq(eq_span, Token::new(token_kind, span))
                } else {
                    MacArgs::Empty
                }
            } else {
                return self.unexpected();
            },
        )
    }
1010
1011 fn parse_or_use_outer_attributes(
1012 &mut self,
1013 already_parsed_attrs: Option<AttrWrapper>,
1014 ) -> PResult<'a, AttrWrapper> {
1015 if let Some(attrs) = already_parsed_attrs {
1016 Ok(attrs)
1017 } else {
1018 self.parse_outer_attributes()
1019 }
1020 }
1021
    /// Parses a single token tree from the input.
    ///
    /// An opening delimiter yields the entire delimited group; any other
    /// token yields itself. Must not be called at a close delimiter or EOF.
    pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                // Remember the frame depth so we stop at *our* close delim,
                // not one belonging to a nested group.
                let depth = self.token_cursor.stack.len();

                // We keep advancing the token cursor until we hit
                // the matching `CloseDelim` token.
                while !(depth == self.token_cursor.stack.len()
                    && matches!(self.token.kind, token::CloseDelim(_)))
                {
                    // Advance one token at a time, so `TokenCursor::next()`
                    // can capture these tokens if necessary.
                    self.bump();
                }
                // We are still inside the frame corresponding
                // to the delimited stream we captured, so grab
                // the tokens from this frame.
                let frame = &self.token_cursor.frame;
                let stream = frame.tree_cursor.stream.clone();
                let span = frame.span;
                let delim = frame.delim;
                // Consume close delimiter
                self.bump();
                TokenTree::Delimited(span, delim, stream)
            }
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                self.bump();
                TokenTree::Token(self.prev_token.clone())
            }
        }
    }
1055
1056 /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
1057 pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
1058 let mut tts = Vec::new();
1059 while self.token != token::Eof {
1060 tts.push(self.parse_token_tree());
1061 }
1062 Ok(tts)
1063 }
1064
1065 pub fn parse_tokens(&mut self) -> TokenStream {
1066 let mut result = Vec::new();
1067 loop {
1068 match self.token.kind {
1069 token::Eof | token::CloseDelim(..) => break,
1070 _ => result.push(self.parse_token_tree().into()),
1071 }
1072 }
1073 TokenStream::new(result)
1074 }
1075
1076 /// Evaluates the closure with restrictions in place.
1077 ///
1078 /// Afters the closure is evaluated, restrictions are reset.
1079 fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
1080 let old = self.restrictions;
1081 self.restrictions = res;
1082 let res = f(self);
1083 self.restrictions = old;
1084 res
1085 }
1086
1087 fn is_crate_vis(&self) -> bool {
1088 self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
1089 }
1090
    /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
    /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        // Reuse a visibility already parsed into an interpolated token, if any.
        maybe_whole!(self, NtVis, |x| x);

        self.expected_tokens.push(TokenType::Keyword(kw::Crate));
        if self.is_crate_vis() {
            // Bare `crate` as a shortcut for `pub(crate)` — feature-gated.
            self.bump(); // `crate`
            self.sess.gated_spans.gate(sym::crate_visibility_modifier, self.prev_token.span);
            return Ok(Visibility {
                span: self.prev_token.span,
                kind: VisibilityKind::Crate(CrateSugar::JustCrate),
                tokens: None,
            });
        }

        if !self.eat_keyword(kw::Pub) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(&token::OpenDelim(token::Paren)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::Crate]) && self.look_ahead(2, |t| t != &token::ModSep)
            // account for `pub(crate::foo)`
            {
                // Parse `pub(crate)`.
                self.bump(); // `(`
                self.bump(); // `crate`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Crate(CrateSugar::PubCrate);
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren))
                && self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
            {
                // Parse `pub(self)` or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        // Plain `pub` (or `pub(...)` we recovered from just above).
        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
1176
1177 /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
1178 fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1179 self.bump(); // `(`
1180 let path = self.parse_path(PathStyle::Mod)?;
1181 self.expect(&token::CloseDelim(token::Paren))?; // `)`
1182
1183 let msg = "incorrect visibility restriction";
1184 let suggestion = r##"some possible visibility restrictions are:
1185`pub(crate)`: visible only on the current crate
1186`pub(super)`: visible only in the current module's parent
1187`pub(in path::to::module)`: visible only on the specified path"##;
1188
1189 let path_str = pprust::path_to_string(&path);
1190
1191 struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
1192 .help(suggestion)
1193 .span_suggestion(
1194 path.span,
1195 &format!("make this visible only to module `{}` with `in`", path_str),
1196 format!("in {}", path_str),
1197 Applicability::MachineApplicable,
1198 )
1199 .emit();
1200
1201 Ok(())
1202 }
1203
1204 /// Parses `extern string_literal?`.
1205 fn parse_extern(&mut self) -> Extern {
1206 if self.eat_keyword(kw::Extern) { Extern::from_abi(self.parse_abi()) } else { Extern::None }
1207 }
1208
1209 /// Parses a string literal as an ABI spec.
1210 fn parse_abi(&mut self) -> Option<StrLit> {
1211 match self.parse_str_lit() {
1212 Ok(str_lit) => Some(str_lit),
1213 Err(Some(lit)) => match lit.kind {
1214 ast::LitKind::Err(_) => None,
1215 _ => {
1216 self.struct_span_err(lit.span, "non-string ABI literal")
1217 .span_suggestion(
1218 lit.span,
1219 "specify the ABI with a string literal",
1220 "\"C\"".to_string(),
1221 Applicability::MaybeIncorrect,
1222 )
1223 .emit();
1224 None
1225 }
1226 },
1227 Err(None) => None,
1228 }
1229 }
1230
    /// Runs `f` while capturing the tokens it consumes, attaching them to the
    /// resulting AST node.
    ///
    /// No attributes are handled here (an empty `AttrWrapper` is passed
    /// through), and collection is forced unconditionally — see the comment
    /// below.
    pub fn collect_tokens_no_attrs<R: AstLike>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
        // `ForceCollect::Yes`
        self.collect_tokens_trailing_token(
            AttrWrapper::empty(),
            ForceCollect::Yes,
            |this, _attrs| Ok((f(this)?, TrailingToken::None)),
        )
    }
1243
1244 /// `::{` or `::*`
1245 fn is_import_coupler(&mut self) -> bool {
1246 self.check(&token::ModSep)
1247 && self.look_ahead(1, |t| {
1248 *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star)
1249 })
1250 }
1251
1252 pub fn clear_expected_tokens(&mut self) {
1253 self.expected_tokens.clear();
1254 }
1255}
1256
1257crate fn make_unclosed_delims_error(
1258 unmatched: UnmatchedBrace,
1259 sess: &ParseSess,
1260) -> Option<DiagnosticBuilder<'_>> {
1261 // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
1262 // `unmatched_braces` only for error recovery in the `Parser`.
1263 let found_delim = unmatched.found_delim?;
1264 let mut err = sess.span_diagnostic.struct_span_err(
1265 unmatched.found_span,
1266 &format!(
1267 "mismatched closing delimiter: `{}`",
1268 pprust::token_kind_to_string(&token::CloseDelim(found_delim)),
1269 ),
1270 );
1271 err.span_label(unmatched.found_span, "mismatched closing delimiter");
1272 if let Some(sp) = unmatched.candidate_span {
1273 err.span_label(sp, "closing delimiter possibly meant for this");
1274 }
1275 if let Some(sp) = unmatched.unclosed_span {
1276 err.span_label(sp, "unclosed delimiter");
1277 }
1278 Some(err)
1279}
1280
1281pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
1282 *sess.reached_eof.borrow_mut() |=
1283 unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
1284 for unmatched in unclosed_delims.drain(..) {
1285 if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
1286 e.emit();
1287 }
1288 }
1289}