pub mod attr;
mod attr_wrapper;
mod diagnostics;
mod expr;
mod generics;
mod item;
mod nonterminal;
mod pat;
mod path;
mod stmt;
mod ty;

use crate::lexer::UnmatchedBrace;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
use diagnostics::Error;
pub(crate) use item::FnParseMode;
pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
pub use path::PathStyle;

use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::AttributesData;
use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::AttrId;
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, Extern};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacArgsEq, MacDelimiter, Mutability, StrLit};
use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::PResult;
use rustc_errors::{
    struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, MultiSpan,
};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{Span, DUMMY_SP};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use tracing::debug;

use std::ops::Range;
use std::{cmp, mem, slice};

bitflags::bitflags! {
    struct Restrictions: u8 {
        const STMT_EXPR = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR = 1 << 2;
        const ALLOW_LET = 1 << 3;
    }
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes.
#[derive(Clone, Copy, PartialEq)]
pub enum ForceCollect {
    Yes,
    No,
}

#[derive(Debug, Eq, PartialEq)]
pub enum TrailingToken {
    None,
    Semi,
    /// If the trailing token is a comma, then capture it;
    /// otherwise, ignore the trailing token.
    MaybeComma,
}

/// Like `maybe_whole_expr`, but for things other than expressions.
#[macro_export]
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            if let token::$constructor(x) = &**nt {
                let $x = x.clone();
                $p.bump();
                return Ok($e);
            }
        }
    };
}
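
// For example, a typical use inside a `parse_*` method (type parsing does roughly
// `maybe_whole!(self, NtTy, |x| x)`): if the current token is an interpolated `NtTy` handed
// to us by a macro call, clone the pre-parsed fragment, bump past the interpolated token,
// and return it instead of re-parsing.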

/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery
            && $self.look_ahead(1, |t| t == &token::ModSep)
            && let token::Interpolated(nt) = &$self.token.kind
            && let token::NtTy(ty) = &**nt
        {
            let ty = ty.clone();
            $self.bump();
            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
        }
    };
}
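
// For example, this fires on macro-expanded input like `$ty::AssocItem` where `$ty` is an
// interpolated type such as `Vec<u8>`; the recovery rewrites it as the qualified path
// `<Vec<u8>>::AssocItem` via `maybe_recover_from_bad_qpath_stage_2`.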

#[derive(Clone)]
pub struct Parser<'a> {
    pub sess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token.
    pub token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    pub capture_cfg: bool,
    restrictions: Restrictions,
    expected_tokens: Vec<TokenType>,
    // Important: This must only be advanced from `bump` to ensure that
    // `token_cursor.num_next_calls` is updated properly.
    token_cursor: TokenCursor,
    desugar_doc_comments: bool,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u32,
    max_angle_bracket_count: u32,
    /// A list of all unclosed delimiters found by the lexer. If an entry is used for error
    /// recovery it gets removed from here. Every entry left at the end gets emitted as an
    /// independent error.
    pub(super) unclosed_delims: Vec<UnmatchedBrace>,
    last_unexpected_token_span: Option<Span>,
    /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
    /// looked like it could have been a mistyped path or literal (`Option:Some(42)`).
    pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
    capture_state: CaptureState,
    /// This allows us to recover when the user forgets to add braces around
    /// multiple statements in the closure body.
    pub current_closure: Option<ClosureSpans>,
}

// This type is used a lot, e.g. it's cloned when matching many declarative macro rules. Make sure
// it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Parser<'_>, 328);

/// Stores span information about a closure.
#[derive(Clone)]
pub struct ClosureSpans {
    pub whole_closure: Span,
    pub closing_pipe: Span,
    pub body: Span,
}

/// Indicates a range of tokens that should be replaced by
/// the tokens in the provided vector. This is used in two
/// places during token collection:
///
/// 1. During the parsing of an AST node that may have a `#[derive]`
/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`.
/// In this case, we use a `ReplaceRange` to replace the entire inner AST node
/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
/// on an `AttrAnnotatedTokenStream`.
///
/// 2. When we parse an inner attribute while collecting tokens. We
/// remove inner attributes from the token stream entirely, and
/// instead track them through the `attrs` field on the AST node.
/// This allows us to easily manipulate them (for example, removing
/// the first macro inner attribute to invoke a proc-macro).
/// When we create a `TokenStream`, the inner attributes get inserted
/// into the proper place in the token stream.
pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
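
// For example (case 1 above): while collecting tokens for a `#[derive(...)]` struct whose
// field carries a `#[cfg(...)]` attribute, the field's token range is recorded as a
// `ReplaceRange` whose replacement is a single `FlatToken::AttrTarget`, so eager
// cfg-expansion can later be performed on the resulting `AttrAnnotatedTokenStream`.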

/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
/// we will never need an `AttrAnnotatedTokenStream`.
#[derive(Copy, Clone)]
pub enum Capturing {
    /// We aren't performing any capturing - this is the default mode.
    No,
    /// We are capturing tokens.
    Yes,
}

#[derive(Clone)]
struct CaptureState {
    capturing: Capturing,
    replace_ranges: Vec<ReplaceRange>,
    inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
}

impl<'a> Drop for Parser<'a> {
    fn drop(&mut self) {
        emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
    }
}

#[derive(Clone)]
struct TokenCursor {
    // The current (innermost) frame. `frame` and `stack` could be combined,
    // but it's faster to have them separately to access `frame` directly
    // rather than via something like `stack.last().unwrap()` or
    // `stack[stack.len() - 1]`.
    frame: TokenCursorFrame,
    // Additional frames that enclose `frame`.
    stack: Vec<TokenCursorFrame>,
    desugar_doc_comments: bool,
    // Counts the number of calls to `{,inlined_}next`.
    num_next_calls: usize,
    // During parsing, we may sometimes need to 'unglue' a
    // glued token into two component tokens
    // (e.g. '>>' into '>' and '>'), so that the parser
    // can consume them one at a time. This process
    // bypasses the normal capturing mechanism
    // (e.g. `num_next_calls` will not be incremented),
    // since the 'unglued' tokens do not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming both unglued tokens,
    // then this is not an issue - we'll end up
    // capturing the single 'glued' token.
    //
    // However, in certain circumstances, we may
    // want to capture just the first 'unglued' token.
    // For example, capturing the `Vec<u8>`
    // in `Option<Vec<u8>>` requires us to unglue
    // the trailing `>>` token. The `break_last_token`
    // field is used to track this token - it gets
    // appended to the captured stream when
    // we evaluate a `LazyTokenStream`.
    break_last_token: bool,
}

#[derive(Clone)]
struct TokenCursorFrame {
    delim_sp: Option<(Delimiter, DelimSpan)>,
    tree_cursor: tokenstream::Cursor,
}

impl TokenCursorFrame {
    fn new(delim_sp: Option<(Delimiter, DelimSpan)>, tts: TokenStream) -> Self {
        TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
    }
}

impl TokenCursor {
    fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
        self.inlined_next(desugar_doc_comments)
    }

    /// This always-inlined version should only be used on hot code paths.
    #[inline(always)]
    fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
        loop {
            // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
            // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
            // removed.
            if let Some(tree) = self.frame.tree_cursor.next_ref() {
                match tree {
                    &TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
                        (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
                            return self.desugar(attr_style, data, span);
                        }
                        _ => return (token.clone(), spacing),
                    },
                    &TokenTree::Delimited(sp, delim, ref tts) => {
                        // The open delimiter is handled immediately below; the new frame only
                        // needs to record `delim_sp` so the matching close delimiter can be
                        // returned later.
                        let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
                        self.stack.push(mem::replace(&mut self.frame, frame));
                        if delim != Delimiter::Invisible {
                            return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
                        }
                        // No open delimiter to return; continue on to the next iteration.
                    }
                };
            } else if let Some(frame) = self.stack.pop() {
                if let Some((delim, span)) = self.frame.delim_sp && delim != Delimiter::Invisible {
                    self.frame = frame;
                    return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
                }
                self.frame = frame;
                // No close delimiter to return; continue on to the next iteration.
            } else {
                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
            }
        }
    }

    fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> (Token, Spacing) {
        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
        // required to wrap the text.
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in data.as_str().chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = cmp::max(num_of_hashes, count);
        }

        let delim_span = DelimSpan::from_single(span);
        let body = TokenTree::Delimited(
            delim_span,
            Delimiter::Bracket,
            [
                TokenTree::token_alone(token::Ident(sym::doc, false), span),
                TokenTree::token_alone(token::Eq, span),
                TokenTree::token_alone(
                    TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
                    span,
                ),
            ]
            .into_iter()
            .collect::<TokenStream>(),
        );

        self.stack.push(mem::replace(
            &mut self.frame,
            TokenCursorFrame::new(
                None,
                if attr_style == AttrStyle::Inner {
                    [
                        TokenTree::token_alone(token::Pound, span),
                        TokenTree::token_alone(token::Not, span),
                        body,
                    ]
                    .into_iter()
                    .collect::<TokenStream>()
                } else {
                    [TokenTree::token_alone(token::Pound, span), body]
                        .into_iter()
                        .collect::<TokenStream>()
                },
            ),
        ));

        self.next(/* desugar_doc_comments */ false)
    }
}
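
// For example, `desugar` turns a doc comment such as `/// doc text with "#` into the
// attribute tokens `#[doc = r##"doc text with "#"##]`: the hash-counting loop above finds
// the longest `"#...` run in the text (here a quote followed by one `#`) and wraps the raw
// string with one more `#` than that, so the comment text can never terminate the literal
// early.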

#[derive(Debug, Clone, PartialEq)]
enum TokenType {
    Token(TokenKind),
    Keyword(Symbol),
    Operator,
    Lifetime,
    Ident,
    Path,
    Type,
    Const,
}

impl TokenType {
    fn to_string(&self) -> String {
        match *self {
            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
            TokenType::Keyword(kw) => format!("`{}`", kw),
            TokenType::Operator => "an operator".to_string(),
            TokenType::Lifetime => "lifetime".to_string(),
            TokenType::Ident => "identifier".to_string(),
            TokenType::Path => "path".to_string(),
            TokenType::Type => "type".to_string(),
            TokenType::Const => "a const expression".to_string(),
        }
    }
}

#[derive(Copy, Clone, Debug)]
enum TokenExpectType {
    Expect,
    NoExpect,
}

/// A sequence separator.
struct SeqSep {
    /// The separator token.
    sep: Option<TokenKind>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}

impl SeqSep {
    fn trailing_allowed(t: TokenKind) -> SeqSep {
        SeqSep { sep: Some(t), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}

pub enum FollowedByType {
    Yes,
    No,
}

fn token_descr_opt(token: &Token) -> Option<&'static str> {
    Some(match token.kind {
        _ if token.is_special_ident() => "reserved identifier",
        _ if token.is_used_keyword() => "keyword",
        _ if token.is_unused_keyword() => "reserved keyword",
        token::DocComment(..) => "doc comment",
        _ => return None,
    })
}

pub(super) fn token_descr(token: &Token) -> String {
    let token_str = pprust::token_to_string(token);
    match token_descr_opt(token) {
        Some(prefix) => format!("{} `{}`", prefix, token_str),
        _ => format!("`{}`", token_str),
    }
}

impl<'a> Parser<'a> {
    pub fn new(
        sess: &'a ParseSess,
        tokens: TokenStream,
        desugar_doc_comments: bool,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            sess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            capture_cfg: false,
            restrictions: Restrictions::empty(),
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
                frame: TokenCursorFrame::new(None, tokens),
                stack: Vec::new(),
                num_next_calls: 0,
                desugar_doc_comments,
                break_last_token: false,
            },
            desugar_doc_comments,
            unmatched_angle_bracket_count: 0,
            max_angle_bracket_count: 0,
            unclosed_delims: Vec::new(),
            last_unexpected_token_span: None,
            last_type_ascription: None,
            subparser_name,
            capture_state: CaptureState {
                capturing: Capturing::No,
                replace_ranges: Vec::new(),
                inner_attr_ranges: Default::default(),
            },
            current_closure: None,
        };

        // Make parser point to the first token.
        parser.bump();

        parser
    }

    pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // We can get `Ok(true)` from `recover_closing_delimiter`
            // which is called in `expected_one_of_not_found`.
            Ok(_) => FatalError.raise(),
        }
    }

    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
        if self.expected_tokens.is_empty() {
            if self.token == *t {
                self.bump();
                Ok(false)
            } else {
                self.unexpected_try_recover(t)
            }
        } else {
            self.expect_one_of(slice::from_ref(t), &[])
        }
    }

    /// Expects the next token to be edible or inedible. If edible,
    /// consumes it; if inedible, returns without consuming
    /// anything. Signals a fatal error if the next token is unexpected.
    pub fn expect_one_of(
        &mut self,
        edible: &[TokenKind],
        inedible: &[TokenKind],
    ) -> PResult<'a, bool /* recovered */> {
        if edible.contains(&self.token.kind) {
            self.bump();
            Ok(false)
        } else if inedible.contains(&self.token.kind) {
            // leave it in the input
            Ok(false)
        } else if self.last_unexpected_token_span == Some(self.token.span) {
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
        }
    }

    // Public for rustfmt usage.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }

    fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> {
        self.token.ident().ok_or_else(|| match self.prev_token.kind {
            TokenKind::DocComment(..) => {
                self.span_err(self.prev_token.span, Error::UselessDocComment)
            }
            _ => self.expected_ident_found(),
        })
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        let (ident, is_raw) = self.ident_or_err()?;
        if !is_raw && ident.is_reserved() {
            let mut err = self.expected_ident_found();
            if recover {
                err.emit();
            } else {
                return Err(err);
            }
        }
        self.bump();
        Ok(ident)
    }

    /// Checks if the next token is `tok`, and returns `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
    /// encountered.
    fn check(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.token == *tok;
        if !is_present {
            self.expected_tokens.push(TokenType::Token(tok.clone()));
        }
        is_present
    }

    fn check_noexpect(&self, tok: &TokenKind) -> bool {
        self.token == *tok
    }

    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
    ///
    /// The main purpose of this function is to reduce the clutter of the suggestions list,
    /// which using the normal eat method could introduce in some cases.
    pub fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check_noexpect(tok);
        if is_present {
            self.bump()
        }
        is_present
    }

    /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
    pub fn eat(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check(tok);
        if is_present {
            self.bump()
        }
        is_present
    }

    /// If the next token is the given keyword, returns `true` without eating it.
    /// An expectation is also added for diagnostics purposes.
    fn check_keyword(&mut self, kw: Symbol) -> bool {
        self.expected_tokens.push(TokenType::Keyword(kw));
        self.token.is_keyword(kw)
    }

    /// If the next token is the given keyword, eats it and returns `true`.
    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
    // Public for rustfmt usage.
    pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
        if self.check_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        if self.token.is_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    /// If the given word is not a keyword, signals an error.
    /// If the next token is not the given word, signals an error.
    /// Otherwise, eats it.
    fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
        if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
    }

    /// Is the given keyword `kw` followed by a non-reserved identifier?
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
    }

    fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
        if ok {
            true
        } else {
            self.expected_tokens.push(typ);
            false
        }
    }

    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }

    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match t.kind {
                token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
                token::OpenDelim(Delimiter::Brace) => true,
                _ => false,
            })
    }

    /// Checks to see if the next token is either `+` or `+=`.
    /// Otherwise returns `false`.
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(
            self.token.is_like_plus(),
            TokenType::Token(token::BinOp(token::Plus)),
        )
    }

    /// Eats the expected token if it's present, possibly breaking
    /// compound tokens like multi-character operators in the process.
    /// Returns `true` if the token was eaten.
    fn break_and_eat(&mut self, expected: TokenKind) -> bool {
        if self.token.kind == expected {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op() {
            Some((first, second)) if first == expected => {
                let first_span = self.sess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `bump` will reset this field.
                self.token_cursor.break_last_token = true;
                // Use the spacing of the glued token as the spacing
                // of the unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_tokens.push(TokenType::Token(expected));
                false
            }
        }
    }
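
    // For example, when the parser sits on a glued `>>` while closing `Option<Vec<u8>>`,
    // `expect_gt` below ends up in `break_and_eat(token::Gt)`: the `>>` is split, the
    // first `>` is consumed, and the remaining `>` becomes the current token, with
    // `break_last_token` set so token capturing stays consistent.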

    /// Eats `+` possibly breaking tokens like `+=` in the process.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(token::BinOp(token::Plus))
    }

    /// Eats `&` possibly breaking tokens like `&&` in the process.
    /// Signals an error if `&` is not eaten.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `|` possibly breaking tokens like `||` in the process.
    /// Signals an error if `|` was not eaten.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `<` possibly breaking tokens like `<<` in the process.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(token::Lt);
        if ate {
            // See doc comment for `unmatched_angle_bracket_count`.
            self.unmatched_angle_bracket_count += 1;
            self.max_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    /// Eats `<` possibly breaking tokens like `<<` in the process.
    /// Signals an error if `<` was not eaten.
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    /// Eats `>` possibly breaking tokens like `>>` in the process.
    /// Signals an error if `>` was not eaten.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::Gt) {
            // See doc comment for `unmatched_angle_bracket_count`.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }

    fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
        kets.iter().any(|k| match expect {
            TokenExpectType::Expect => self.check(k),
            TokenExpectType::NoExpect => self.token == **k,
        })
    }

    fn parse_seq_to_before_tokens<T>(
        &mut self,
        kets: &[&TokenKind],
        sep: SeqSep,
        expect: TokenExpectType,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
        let mut first = true;
        let mut recovered = false;
        let mut trailing = false;
        let mut v = vec![];
        let unclosed_delims = !self.unclosed_delims.is_empty();

        while !self.expect_any_with_type(kets, expect) {
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(ref t) = sep.sep {
                if first {
                    first = false;
                } else {
                    match self.expect(t) {
                        Ok(false) => {
                            self.current_closure.take();
                        }
                        Ok(true) => {
                            self.current_closure.take();
                            recovered = true;
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(t);

                            match self.current_closure.take() {
                                Some(closure_spans) if self.token.kind == TokenKind::Semi => {
                                    // Finding a semicolon instead of a comma
                                    // after a closure body indicates that the
                                    // closure body may be a block but the user
                                    // forgot to put braces around its
                                    // statements.

                                    self.recover_missing_braces_around_closure_body(
                                        closure_spans,
                                        expect_err,
                                    )?;

                                    continue;
                                }

                                _ => {
                                    // Attempt to keep parsing if it was a similar separator.
                                    if let Some(ref tokens) = t.similar_tokens() {
                                        if tokens.contains(&self.token.kind) && !unclosed_delims {
                                            self.bump();
                                        }
                                    }
                                }
                            }

                            // If this was a missing `@` in a binding pattern,
                            // bail with a suggestion.
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token.kind == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of \
                                    the rest of the array pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        &msg,
                                        " @ ",
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .span_suggestion_short(
                                            sp,
                                            &format!("missing `{}`", token_str),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    expect_err.emit();

                                    e.cancel();
                                    break;
                                }
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
                trailing = true;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }

    fn recover_missing_braces_around_closure_body(
        &mut self,
        closure_spans: ClosureSpans,
        mut expect_err: DiagnosticBuilder<'_, ErrorGuaranteed>,
    ) -> PResult<'a, ()> {
        let initial_semicolon = self.token.span;

        while self.eat(&TokenKind::Semi) {
            let _ = self.parse_stmt(ForceCollect::Yes)?;
        }

        expect_err.set_primary_message(
            "closure bodies that contain statements must be surrounded by braces",
        );

        let preceding_pipe_span = closure_spans.closing_pipe;
        let following_token_span = self.token.span;

        let mut first_note = MultiSpan::from(vec![initial_semicolon]);
        first_note.push_span_label(
            initial_semicolon,
            "this `;` turns the preceding closure into a statement",
        );
        first_note.push_span_label(
            closure_spans.body,
            "this expression is a statement because of the trailing semicolon",
        );
        expect_err.span_note(first_note, "statement found outside of a block");

        let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
        second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
        second_note.push_span_label(
            following_token_span,
            "...but likely you meant the closure to end here",
        );
        expect_err.span_note(second_note, "the closure body may be incorrectly delimited");

        expect_err.set_span(vec![preceding_pipe_span, following_token_span]);

        let opening_suggestion_str = " {".to_string();
        let closing_suggestion_str = "}".to_string();

        expect_err.multipart_suggestion(
            "try adding braces",
            vec![
                (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
                (following_token_span.shrink_to_lo(), closing_suggestion_str),
            ],
            Applicability::MaybeIncorrect,
        );

        expect_err.emit();

        Ok(())
    }

    /// Parses a sequence, not including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_before_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool, bool)> {
        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
    }

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
        if !recovered {
            self.eat(ket);
        }
        Ok((val, trailing))
    }

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_unspanned_seq<T>(
        &mut self,
        bra: &TokenKind,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.expect(bra)?;
        self.parse_seq_to_end(ket, sep, f)
    }

    fn parse_delim_comma_seq<T>(
        &mut self,
        delim: Delimiter,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.parse_unspanned_seq(
            &token::OpenDelim(delim),
            &token::CloseDelim(delim),
            SeqSep::trailing_allowed(token::Comma),
            f,
        )
    }

    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
    }
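
    // For example, a parenthesized, comma-separated list such as the arguments of a call
    // `foo(a, b, c)` is parsed with `parse_paren_comma_seq`, passing a closure that parses
    // a single expression: the `(` is expected, elements are parsed with `,` as the
    // separator (trailing comma allowed), and the closing `)` is eaten unless error
    // recovery already consumed it.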

    /// Advance the parser by one token using the provided token as the next one.
    fn bump_with(&mut self, next: (Token, Spacing)) {
        self.inlined_bump_with(next)
    }

    /// This always-inlined version should only be used on hot code paths.
    #[inline(always)]
    fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // Update the current and previous tokens.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Diagnostics.
        self.expected_tokens.clear();
    }

    /// Advance the parser by one token.
    pub fn bump(&mut self) {
        // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
        // than `.0`/`.1` access.
        let mut next = self.token_cursor.inlined_next(self.desugar_doc_comments);
        self.token_cursor.num_next_calls += 1;
        // We've retrieved a token from the underlying
        // cursor, so we no longer need to worry about
        // an unglued token. See `break_and_eat` for more details.
        self.token_cursor.break_last_token = false;
        if next.0.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            let fallback_span = self.token.span;
            next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
        }
        debug_assert!(!matches!(
            next.0.kind,
            token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
        ));
        self.inlined_bump_with(next)
    }

    /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
    /// When `dist == 0` then the current token is looked at.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        let frame = &self.token_cursor.frame;
        if let Some((delim, span)) = frame.delim_sp && delim != Delimiter::Invisible {
            let all_normal = (0..dist).all(|i| {
                let token = frame.tree_cursor.look_ahead(i);
                !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
            });
            if all_normal {
                return match frame.tree_cursor.look_ahead(dist - 1) {
                    Some(tree) => match tree {
                        TokenTree::Token(token, _) => looker(token),
                        TokenTree::Delimited(dspan, delim, _) => {
                            looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                        }
                    },
                    None => looker(&Token::new(token::CloseDelim(delim), span.close)),
                };
            }
        }

        let mut cursor = self.token_cursor.clone();
        let mut i = 0;
        let mut token = Token::dummy();
        while i < dist {
            token = cursor.next(/* desugar_doc_comments */ false).0;
            if matches!(
                token.kind,
                token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
            ) {
                continue;
            }
            i += 1;
        }
        return looker(&token);
    }
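
    // For example, `self.look_ahead(1, |t| t == &token::ModSep)` (as in the qpath-recovery
    // macro above) peeks one token ahead without committing. The fast path stays inside
    // the current token-tree frame; only when invisible delimiters or frame boundaries get
    // in the way does it fall back to cloning the `TokenCursor` and stepping `dist` tokens
    // forward.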

    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
    fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }

    /// Parses asyncness: `async` or nothing.
    fn parse_asyncness(&mut self) -> Async {
        if self.eat_keyword(kw::Async) {
            let span = self.prev_token.uninterpolated_span();
            Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
        } else {
            Async::No
        }
    }

    /// Parses unsafety: `unsafe` or nothing.
    fn parse_unsafety(&mut self) -> Unsafe {
        if self.eat_keyword(kw::Unsafe) {
            Unsafe::Yes(self.prev_token.uninterpolated_span())
        } else {
            Unsafe::No
        }
    }

    /// Parses constness: `const` or nothing.
    fn parse_constness(&mut self) -> Const {
        // Avoid parsing a const block as a const item.
        if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
            && self.eat_keyword(kw::Const)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }

    /// Parses inline const expressions.
    fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
        if pat {
            self.sess.gated_spans.gate(sym::inline_const_pat, span);
        } else {
            self.sess.gated_spans.gate(sym::inline_const, span);
        }
        self.eat_keyword(kw::Const);
        let (attrs, blk) = self.parse_inner_attrs_and_block()?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::from(attrs)))
    }

    /// Parses mutability (`mut` or nothing).
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
    }
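
    // Note the interaction of the two functions above: for an inline const block such as
    // `const { 1 + 1 }`, `parse_constness` deliberately returns `Const::No` (the token
    // after `const` is `{`), and the expression is instead handled by `parse_const_block`.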

    /// Possibly parses mutability (`const` or `mut`).
    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(kw::Mut) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(kw::Const) {
            Some(Mutability::Not)
        } else {
            None
        }
    }

    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            self.expect_no_suffix(self.token.span, "a tuple index", suffix);
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(true)
        }
    }

    fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
        self.parse_mac_args_common(true).map(P)
    }

    fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
        self.parse_mac_args_common(false)
    }

    fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
        Ok(
            if self.check(&token::OpenDelim(Delimiter::Parenthesis))
                || self.check(&token::OpenDelim(Delimiter::Bracket))
                || self.check(&token::OpenDelim(Delimiter::Brace))
            {
                match self.parse_token_tree() {
                    TokenTree::Delimited(dspan, delim, tokens) =>
                    // We've confirmed above that there is a delimiter so unwrapping is OK.
                    {
                        MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
                    }
                    _ => unreachable!(),
                }
            } else if !delimited_only {
                if self.eat(&token::Eq) {
                    let eq_span = self.prev_token.span;
                    MacArgs::Eq(eq_span, MacArgsEq::Ast(self.parse_expr_force_collect()?))
                } else {
                    MacArgs::Empty
                }
            } else {
                return self.unexpected();
            },
        )
    }

    fn parse_or_use_outer_attributes(
        &mut self,
        already_parsed_attrs: Option<AttrWrapper>,
    ) -> PResult<'a, AttrWrapper> {
        if let Some(attrs) = already_parsed_attrs {
            Ok(attrs)
        } else {
            self.parse_outer_attributes()
        }
    }

    /// Parses a single token tree from the input.
    pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                // Grab the tokens from this frame.
                let frame = &self.token_cursor.frame;
                let stream = frame.tree_cursor.stream.clone();
                let (delim, span) = frame.delim_sp.unwrap();

                // Advance the token cursor through the entire delimited
                // sequence. After getting the `OpenDelim` we are *within* the
                // delimited sequence, i.e. at depth `d`. After getting the
                // matching `CloseDelim` we are *after* the delimited sequence,
                // i.e. at depth `d - 1`.
                let target_depth = self.token_cursor.stack.len() - 1;
                loop {
                    // Advance one token at a time, so `TokenCursor::next()`
                    // can capture these tokens if necessary.
                    self.bump();
                    if self.token_cursor.stack.len() == target_depth {
                        debug_assert!(matches!(self.token.kind, token::CloseDelim(_)));
                        break;
                    }
                }

                // Consume the close delimiter.
                self.bump();
                TokenTree::Delimited(span, delim, stream)
            }
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                self.bump();
                TokenTree::Token(self.prev_token.clone(), Spacing::Alone)
            }
        }
    }

    /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
        let mut tts = Vec::new();
        while self.token != token::Eof {
            tts.push(self.parse_token_tree());
        }
        Ok(tts)
    }

    pub fn parse_tokens(&mut self) -> TokenStream {
        let mut result = Vec::new();
        loop {
            match self.token.kind {
                token::Eof | token::CloseDelim(..) => break,
                _ => result.push(self.parse_token_tree()),
            }
        }
        TokenStream::new(result)
    }

    /// Evaluates the closure with restrictions in place.
    ///
    /// After the closure is evaluated, restrictions are reset.
    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = self.restrictions;
        self.restrictions = res;
        let res = f(self);
        self.restrictions = old;
        res
    }

    /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
    /// for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition rather than a
    /// tuple struct field) and the contents within the parentheses aren't valid, emit a proper
    /// diagnostic.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        maybe_whole!(self, NtVis, |x| x.into_inner());

        if !self.eat_keyword(kw::Pub) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
                && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
            {
                // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
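
    // For example, the forms accepted here are `pub`, `pub(crate)`, `pub(super)`,
    // `pub(self)`, and `pub(in some::path)`. Other `pub(...)` forms only go through
    // `recover_incorrect_vis_restriction` below when a type cannot follow
    // (`FollowedByType::No`), e.g. on `pub(something) fn f()`, because in positions like
    // tuple struct fields `pub (Type)` is legitimate.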

    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
        self.bump(); // `(`
        let path = self.parse_path(PathStyle::Mod)?;
        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`

        let msg = "incorrect visibility restriction";
        let suggestion = r##"some possible visibility restrictions are:
`pub(crate)`: visible only on the current crate
`pub(super)`: visible only in the current module's parent
`pub(in path::to::module)`: visible only on the specified path"##;

        let path_str = pprust::path_to_string(&path);

        struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
            .help(suggestion)
            .span_suggestion(
                path.span,
                &format!("make this visible only to module `{}` with `in`", path_str),
                format!("in {}", path_str),
                Applicability::MachineApplicable,
            )
            .emit();

        Ok(())
    }

    /// Parses `extern string_literal?`.
    fn parse_extern(&mut self) -> Extern {
        if self.eat_keyword(kw::Extern) {
            let mut extern_span = self.prev_token.span;
            let abi = self.parse_abi();
            if let Some(abi) = abi {
                extern_span = extern_span.to(abi.span);
            }
            Extern::from_abi(abi, extern_span)
        } else {
            Extern::None
        }
    }

    /// Parses a string literal as an ABI spec.
    fn parse_abi(&mut self) -> Option<StrLit> {
        match self.parse_str_lit() {
            Ok(str_lit) => Some(str_lit),
            Err(Some(lit)) => match lit.kind {
                ast::LitKind::Err(_) => None,
                _ => {
                    self.struct_span_err(lit.span, "non-string ABI literal")
                        .span_suggestion(
                            lit.span,
                            "specify the ABI with a string literal",
                            "\"C\"",
                            Applicability::MaybeIncorrect,
                        )
                        .emit();
                    None
                }
            },
            Err(None) => None,
        }
    }

    pub fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
        // `ForceCollect::Yes`.
        self.collect_tokens_trailing_token(
            AttrWrapper::empty(),
            ForceCollect::Yes,
            |this, _attrs| Ok((f(this)?, TrailingToken::None)),
        )
    }

    /// `::{` or `::*`
    fn is_import_coupler(&mut self) -> bool {
        self.check(&token::ModSep)
            && self.look_ahead(1, |t| {
                *t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star)
            })
    }

    pub fn clear_expected_tokens(&mut self) {
        self.expected_tokens.clear();
    }
}

pub(crate) fn make_unclosed_delims_error(
    unmatched: UnmatchedBrace,
    sess: &ParseSess,
) -> Option<DiagnosticBuilder<'_, ErrorGuaranteed>> {
    // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
    // `unmatched_braces` only for error recovery in the `Parser`.
    let found_delim = unmatched.found_delim?;
    let span: MultiSpan = if let Some(sp) = unmatched.unclosed_span {
        vec![unmatched.found_span, sp].into()
    } else {
        unmatched.found_span.into()
    };
    let mut err = sess.span_diagnostic.struct_span_err(
        span,
        &format!(
            "mismatched closing delimiter: `{}`",
            pprust::token_kind_to_string(&token::CloseDelim(found_delim)),
        ),
    );
    err.span_label(unmatched.found_span, "mismatched closing delimiter");
    if let Some(sp) = unmatched.candidate_span {
        err.span_label(sp, "closing delimiter possibly meant for this");
    }
    if let Some(sp) = unmatched.unclosed_span {
        err.span_label(sp, "unclosed delimiter");
    }
    Some(err)
}

pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
    *sess.reached_eof.borrow_mut() |=
        unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
    for unmatched in unclosed_delims.drain(..) {
        if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
            e.emit();
        }
    }
}

/// A helper type used when building an `AttrAnnotatedTokenStream` from
/// a `LazyTokenStream`. Both delimiter and non-delimiter tokens
/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
/// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested
/// `AttrAnnotatedTokenTree::Delimited` tokens.
#[derive(Debug, Clone)]
pub enum FlatToken {
    /// A token - this holds both delimiter (e.g. '{' and '}')
    /// and non-delimiter tokens.
    Token(Token),
    /// Holds the `AttributesData` for an AST node. The
    /// `AttributesData` is inserted directly into the
    /// constructed `AttrAnnotatedTokenStream` as
    /// an `AttrAnnotatedTokenTree::Attributes`.
    AttrTarget(AttributesData),
    /// A special 'empty' token that is ignored during the conversion
    /// to an `AttrAnnotatedTokenStream`. This is used to simplify the
    /// handling of replace ranges.
    Empty,
}

#[derive(Debug)]
pub enum NtOrTt {
    Nt(Nonterminal),
    Tt(TokenTree),
}