// compiler/rustc_parse/src/parser/mod.rs (from the rustc 1.51.0 sources)
pub mod attr;
mod diagnostics;
mod expr;
mod generics;
mod item;
mod nonterminal;
mod pat;
mod path;
mod stmt;
mod ty;

use crate::lexer::UnmatchedBrace;
pub use diagnostics::AttemptLocalParseRecovery;
use diagnostics::Error;
pub use path::PathStyle;

use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, HasTokens};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
use rustc_ast::{Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::PResult;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{Span, DUMMY_SP};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use tracing::debug;

use std::{cmp, mem, slice};

bitflags::bitflags! {
    struct Restrictions: u8 {
        const STMT_EXPR = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
        const CONST_EXPR = 1 << 2;
    }
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

/// Whether or not we should force collection of tokens for an AST node,
/// regardless of whether or not it has attributes.
pub enum ForceCollect {
    Yes,
    No,
}

pub enum TrailingToken {
    None,
    Semi,
}

/// Like `maybe_whole_expr`, but for things other than expressions.
#[macro_export]
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            if let token::$constructor(x) = &**nt {
                let $x = x.clone();
                $p.bump();
                return Ok($e);
            }
        }
    };
}
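
// Illustrative sketch (not part of the original source): a `parse_*` method
// typically invokes `maybe_whole!` as an early-return guard for interpolated
// nonterminals handed over from macro expansion. The method name below is
// hypothetical; `parse_visibility` later in this file does exactly this with
// `maybe_whole!(self, NtVis, |x| x)`:
//
//     fn parse_some_visibility(&mut self) -> PResult<'a, Visibility> {
//         // Reuse an already-parsed `NtVis` nonterminal if that is what the
//         // current token holds, instead of re-parsing raw tokens.
//         maybe_whole!(self, NtVis, |vis| vis);
//         /* ... otherwise parse the raw tokens ... */
//     }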

/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
#[macro_export]
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
            if let token::Interpolated(nt) = &$self.token.kind {
                if let token::NtTy(ty) = &**nt {
                    let ty = ty.clone();
                    $self.bump();
                    return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
                }
            }
        }
    };
}

#[derive(Clone)]
pub struct Parser<'a> {
    pub sess: &'a ParseSess,
    /// The current token.
    pub token: Token,
    /// The spacing for the current token.
    pub token_spacing: Spacing,
    /// The previous token.
    pub prev_token: Token,
    restrictions: Restrictions,
    expected_tokens: Vec<TokenType>,
    // Important: This must only be advanced from `next_tok`
    // to ensure that `token_cursor.num_next_calls` is updated properly.
    token_cursor: TokenCursor,
    desugar_doc_comments: bool,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    unmatched_angle_bracket_count: u32,
    max_angle_bracket_count: u32,
    /// A list of all unclosed delimiters found by the lexer. If an entry is used for error
    /// recovery, it gets removed from here. Every entry left at the end gets emitted as an
    /// independent error.
    pub(super) unclosed_delims: Vec<UnmatchedBrace>,
    last_unexpected_token_span: Option<Span>,
    /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
    /// looked like it could have been a mistyped path or literal (e.g. `Option:Some(42)`).
    pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
    subparser_name: Option<&'static str>,
}

impl<'a> Drop for Parser<'a> {
    fn drop(&mut self) {
        emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
    }
}

#[derive(Clone)]
struct TokenCursor {
    frame: TokenCursorFrame,
    stack: Vec<TokenCursorFrame>,
    desugar_doc_comments: bool,
    // Counts the number of calls to `next` or `next_desugared`,
    // depending on whether `desugar_doc_comments` is set.
    num_next_calls: usize,
    // During parsing, we may sometimes need to 'unglue' a
    // glued token into two component tokens
    // (e.g. '>>' into '>' and '>'), so that the parser
    // can consume them one at a time. This process
    // bypasses the normal capturing mechanism
    // (e.g. `num_next_calls` will not be incremented),
    // since the 'unglued' tokens do not exist in
    // the original `TokenStream`.
    //
    // If we end up consuming both unglued tokens,
    // then this is not an issue - we'll end up
    // capturing the single 'glued' token.
    //
    // However, in certain circumstances, we may
    // want to capture just the first 'unglued' token.
    // For example, capturing the `Vec<u8>`
    // in `Option<Vec<u8>>` requires us to unglue
    // the trailing `>>` token. The `append_unglued_token`
    // field is used to track this token - it gets
    // appended to the captured stream when
    // we evaluate a `LazyTokenStream`.
    append_unglued_token: Option<TreeAndSpacing>,
}
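
// Illustrative sketch of the ungluing case described above (not part of the
// original source): when parsing `Option<Vec<u8>>`, the lexer emits the two
// closing angle brackets as a single glued `>>` token. `Parser::break_and_eat`
// (below) splits it into `>` followed by `>`; `append_unglued_token` remembers
// that first synthesized `>` so a `LazyTokenStream` that captures only
// `Vec<u8>` still ends with the closing bracket it actually consumed.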

#[derive(Clone)]
struct TokenCursorFrame {
    delim: token::DelimToken,
    span: DelimSpan,
    open_delim: bool,
    tree_cursor: tokenstream::Cursor,
    close_delim: bool,
}

impl TokenCursorFrame {
    fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
        TokenCursorFrame {
            delim,
            span,
            open_delim: delim == token::NoDelim,
            tree_cursor: tts.into_trees(),
            close_delim: delim == token::NoDelim,
        }
    }
}

impl TokenCursor {
    fn next(&mut self) -> (Token, Spacing) {
        loop {
            let (tree, spacing) = if !self.frame.open_delim {
                self.frame.open_delim = true;
                TokenTree::open_tt(self.frame.span, self.frame.delim).into()
            } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() {
                tree
            } else if !self.frame.close_delim {
                self.frame.close_delim = true;
                TokenTree::close_tt(self.frame.span, self.frame.delim).into()
            } else if let Some(frame) = self.stack.pop() {
                self.frame = frame;
                continue;
            } else {
                (TokenTree::Token(Token::new(token::Eof, DUMMY_SP)), Spacing::Alone)
            };

            match tree {
                TokenTree::Token(token) => {
                    return (token, spacing);
                }
                TokenTree::Delimited(sp, delim, tts) => {
                    let frame = TokenCursorFrame::new(sp, delim, tts);
                    self.stack.push(mem::replace(&mut self.frame, frame));
                }
            }
        }
    }

    fn next_desugared(&mut self) -> (Token, Spacing) {
        let (data, attr_style, sp) = match self.next() {
            (Token { kind: token::DocComment(_, attr_style, data), span }, _) => {
                (data, attr_style, span)
            }
            tok => return tok,
        };

        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
        // required to wrap the text.
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in data.as_str().chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = cmp::max(num_of_hashes, count);
        }

        let delim_span = DelimSpan::from_single(sp);
        let body = TokenTree::Delimited(
            delim_span,
            token::Bracket,
            [
                TokenTree::token(token::Ident(sym::doc, false), sp),
                TokenTree::token(token::Eq, sp),
                TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp),
            ]
            .iter()
            .cloned()
            .collect::<TokenStream>(),
        );

        self.stack.push(mem::replace(
            &mut self.frame,
            TokenCursorFrame::new(
                delim_span,
                token::NoDelim,
                if attr_style == AttrStyle::Inner {
                    [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
                        .iter()
                        .cloned()
                        .collect::<TokenStream>()
                } else {
                    [TokenTree::token(token::Pound, sp), body]
                        .iter()
                        .cloned()
                        .collect::<TokenStream>()
                },
            ),
        ));

        self.next()
    }
}
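
// Illustrative sketch (not part of the original source) of what
// `next_desugared` produces for a doc comment, written as surface syntax:
//
//     /// Hello        ==>   #[doc = r"Hello"]
//     //! Inner docs   ==>   #![doc = r"Inner docs"]
//
// `num_of_hashes` picks enough `#`s for the raw string literal that the doc
// text cannot terminate it early; e.g. a doc comment containing `"#` is
// emitted with `r##"..."##`.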

#[derive(Debug, Clone, PartialEq)]
enum TokenType {
    Token(TokenKind),
    Keyword(Symbol),
    Operator,
    Lifetime,
    Ident,
    Path,
    Type,
    Const,
}

impl TokenType {
    fn to_string(&self) -> String {
        match *self {
            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
            TokenType::Keyword(kw) => format!("`{}`", kw),
            TokenType::Operator => "an operator".to_string(),
            TokenType::Lifetime => "lifetime".to_string(),
            TokenType::Ident => "identifier".to_string(),
            TokenType::Path => "path".to_string(),
            TokenType::Type => "type".to_string(),
            TokenType::Const => "a const expression".to_string(),
        }
    }
}

#[derive(Copy, Clone, Debug)]
enum TokenExpectType {
    Expect,
    NoExpect,
}

/// A sequence separator.
struct SeqSep {
    /// The separator token.
    sep: Option<TokenKind>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}

impl SeqSep {
    fn trailing_allowed(t: TokenKind) -> SeqSep {
        SeqSep { sep: Some(t), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}
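
// Illustrative sketch (not part of the original source): the two constructors
// cover the usual cases, e.g.
//
//     SeqSep::trailing_allowed(token::Comma)   // `a, b, c,` - trailing `,` allowed
//     SeqSep::none()                           // juxtaposed elements, no separator
//
// `parse_delim_comma_seq` below uses the first form for delimited,
// comma-separated lists.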

pub enum FollowedByType {
    Yes,
    No,
}

fn token_descr_opt(token: &Token) -> Option<&'static str> {
    Some(match token.kind {
        _ if token.is_special_ident() => "reserved identifier",
        _ if token.is_used_keyword() => "keyword",
        _ if token.is_unused_keyword() => "reserved keyword",
        token::DocComment(..) => "doc comment",
        _ => return None,
    })
}

pub(super) fn token_descr(token: &Token) -> String {
    let token_str = pprust::token_to_string(token);
    match token_descr_opt(token) {
        Some(prefix) => format!("{} `{}`", prefix, token_str),
        _ => format!("`{}`", token_str),
    }
}

impl<'a> Parser<'a> {
    pub fn new(
        sess: &'a ParseSess,
        tokens: TokenStream,
        desugar_doc_comments: bool,
        subparser_name: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            sess,
            token: Token::dummy(),
            token_spacing: Spacing::Alone,
            prev_token: Token::dummy(),
            restrictions: Restrictions::empty(),
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
                frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens),
                stack: Vec::new(),
                num_next_calls: 0,
                desugar_doc_comments,
                append_unglued_token: None,
            },
            desugar_doc_comments,
            unmatched_angle_bracket_count: 0,
            max_angle_bracket_count: 0,
            unclosed_delims: Vec::new(),
            last_unexpected_token_span: None,
            last_type_ascription: None,
            subparser_name,
        };

        // Make parser point to the first token.
        parser.bump();

        parser
    }

    fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) {
        let (mut next, spacing) = if self.desugar_doc_comments {
            self.token_cursor.next_desugared()
        } else {
            self.token_cursor.next()
        };
        self.token_cursor.num_next_calls += 1;
        // We've retrieved a token from the underlying
        // cursor, so we no longer need to worry about
        // an unglued token. See `break_and_eat` for more details.
        self.token_cursor.append_unglued_token = None;
        if next.span.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            next.span = fallback_span.with_ctxt(next.span.ctxt());
        }
        (next, spacing)
    }

    pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            // We can get `Ok(true)` from `recover_closing_delimiter`
            // which is called in `expected_one_of_not_found`.
            Ok(_) => FatalError.raise(),
        }
    }

    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
        if self.expected_tokens.is_empty() {
            if self.token == *t {
                self.bump();
                Ok(false)
            } else {
                self.unexpected_try_recover(t)
            }
        } else {
            self.expect_one_of(slice::from_ref(t), &[])
        }
    }

    /// Expects the next token to be an edible or inedible token. If edible, consumes it;
    /// if inedible, returns without consuming anything. Signals a fatal error if the next
    /// token is unexpected.
    pub fn expect_one_of(
        &mut self,
        edible: &[TokenKind],
        inedible: &[TokenKind],
    ) -> PResult<'a, bool /* recovered */> {
        if edible.contains(&self.token.kind) {
            self.bump();
            Ok(false)
        } else if inedible.contains(&self.token.kind) {
            // Leave it in the input.
            Ok(false)
        } else if self.last_unexpected_token_span == Some(self.token.span) {
            FatalError.raise();
        } else {
            self.expected_one_of_not_found(edible, inedible)
        }
    }

    // Public for rustfmt usage.
    pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
        self.parse_ident_common(true)
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
        match self.token.ident() {
            Some((ident, is_raw)) => {
                if !is_raw && ident.is_reserved() {
                    let mut err = self.expected_ident_found();
                    if recover {
                        err.emit();
                    } else {
                        return Err(err);
                    }
                }
                self.bump();
                Ok(ident)
            }
            _ => Err(match self.prev_token.kind {
                TokenKind::DocComment(..) => {
                    self.span_fatal_err(self.prev_token.span, Error::UselessDocComment)
                }
                _ => self.expected_ident_found(),
            }),
        }
    }

    /// Checks if the next token is `tok`, and returns `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
    /// encountered.
    fn check(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.token == *tok;
        if !is_present {
            self.expected_tokens.push(TokenType::Token(tok.clone()));
        }
        is_present
    }

    /// Consumes the token `tok` if it is present. Returns whether the given token was present.
    pub fn eat(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.check(tok);
        if is_present {
            self.bump()
        }
        is_present
    }
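
    // Illustrative sketch (not part of the original source): `check` and `eat`
    // record every token they looked for in `expected_tokens`, which is what
    // later becomes the "expected one of ..." part of parse errors, e.g.:
    //
    //     if self.eat(&token::Comma) {
    //         /* ... */
    //     }
    //     // If the `,` was absent, `expected_tokens` now contains `,`, so an
    //     // error reported at this position can list it as an expectation.
    //
    // `bump` clears `expected_tokens` again once a token is actually consumed.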

    /// If the next token is the given keyword, returns `true` without eating it.
    /// An expectation is also added for diagnostics purposes.
    fn check_keyword(&mut self, kw: Symbol) -> bool {
        self.expected_tokens.push(TokenType::Keyword(kw));
        self.token.is_keyword(kw)
    }

    /// If the next token is the given keyword, eats it and returns `true`.
    /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
    // Public for rustfmt usage.
    pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
        if self.check_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        if self.token.is_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    /// Signals an error if the next token is not the given keyword.
    /// Otherwise, eats it.
    fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
        if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
    }

    /// Is the given keyword `kw` followed by a non-reserved identifier?
    fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
        self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
    }

    fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
        if ok {
            true
        } else {
            self.expected_tokens.push(typ);
            false
        }
    }

    fn check_ident(&mut self) -> bool {
        self.check_or_expected(self.token.is_ident(), TokenType::Ident)
    }

    fn check_path(&mut self) -> bool {
        self.check_or_expected(self.token.is_path_start(), TokenType::Path)
    }

    fn check_type(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
    }

    fn check_const_arg(&mut self) -> bool {
        self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
    }

    fn check_inline_const(&self, dist: usize) -> bool {
        self.is_keyword_ahead(dist, &[kw::Const])
            && self.look_ahead(dist + 1, |t| match t.kind {
                token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
                token::OpenDelim(DelimToken::Brace) => true,
                _ => false,
            })
    }

    /// Checks to see if the next token is either `+` or `+=`.
    /// Otherwise returns `false`.
    fn check_plus(&mut self) -> bool {
        self.check_or_expected(
            self.token.is_like_plus(),
            TokenType::Token(token::BinOp(token::Plus)),
        )
    }

    /// Eats the expected token if it's present, possibly breaking
    /// compound tokens like multi-character operators in the process.
    /// Returns `true` if the token was eaten.
    fn break_and_eat(&mut self, expected: TokenKind) -> bool {
        if self.token.kind == expected {
            self.bump();
            return true;
        }
        match self.token.kind.break_two_token_op() {
            Some((first, second)) if first == expected => {
                let first_span = self.sess.source_map().start_point(self.token.span);
                let second_span = self.token.span.with_lo(first_span.hi());
                self.token = Token::new(first, first_span);
                // Keep track of this token - if we end token capturing now,
                // we'll want to append this token to the captured stream.
                //
                // If we consume any additional tokens, then this token
                // is not needed (we'll capture the entire 'glued' token),
                // and `next_tok` will set this field to `None`.
                self.token_cursor.append_unglued_token =
                    Some((TokenTree::Token(self.token.clone()), Spacing::Alone));
                // Use the spacing of the glued token as the spacing
                // of the unglued second token.
                self.bump_with((Token::new(second, second_span), self.token_spacing));
                true
            }
            _ => {
                self.expected_tokens.push(TokenType::Token(expected));
                false
            }
        }
    }
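
    // Illustrative sketch (not part of the original source): with the parser
    // sitting on the glued `>>` of `Option<Vec<u8>>`, `expect_gt` calls
    // `break_and_eat(token::Gt)`. That rewrites the current token to a `>`
    // with a narrowed span, records it in `append_unglued_token`, and bumps to
    // a synthesized second `>`, so the two closing brackets can be consumed
    // one at a time.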

    /// Eats `+`, possibly breaking tokens like `+=` in the process.
    fn eat_plus(&mut self) -> bool {
        self.break_and_eat(token::BinOp(token::Plus))
    }

    /// Eats `&`, possibly breaking tokens like `&&` in the process.
    /// Signals an error if `&` is not eaten.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `|`, possibly breaking tokens like `||` in the process.
    /// Signals an error if `|` was not eaten.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
    }

    /// Eats `<`, possibly breaking tokens like `<<` in the process.
    fn eat_lt(&mut self) -> bool {
        let ate = self.break_and_eat(token::Lt);
        if ate {
            // See doc comment for `unmatched_angle_bracket_count`.
            self.unmatched_angle_bracket_count += 1;
            self.max_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }
        ate
    }

    /// Eats `<`, possibly breaking tokens like `<<` in the process.
    /// Signals an error if `<` was not eaten.
    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if self.eat_lt() { Ok(()) } else { self.unexpected() }
    }

    /// Eats `>`, possibly breaking tokens like `>>` in the process.
    /// Signals an error if `>` was not eaten.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        if self.break_and_eat(token::Gt) {
            // See doc comment for `unmatched_angle_bracket_count`.
            if self.unmatched_angle_bracket_count > 0 {
                self.unmatched_angle_bracket_count -= 1;
                debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
            }
            Ok(())
        } else {
            self.unexpected()
        }
    }

    fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
        kets.iter().any(|k| match expect {
            TokenExpectType::Expect => self.check(k),
            TokenExpectType::NoExpect => self.token == **k,
        })
    }

    fn parse_seq_to_before_tokens<T>(
        &mut self,
        kets: &[&TokenKind],
        sep: SeqSep,
        expect: TokenExpectType,
        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
        let mut first = true;
        let mut recovered = false;
        let mut trailing = false;
        let mut v = vec![];
        while !self.expect_any_with_type(kets, expect) {
            if let token::CloseDelim(..) | token::Eof = self.token.kind {
                break;
            }
            if let Some(ref t) = sep.sep {
                if first {
                    first = false;
                } else {
                    match self.expect(t) {
                        Ok(false) => {}
                        Ok(true) => {
                            recovered = true;
                            break;
                        }
                        Err(mut expect_err) => {
                            let sp = self.prev_token.span.shrink_to_hi();
                            let token_str = pprust::token_kind_to_string(t);

                            // Attempt to keep parsing if it was a similar separator.
                            if let Some(ref tokens) = t.similar_tokens() {
                                if tokens.contains(&self.token.kind) {
                                    self.bump();
                                }
                            }

                            // If this was a missing `@` in a binding pattern, bail with
                            // a suggestion.
                            // https://github.com/rust-lang/rust/issues/72373
                            if self.prev_token.is_ident() && self.token.kind == token::DotDot {
                                let msg = format!(
                                    "if you meant to bind the contents of \
                                     the rest of the array pattern into `{}`, use `@`",
                                    pprust::token_to_string(&self.prev_token)
                                );
                                expect_err
                                    .span_suggestion_verbose(
                                        self.prev_token.span.shrink_to_hi().until(self.token.span),
                                        &msg,
                                        " @ ".to_string(),
                                        Applicability::MaybeIncorrect,
                                    )
                                    .emit();
                                break;
                            }

                            // Attempt to keep parsing if it was an omitted separator.
                            match f(self) {
                                Ok(t) => {
                                    // Parsed successfully, therefore most probably the code only
                                    // misses a separator.
                                    expect_err
                                        .span_suggestion_short(
                                            sp,
                                            &format!("missing `{}`", token_str),
                                            token_str,
                                            Applicability::MaybeIncorrect,
                                        )
                                        .emit();

                                    v.push(t);
                                    continue;
                                }
                                Err(mut e) => {
                                    // Parsing failed, therefore it must be something more serious
                                    // than just a missing separator.
                                    expect_err.emit();

                                    e.cancel();
                                    break;
                                }
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
                trailing = true;
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, trailing, recovered))
    }

    /// Parses a sequence, not including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_before_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool, bool)> {
        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
    }

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_seq_to_end<T>(
        &mut self,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
        let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
        if !recovered {
            self.eat(ket);
        }
        Ok((val, trailing))
    }

    /// Parses a sequence, including both the opening and the closing delimiter.
    /// The function `f` must consume tokens until reaching the next separator
    /// or closing bracket.
    fn parse_unspanned_seq<T>(
        &mut self,
        bra: &TokenKind,
        ket: &TokenKind,
        sep: SeqSep,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.expect(bra)?;
        self.parse_seq_to_end(ket, sep, f)
    }

    fn parse_delim_comma_seq<T>(
        &mut self,
        delim: DelimToken,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.parse_unspanned_seq(
            &token::OpenDelim(delim),
            &token::CloseDelim(delim),
            SeqSep::trailing_allowed(token::Comma),
            f,
        )
    }

    fn parse_paren_comma_seq<T>(
        &mut self,
        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    ) -> PResult<'a, (Vec<T>, bool)> {
        self.parse_delim_comma_seq(token::Paren, f)
    }
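
    // Illustrative sketch (not part of the original source): the sequence
    // helpers compose as follows for a parenthesized, comma-separated list
    // such as `(a, b, c,)`:
    //
    //     let (elems, trailing) = self.parse_paren_comma_seq(|p| p.parse_expr())?;
    //     // `elems` holds the parsed elements; `trailing` reports whether the
    //     // list ended with a `,` before the closing `)`.
    //
    // The separator recovery in `parse_seq_to_before_tokens` means a missing
    // or similar separator (e.g. `;` instead of `,`) produces a suggestion
    // rather than aborting the parse.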

    /// Advances the parser by one token using the provided token as the next one.
    fn bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
        // Bumping after EOF is a bad sign, usually an infinite loop.
        if self.prev_token.kind == TokenKind::Eof {
            let msg = "attempted to bump the parser past EOF (may be stuck in a loop)";
            self.span_bug(self.token.span, msg);
        }

        // Update the current and previous tokens.
        self.prev_token = mem::replace(&mut self.token, next_token);
        self.token_spacing = next_spacing;

        // Diagnostics.
        self.expected_tokens.clear();
    }

    /// Advances the parser by one token.
    pub fn bump(&mut self) {
        let next_token = self.next_tok(self.token.span);
        self.bump_with(next_token);
    }

    /// Looks ahead `dist` tokens of `self.token` and passes that token to `looker`.
    /// When `dist == 0`, the current token is looked at.
    pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
        if dist == 0 {
            return looker(&self.token);
        }

        let frame = &self.token_cursor.frame;
        match frame.tree_cursor.look_ahead(dist - 1) {
            Some(tree) => match tree {
                TokenTree::Token(token) => looker(token),
                TokenTree::Delimited(dspan, delim, _) => {
                    looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                }
            },
            None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)),
        }
    }

    /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
    fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
        self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
    }

    /// Parses asyncness: `async` or nothing.
    fn parse_asyncness(&mut self) -> Async {
        if self.eat_keyword(kw::Async) {
            let span = self.prev_token.uninterpolated_span();
            Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
        } else {
            Async::No
        }
    }

    /// Parses unsafety: `unsafe` or nothing.
    fn parse_unsafety(&mut self) -> Unsafe {
        if self.eat_keyword(kw::Unsafe) {
            Unsafe::Yes(self.prev_token.uninterpolated_span())
        } else {
            Unsafe::No
        }
    }

    /// Parses constness: `const` or nothing.
    fn parse_constness(&mut self) -> Const {
        // Avoid parsing const blocks as const items.
        if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace))
            && self.eat_keyword(kw::Const)
        {
            Const::Yes(self.prev_token.uninterpolated_span())
        } else {
            Const::No
        }
    }

    /// Parses inline const expressions.
    fn parse_const_block(&mut self, span: Span) -> PResult<'a, P<Expr>> {
        self.sess.gated_spans.gate(sym::inline_const, span);
        self.eat_keyword(kw::Const);
        let blk = self.parse_block()?;
        let anon_const = AnonConst {
            id: DUMMY_NODE_ID,
            value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()),
        };
        let blk_span = anon_const.value.span;
        Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::new()))
    }

    /// Parses mutability (`mut` or nothing).
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
    }

    /// Possibly parses mutability (`const` or `mut`).
    fn parse_const_or_mut(&mut self) -> Option<Mutability> {
        if self.eat_keyword(kw::Mut) {
            Some(Mutability::Mut)
        } else if self.eat_keyword(kw::Const) {
            Some(Mutability::Not)
        } else {
            None
        }
    }

    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
        {
            self.expect_no_suffix(self.token.span, "a tuple index", suffix);
            self.bump();
            Ok(Ident::new(symbol, self.prev_token.span))
        } else {
            self.parse_ident_common(false)
        }
    }

    fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
        self.parse_mac_args_common(true).map(P)
    }

    fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
        self.parse_mac_args_common(false)
    }

    fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
        Ok(
            if self.check(&token::OpenDelim(DelimToken::Paren))
                || self.check(&token::OpenDelim(DelimToken::Bracket))
                || self.check(&token::OpenDelim(DelimToken::Brace))
            {
                match self.parse_token_tree() {
                    TokenTree::Delimited(dspan, delim, tokens) =>
                    // We've confirmed above that there is a delimiter so unwrapping is OK.
                    {
                        MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
                    }
                    _ => unreachable!(),
                }
            } else if !delimited_only {
                if self.eat(&token::Eq) {
                    let eq_span = self.prev_token.span;
                    let mut is_interpolated_expr = false;
                    if let token::Interpolated(nt) = &self.token.kind {
                        if let token::NtExpr(..) = **nt {
                            is_interpolated_expr = true;
                        }
                    }

                    // Collect tokens because they are used during lowering to HIR.
                    let expr = self.collect_tokens(|this| this.parse_expr())?;
                    let span = expr.span;

                    match &expr.kind {
                        // Not gated to support things like `doc = $expr` that work on stable.
                        _ if is_interpolated_expr => {}
                        ExprKind::Lit(lit) if lit.kind.is_unsuffixed() => {}
                        _ => self.sess.gated_spans.gate(sym::extended_key_value_attributes, span),
                    }

                    let token_kind = token::Interpolated(Lrc::new(token::NtExpr(expr)));
                    MacArgs::Eq(eq_span, Token::new(token_kind, span))
                } else {
                    MacArgs::Empty
                }
            } else {
                return self.unexpected();
            },
        )
    }

    fn parse_or_use_outer_attributes(
        &mut self,
        already_parsed_attrs: Option<AttrVec>,
    ) -> PResult<'a, AttrVec> {
        if let Some(attrs) = already_parsed_attrs {
            Ok(attrs)
        } else {
            self.parse_outer_attributes().map(|a| a.into())
        }
    }

    /// Parses a single token tree from the input.
    pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
        match self.token.kind {
            token::OpenDelim(..) => {
                let depth = self.token_cursor.stack.len();

                // We keep advancing the token cursor until we hit
                // the matching `CloseDelim` token.
                while !(depth == self.token_cursor.stack.len()
                    && matches!(self.token.kind, token::CloseDelim(_)))
                {
                    // Advance one token at a time, so `TokenCursor::next()`
                    // can capture these tokens if necessary.
                    self.bump();
                }
                // We are still inside the frame corresponding
                // to the delimited stream we captured, so grab
                // the tokens from this frame.
                let frame = &self.token_cursor.frame;
                let stream = frame.tree_cursor.stream.clone();
                let span = frame.span;
                let delim = frame.delim;
                // Consume the close delimiter.
                self.bump();
                TokenTree::Delimited(span, delim, stream)
            }
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                self.bump();
                TokenTree::Token(self.prev_token.clone())
            }
        }
    }

    /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
        let mut tts = Vec::new();
        while self.token != token::Eof {
            tts.push(self.parse_token_tree());
        }
        Ok(tts)
    }

    pub fn parse_tokens(&mut self) -> TokenStream {
        let mut result = Vec::new();
        loop {
            match self.token.kind {
                token::Eof | token::CloseDelim(..) => break,
                _ => result.push(self.parse_token_tree().into()),
            }
        }
        TokenStream::new(result)
    }

    /// Evaluates the closure with restrictions in place.
    ///
    /// After the closure is evaluated, restrictions are reset.
    fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = self.restrictions;
        self.restrictions = res;
        let res = f(self);
        self.restrictions = old;
        res
    }

    fn is_crate_vis(&self) -> bool {
        self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
    }

    /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
    /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
        maybe_whole!(self, NtVis, |x| x);

        self.expected_tokens.push(TokenType::Keyword(kw::Crate));
        if self.is_crate_vis() {
            self.bump(); // `crate`
            self.sess.gated_spans.gate(sym::crate_visibility_modifier, self.prev_token.span);
            return Ok(Visibility {
                span: self.prev_token.span,
                kind: VisibilityKind::Crate(CrateSugar::JustCrate),
                tokens: None,
            });
        }

        if !self.eat_keyword(kw::Pub) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(Visibility {
                span: self.token.span.shrink_to_lo(),
                kind: VisibilityKind::Inherited,
                tokens: None,
            });
        }
        let lo = self.prev_token.span;

        if self.check(&token::OpenDelim(token::Paren)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.is_keyword_ahead(1, &[kw::Crate]) && self.look_ahead(2, |t| t != &token::ModSep)
            // account for `pub(crate::foo)`
            {
                // Parse `pub(crate)`.
                self.bump(); // `(`
                self.bump(); // `crate`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Crate(CrateSugar::PubCrate);
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.is_keyword_ahead(1, &[kw::In]) {
                // Parse `pub(in path)`.
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren))
                && self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
            {
                // Parse `pub(self)` or `pub(super)`.
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
                return Ok(Visibility {
                    span: lo.to(self.prev_token.span),
                    kind: vis,
                    tokens: None,
                });
            } else if let FollowedByType::No = fbt {
                // Provide this diagnostic if a type cannot follow;
                // in particular, if this is not a tuple struct.
                self.recover_incorrect_vis_restriction()?;
                // Emit diagnostic, but continue with public visibility.
            }
        }

        Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
    }
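
    // Illustrative summary (not part of the original source) of the inputs
    // `parse_visibility` accepts and the `VisibilityKind` it produces:
    //
    //     pub            => Public
    //     crate          => Crate(CrateSugar::JustCrate)   (feature-gated)
    //     pub(crate)     => Crate(CrateSugar::PubCrate)
    //     pub(self)      => Restricted { path: self, .. }
    //     pub(super)     => Restricted { path: super, .. }
    //     pub(in a::b)   => Restricted { path: a::b, .. }
    //     (nothing)      => Inherited
    //
    // Any other `pub(something)` goes through `recover_incorrect_vis_restriction`
    // below, which emits E0704 and continues with public visibility.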

    /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
    fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
        self.bump(); // `(`
        let path = self.parse_path(PathStyle::Mod)?;
        self.expect(&token::CloseDelim(token::Paren))?; // `)`

        let msg = "incorrect visibility restriction";
        let suggestion = r##"some possible visibility restrictions are:
`pub(crate)`: visible only on the current crate
`pub(super)`: visible only in the current module's parent
`pub(in path::to::module)`: visible only on the specified path"##;

        let path_str = pprust::path_to_string(&path);

        struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
            .help(suggestion)
            .span_suggestion(
                path.span,
                &format!("make this visible only to module `{}` with `in`", path_str),
                format!("in {}", path_str),
                Applicability::MachineApplicable,
            )
            .emit();

        Ok(())
    }

    /// Parses `extern string_literal?`.
    fn parse_extern(&mut self) -> PResult<'a, Extern> {
        Ok(if self.eat_keyword(kw::Extern) {
            Extern::from_abi(self.parse_abi())
        } else {
            Extern::None
        })
    }

    /// Parses a string literal as an ABI spec.
    fn parse_abi(&mut self) -> Option<StrLit> {
        match self.parse_str_lit() {
            Ok(str_lit) => Some(str_lit),
            Err(Some(lit)) => match lit.kind {
                ast::LitKind::Err(_) => None,
                _ => {
                    self.struct_span_err(lit.span, "non-string ABI literal")
                        .span_suggestion(
                            lit.span,
                            "specify the ABI with a string literal",
                            "\"C\"".to_string(),
                            Applicability::MaybeIncorrect,
                        )
                        .emit();
                    None
                }
            },
            Err(None) => None,
        }
    }

    pub fn collect_tokens<R: HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
    ) -> PResult<'a, R> {
        self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
    }
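
    // Illustrative sketch (not part of the original source): `collect_tokens`
    // is the "no trailing token" wrapper around `collect_tokens_trailing_token`.
    // `parse_mac_args_common` above uses it to capture the tokens of an
    // attribute value expression:
    //
    //     let expr = self.collect_tokens(|this| this.parse_expr())?;
    //     // `expr` now carries a `LazyTokenStream` that can replay exactly the
    //     // tokens the closure consumed, without eagerly building the stream.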

    /// Records all tokens consumed by the provided callback,
    /// including the current token. These tokens are collected
    /// into a `LazyTokenStream`, and returned along with the result
    /// of the callback.
    ///
    /// Note: If your callback consumes an opening delimiter
    /// (including the case where you call `collect_tokens`
    /// when the current token is an opening delimiter),
    /// you must also consume the corresponding closing delimiter.
    ///
    /// That is, you can consume
    /// `something ([{ }])` or `([{}])`, but not `([{}]`
    ///
    /// This restriction shouldn't be an issue in practice,
    /// since this function is used to record the tokens for
    /// a parsed AST item, which always has matching delimiters.
    pub fn collect_tokens_trailing_token<R: HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
    ) -> PResult<'a, R> {
        let start_token = (self.token.clone(), self.token_spacing);
        let cursor_snapshot = self.token_cursor.clone();

        let (mut ret, trailing_token) = f(self)?;

        // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
        // and `num_calls`, we can reconstruct the `TokenStream` seen
        // by the callback. This allows us to avoid producing a `TokenStream`
        // if it is never needed - for example, a captured `macro_rules!`
        // argument that is never passed to a proc macro.
        // In practice token stream creation happens rarely compared to
        // calls to `collect_tokens` (see some statistics in #78736),
        // so we are doing as little up-front work as possible.
        //
        // This also makes `Parser` very cheap to clone, since
        // there is no intermediate collection buffer to clone.
        #[derive(Clone)]
        struct LazyTokenStreamImpl {
            start_token: (Token, Spacing),
            cursor_snapshot: TokenCursor,
            num_calls: usize,
            desugar_doc_comments: bool,
            append_unglued_token: Option<TreeAndSpacing>,
        }
        impl CreateTokenStream for LazyTokenStreamImpl {
            fn create_token_stream(&self) -> TokenStream {
                // The token produced by the final call to `next` or `next_desugared`
                // was not actually consumed by the callback. The combination
                // of chaining the initial token and using `take` produces the desired
                // result - we produce an empty `TokenStream` if no calls were made,
                // and omit the final token otherwise.
                let mut cursor_snapshot = self.cursor_snapshot.clone();
                let tokens = std::iter::once(self.start_token.clone())
                    .chain((0..self.num_calls).map(|_| {
                        if self.desugar_doc_comments {
                            cursor_snapshot.next_desugared()
                        } else {
                            cursor_snapshot.next()
                        }
                    }))
                    .take(self.num_calls);

                make_token_stream(tokens, self.append_unglued_token.clone())
            }
        }

        let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
        match trailing_token {
            TrailingToken::None => {}
            TrailingToken::Semi => {
                assert_eq!(self.token.kind, token::Semi);
                num_calls += 1;
            }
        }

        let lazy_impl = LazyTokenStreamImpl {
            start_token,
            num_calls,
            cursor_snapshot,
            desugar_doc_comments: self.desugar_doc_comments,
            append_unglued_token: self.token_cursor.append_unglued_token.clone(),
        };
        ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
        Ok(ret)
    }

    /// `::{` or `::*`
    fn is_import_coupler(&mut self) -> bool {
        self.check(&token::ModSep)
            && self.look_ahead(1, |t| {
                *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star)
            })
    }

    pub fn clear_expected_tokens(&mut self) {
        self.expected_tokens.clear();
    }
}

crate fn make_unclosed_delims_error(
    unmatched: UnmatchedBrace,
    sess: &ParseSess,
) -> Option<DiagnosticBuilder<'_>> {
    // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
    // `unmatched_braces` only for error recovery in the `Parser`.
    let found_delim = unmatched.found_delim?;
    let mut err = sess.span_diagnostic.struct_span_err(
        unmatched.found_span,
        &format!(
            "mismatched closing delimiter: `{}`",
            pprust::token_kind_to_string(&token::CloseDelim(found_delim)),
        ),
    );
    err.span_label(unmatched.found_span, "mismatched closing delimiter");
    if let Some(sp) = unmatched.candidate_span {
        err.span_label(sp, "closing delimiter possibly meant for this");
    }
    if let Some(sp) = unmatched.unclosed_span {
        err.span_label(sp, "unclosed delimiter");
    }
    Some(err)
}

pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
    *sess.reached_eof.borrow_mut() |=
        unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
    for unmatched in unclosed_delims.drain(..) {
        if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
            e.emit();
        }
    }
}

/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
fn make_token_stream(
    tokens: impl Iterator<Item = (Token, Spacing)>,
    append_unglued_token: Option<TreeAndSpacing>,
) -> TokenStream {
    #[derive(Debug)]
    struct FrameData {
        open: Span,
        inner: Vec<(TokenTree, Spacing)>,
    }
    let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
    for (token, spacing) in tokens {
        match token {
            Token { kind: TokenKind::OpenDelim(_), span } => {
                stack.push(FrameData { open: span, inner: vec![] });
            }
            Token { kind: TokenKind::CloseDelim(delim), span } => {
                let frame_data = stack.pop().expect("Token stack was empty!");
                let dspan = DelimSpan::from_pair(frame_data.open, span);
                let stream = TokenStream::new(frame_data.inner);
                let delimited = TokenTree::Delimited(dspan, delim, stream);
                stack
                    .last_mut()
                    .unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
                    .inner
                    .push((delimited, Spacing::Alone));
            }
            token => {
                stack
                    .last_mut()
                    .expect("Bottom token frame is missing!")
                    .inner
                    .push((TokenTree::Token(token), spacing));
            }
        }
    }
    let mut final_buf = stack.pop().expect("Missing final buf!");
    final_buf.inner.extend(append_unglued_token);
    assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
    TokenStream::new(final_buf.inner)
}
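
// Illustrative sketch (not part of the original source): given the replayed,
// flattened tokens for `a ( b c ) d`, the stack-based reconstruction in
// `make_token_stream` produces
//
//     [ Token(a), Delimited(Paren, [Token(b), Token(c)]), Token(d) ]
//
// i.e. each `OpenDelim`/`CloseDelim` pair collapses back into a single
// `TokenTree::Delimited`, restoring the nested shape of the original stream.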

#[macro_export]
macro_rules! maybe_collect_tokens {
    ($self:ident, $force_collect:expr, $attrs:expr, $f:expr) => {
        if matches!($force_collect, ForceCollect::Yes)
            || $crate::parser::attr::maybe_needs_tokens($attrs)
        {
            $self.collect_tokens_trailing_token($f)
        } else {
            Ok($f($self)?.0)
        }
    };
}
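
// Illustrative sketch (not part of the original source; the real call sites
// live elsewhere in this crate): `maybe_collect_tokens!` is invoked with a
// closure of the same shape that `collect_tokens_trailing_token` expects. The
// variable names here are hypothetical:
//
//     maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Self| {
//         let item = /* parse the AST node */;
//         Ok((item, TrailingToken::None))
//     })
//
// so tokens are only captured when collection is forced or when the attributes
// may need them (`maybe_needs_tokens`).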