compiler/rustc_parse/src/parser/diagnostics.rs (rustc.git, upstream 1.48.0~beta.8)
1 use super::ty::AllowPlus;
2 use super::{BlockMode, Parser, PathStyle, SemiColonMode, SeqSep, TokenExpectType, TokenType};
3
4 use rustc_ast::ptr::P;
5 use rustc_ast::token::{self, Lit, LitKind, TokenKind};
6 use rustc_ast::util::parser::AssocOp;
7 use rustc_ast::{
8 self as ast, AngleBracketedArgs, AttrVec, BinOpKind, BindingMode, BlockCheckMode, Expr,
9 ExprKind, Item, ItemKind, Mutability, Param, Pat, PatKind, PathSegment, QSelf, Ty, TyKind,
10 };
11 use rustc_ast_pretty::pprust;
12 use rustc_data_structures::fx::FxHashSet;
13 use rustc_errors::{pluralize, struct_span_err};
14 use rustc_errors::{Applicability, DiagnosticBuilder, Handler, PResult};
15 use rustc_span::source_map::Spanned;
16 use rustc_span::symbol::{kw, Ident};
17 use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};
18
19 use tracing::{debug, trace};
20
21 const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments";
22
23 /// Creates a placeholder argument.
24 pub(super) fn dummy_arg(ident: Ident) -> Param {
25 let pat = P(Pat {
26 id: ast::DUMMY_NODE_ID,
27 kind: PatKind::Ident(BindingMode::ByValue(Mutability::Not), ident, None),
28 span: ident.span,
29 tokens: None,
30 });
31 let ty = Ty { kind: TyKind::Err, span: ident.span, id: ast::DUMMY_NODE_ID, tokens: None };
32 Param {
33 attrs: AttrVec::default(),
34 id: ast::DUMMY_NODE_ID,
35 pat,
36 span: ident.span,
37 ty: P(ty),
38 is_placeholder: false,
39 }
40 }
41
42 pub enum Error {
43 UselessDocComment,
44 }
45
46 impl Error {
47 fn span_err(self, sp: impl Into<MultiSpan>, handler: &Handler) -> DiagnosticBuilder<'_> {
48 match self {
49 Error::UselessDocComment => {
50 let mut err = struct_span_err!(
51 handler,
52 sp,
53 E0585,
54 "found a documentation comment that doesn't document anything",
55 );
56 err.help(
57 "doc comments must come before what they document, maybe a comment was \
58 intended with `//`?",
59 );
60 err
61 }
62 }
63 }
64 }
65
66 pub(super) trait RecoverQPath: Sized + 'static {
67 const PATH_STYLE: PathStyle = PathStyle::Expr;
68 fn to_ty(&self) -> Option<P<Ty>>;
69 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
70 }
71
72 impl RecoverQPath for Ty {
73 const PATH_STYLE: PathStyle = PathStyle::Type;
74 fn to_ty(&self) -> Option<P<Ty>> {
75 Some(P(self.clone()))
76 }
77 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
78 Self {
79 span: path.span,
80 kind: TyKind::Path(qself, path),
81 id: ast::DUMMY_NODE_ID,
82 tokens: None,
83 }
84 }
85 }
86
87 impl RecoverQPath for Pat {
88 fn to_ty(&self) -> Option<P<Ty>> {
89 self.to_ty()
90 }
91 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
92 Self {
93 span: path.span,
94 kind: PatKind::Path(qself, path),
95 id: ast::DUMMY_NODE_ID,
96 tokens: None,
97 }
98 }
99 }
100
101 impl RecoverQPath for Expr {
102 fn to_ty(&self) -> Option<P<Ty>> {
103 self.to_ty()
104 }
105 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
106 Self {
107 span: path.span,
108 kind: ExprKind::Path(qself, path),
109 attrs: AttrVec::new(),
110 id: ast::DUMMY_NODE_ID,
111 tokens: None,
112 }
113 }
114 }
115
116 /// Control whether the closing delimiter should be consumed when calling `Parser::consume_block`.
117 crate enum ConsumeClosingDelim {
118 Yes,
119 No,
120 }
121
122 impl<'a> Parser<'a> {
123 pub(super) fn span_fatal_err<S: Into<MultiSpan>>(
124 &self,
125 sp: S,
126 err: Error,
127 ) -> DiagnosticBuilder<'a> {
128 err.span_err(sp, self.diagnostic())
129 }
130
131 pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
132 self.sess.span_diagnostic.struct_span_err(sp, m)
133 }
134
135 pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
136 self.sess.span_diagnostic.span_bug(sp, m)
137 }
138
139 pub(super) fn diagnostic(&self) -> &'a Handler {
140 &self.sess.span_diagnostic
141 }
142
143 pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
144 self.sess.source_map().span_to_snippet(span)
145 }
146
147 pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
148 let mut err = self.struct_span_err(
149 self.token.span,
150 &format!("expected identifier, found {}", super::token_descr(&self.token)),
151 );
152 let valid_follow = &[
153 TokenKind::Eq,
154 TokenKind::Colon,
155 TokenKind::Comma,
156 TokenKind::Semi,
157 TokenKind::ModSep,
158 TokenKind::OpenDelim(token::DelimToken::Brace),
159 TokenKind::OpenDelim(token::DelimToken::Paren),
160 TokenKind::CloseDelim(token::DelimToken::Brace),
161 TokenKind::CloseDelim(token::DelimToken::Paren),
162 ];
163 match self.token.ident() {
164 Some((ident, false))
165 if ident.is_raw_guess()
166 && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) =>
167 {
168 err.span_suggestion(
169 ident.span,
170 "you can escape reserved keywords to use them as identifiers",
171 format!("r#{}", ident.name),
172 Applicability::MaybeIncorrect,
173 );
174 }
175 _ => {}
176 }
177 if let Some(token_descr) = super::token_descr_opt(&self.token) {
178 err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
179 } else {
180 err.span_label(self.token.span, "expected identifier");
181 if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
182 err.span_suggestion(
183 self.token.span,
184 "remove this comma",
185 String::new(),
186 Applicability::MachineApplicable,
187 );
188 }
189 }
190 err
191 }
192
193 pub(super) fn expected_one_of_not_found(
194 &mut self,
195 edible: &[TokenKind],
196 inedible: &[TokenKind],
197 ) -> PResult<'a, bool /* recovered */> {
198 fn tokens_to_string(tokens: &[TokenType]) -> String {
199 let mut i = tokens.iter();
200 // This might be a sign we need a connect method on `Iterator`.
201 let b = i.next().map_or(String::new(), |t| t.to_string());
202 i.enumerate().fold(b, |mut b, (i, a)| {
203 if tokens.len() > 2 && i == tokens.len() - 2 {
204 b.push_str(", or ");
205 } else if tokens.len() == 2 && i == tokens.len() - 2 {
206 b.push_str(" or ");
207 } else {
208 b.push_str(", ");
209 }
210 b.push_str(&a.to_string());
211 b
212 })
213 }
214
215 let mut expected = edible
216 .iter()
217 .map(|x| TokenType::Token(x.clone()))
218 .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
219 .chain(self.expected_tokens.iter().cloned())
220 .collect::<Vec<_>>();
221 expected.sort_by_cached_key(|x| x.to_string());
222 expected.dedup();
223 let expect = tokens_to_string(&expected[..]);
224 let actual = super::token_descr(&self.token);
225 let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
226 let short_expect = if expected.len() > 6 {
227 format!("{} possible tokens", expected.len())
228 } else {
229 expect.clone()
230 };
231 (
232 format!("expected one of {}, found {}", expect, actual),
233 (self.prev_token.span.shrink_to_hi(), format!("expected one of {}", short_expect)),
234 )
235 } else if expected.is_empty() {
236 (
237 format!("unexpected token: {}", actual),
238 (self.prev_token.span, "unexpected token after this".to_string()),
239 )
240 } else {
241 (
242 format!("expected {}, found {}", expect, actual),
243 (self.prev_token.span.shrink_to_hi(), format!("expected {}", expect)),
244 )
245 };
246 self.last_unexpected_token_span = Some(self.token.span);
247 let mut err = self.struct_span_err(self.token.span, &msg_exp);
248 let sp = if self.token == token::Eof {
249 // This is EOF; don't want to point at the following char, but rather the last token.
250 self.prev_token.span
251 } else {
252 label_sp
253 };
254 match self.recover_closing_delimiter(
255 &expected
256 .iter()
257 .filter_map(|tt| match tt {
258 TokenType::Token(t) => Some(t.clone()),
259 _ => None,
260 })
261 .collect::<Vec<_>>(),
262 err,
263 ) {
264 Err(e) => err = e,
265 Ok(recovered) => {
266 return Ok(recovered);
267 }
268 }
269
270 if self.check_too_many_raw_str_terminators(&mut err) {
271 return Err(err);
272 }
273
274 let sm = self.sess.source_map();
275 if self.prev_token.span == DUMMY_SP {
276 // Account for macro context where the previous span might not be
277 // available to avoid incorrect output (#54841).
278 err.span_label(self.token.span, label_exp);
279 } else if !sm.is_multiline(self.token.span.shrink_to_hi().until(sp.shrink_to_lo())) {
280 // When the spans are in the same line, it means that the only content between
281 // them is whitespace, point at the found token in that case:
282 //
283 // X | () => { syntax error };
284 // | ^^^^^ expected one of 8 possible tokens here
285 //
286 // instead of having:
287 //
288 // X | () => { syntax error };
289 // | -^^^^^ unexpected token
290 // | |
291 // | expected one of 8 possible tokens here
292 err.span_label(self.token.span, label_exp);
293 } else {
294 err.span_label(sp, label_exp);
295 err.span_label(self.token.span, "unexpected token");
296 }
297 self.maybe_annotate_with_ascription(&mut err, false);
298 Err(err)
299 }
300
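/// Illustrative sketch, not from the upstream source: a raw string terminated with
/// more `#` than it was opened with leaves a stray `#` token behind, which the check
/// below turns into a targeted error.
/// ```ignore (diagnostic)
/// let s = r##"raw"###;
///                   ^ help: remove the extra `#`
/// ```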
301 fn check_too_many_raw_str_terminators(&mut self, err: &mut DiagnosticBuilder<'_>) -> bool {
302 match (&self.prev_token.kind, &self.token.kind) {
303 (
304 TokenKind::Literal(Lit {
305 kind: LitKind::StrRaw(n_hashes) | LitKind::ByteStrRaw(n_hashes),
306 ..
307 }),
308 TokenKind::Pound,
309 ) => {
310 err.set_primary_message("too many `#` when terminating raw string");
311 err.span_suggestion(
312 self.token.span,
313 "remove the extra `#`",
314 String::new(),
315 Applicability::MachineApplicable,
316 );
317 err.note(&format!("the raw string started with {} `#`s", n_hashes));
318 true
319 }
320 _ => false,
321 }
322 }
323
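/// Illustrative sketch, not from the upstream source: when a `:` was parsed as a type
/// ascription but the code most likely meant a path, the annotation below suggests `::`.
/// ```ignore (diagnostic)
/// let x = std:process::exit(0);
///            ^ help: maybe write a path separator here: `::`
/// ```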
324 pub fn maybe_annotate_with_ascription(
325 &mut self,
326 err: &mut DiagnosticBuilder<'_>,
327 maybe_expected_semicolon: bool,
328 ) {
329 if let Some((sp, likely_path)) = self.last_type_ascription.take() {
330 let sm = self.sess.source_map();
331 let next_pos = sm.lookup_char_pos(self.token.span.lo());
332 let op_pos = sm.lookup_char_pos(sp.hi());
333
334 let allow_unstable = self.sess.unstable_features.is_nightly_build();
335
336 if likely_path {
337 err.span_suggestion(
338 sp,
339 "maybe write a path separator here",
340 "::".to_string(),
341 if allow_unstable {
342 Applicability::MaybeIncorrect
343 } else {
344 Applicability::MachineApplicable
345 },
346 );
347 self.sess.type_ascription_path_suggestions.borrow_mut().insert(sp);
348 } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
349 err.span_suggestion(
350 sp,
351 "try using a semicolon",
352 ";".to_string(),
353 Applicability::MaybeIncorrect,
354 );
355 } else if allow_unstable {
356 err.span_label(sp, "tried to parse a type due to this type ascription");
357 } else {
358 err.span_label(sp, "tried to parse a type due to this");
359 }
360 if allow_unstable {
361 // Give extra information about type ascription only if it's a nightly compiler.
362 err.note(
363 "`#![feature(type_ascription)]` lets you annotate an expression with a type: \
364 `<expr>: <type>`",
365 );
366 if !likely_path {
367 // Avoid giving too much info when it was likely an unrelated typo.
368 err.note(
369 "see issue #23416 <https://github.com/rust-lang/rust/issues/23416> \
370 for more information",
371 );
372 }
373 }
374 }
375 }
376
377 /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
378 /// passes through any errors encountered. Used for error recovery.
379 pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
380 if let Err(ref mut err) =
381 self.parse_seq_to_before_tokens(kets, SeqSep::none(), TokenExpectType::Expect, |p| {
382 Ok(p.parse_token_tree())
383 })
384 {
385 err.cancel();
386 }
387 }
388
389 /// This function checks if there are trailing angle brackets and produces
390 /// a diagnostic to suggest removing them.
391 ///
392 /// ```ignore (diagnostic)
393 /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
394 /// ^^ help: remove extra angle brackets
395 /// ```
396 ///
397 /// If `true` is returned, then trailing brackets were recovered, tokens were consumed
398 /// up until one of the tokens in `end` was encountered, and an error was emitted.
399 pub(super) fn check_trailing_angle_brackets(
400 &mut self,
401 segment: &PathSegment,
402 end: &[&TokenKind],
403 ) -> bool {
404 // This function is intended to be invoked after parsing a path segment where there are two
405 // cases:
406 //
407 // 1. A specific token is expected after the path segment.
408 // e.g. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
409 // `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
410 // 2. No specific token is expected after the path segment.
411 // e.g. `x.foo` (field access)
412 //
413 // This function is called after parsing `.foo` and before parsing the token `end` (if
414 // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
415 // `Foo::<Bar>`.
416
417 // We only care about trailing angle brackets if we previously parsed angle bracket
418 // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
419 // removed in this case:
420 //
421 // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
422 //
423 // This case is particularly tricky as we won't notice it just looking at the tokens -
424 // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
425 // have already been parsed):
426 //
427 // `x.foo::<u32>>>(3)`
428 let parsed_angle_bracket_args =
429 segment.args.as_ref().map(|args| args.is_angle_bracketed()).unwrap_or(false);
430
431 debug!(
432 "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
433 parsed_angle_bracket_args,
434 );
435 if !parsed_angle_bracket_args {
436 return false;
437 }
438
439 // Keep the span at the start so we can highlight the sequence of `>` characters to be
440 // removed.
441 let lo = self.token.span;
442
443 // We need to look ahead to see if we have `>` characters without moving the cursor forward
444 // (since we might have the field access case and the characters we're eating are
445 // actual operators and not trailing characters - i.e. `x.foo >> 3`).
446 let mut position = 0;
447
448 // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
449 // many of each (so we can correctly pluralize our error messages) and continue to
450 // advance.
451 let mut number_of_shr = 0;
452 let mut number_of_gt = 0;
453 while self.look_ahead(position, |t| {
454 trace!("check_trailing_angle_brackets: t={:?}", t);
455 if *t == token::BinOp(token::BinOpToken::Shr) {
456 number_of_shr += 1;
457 true
458 } else if *t == token::Gt {
459 number_of_gt += 1;
460 true
461 } else {
462 false
463 }
464 }) {
465 position += 1;
466 }
467
468 // If we didn't find any trailing `>` characters, then we have nothing to error about.
469 debug!(
470 "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
471 number_of_gt, number_of_shr,
472 );
473 if number_of_gt < 1 && number_of_shr < 1 {
474 return false;
475 }
476
477 // Finally, double check that we have our end token as otherwise this is the
478 // second case.
479 if self.look_ahead(position, |t| {
480 trace!("check_trailing_angle_brackets: t={:?}", t);
481 end.contains(&&t.kind)
482 }) {
483 // Eat from where we started until the end token so that parsing can continue
484 // as if we didn't have those extra angle brackets.
485 self.eat_to_tokens(end);
486 let span = lo.until(self.token.span);
487
488 let total_num_of_gt = number_of_gt + number_of_shr * 2;
489 self.struct_span_err(
490 span,
491 &format!("unmatched angle bracket{}", pluralize!(total_num_of_gt)),
492 )
493 .span_suggestion(
494 span,
495 &format!("remove extra angle bracket{}", pluralize!(total_num_of_gt)),
496 String::new(),
497 Applicability::MachineApplicable,
498 )
499 .emit();
500 return true;
501 }
502 false
503 }
504
505 /// Check if a method call with an intended turbofish has been written without surrounding
506 /// angle brackets.
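/// Illustrative sketch, not from the upstream source:
/// ```ignore (diagnostic)
/// x.collect::Vec<i32>();
///            ^^^^^^^^ help: surround the type parameters with angle brackets: `<Vec<i32>>`
/// ```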
507 pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut PathSegment) {
508 if token::ModSep == self.token.kind && segment.args.is_none() {
509 let snapshot = self.clone();
510 self.bump();
511 let lo = self.token.span;
512 match self.parse_angle_args() {
513 Ok(args) => {
514 let span = lo.to(self.prev_token.span);
515 // Detect trailing `>` like in `x.collect::Vec<_>>()`.
516 let mut trailing_span = self.prev_token.span.shrink_to_hi();
517 while self.token.kind == token::BinOp(token::Shr)
518 || self.token.kind == token::Gt
519 {
520 trailing_span = trailing_span.to(self.token.span);
521 self.bump();
522 }
523 if self.token.kind == token::OpenDelim(token::Paren) {
524 // Recover from bad turbofish: `foo.collect::Vec<_>()`.
525 let args = AngleBracketedArgs { args, span }.into();
526 segment.args = args;
527
528 self.struct_span_err(
529 span,
530 "generic parameters without surrounding angle brackets",
531 )
532 .multipart_suggestion(
533 "surround the type parameters with angle brackets",
534 vec![
535 (span.shrink_to_lo(), "<".to_string()),
536 (trailing_span, ">".to_string()),
537 ],
538 Applicability::MachineApplicable,
539 )
540 .emit();
541 } else {
542 // This doesn't look like an invalid turbofish, can't recover parse state.
543 *self = snapshot;
544 }
545 }
546 Err(mut err) => {
547 // We couldn't parse generic parameters, so this is unlikely to be a turbofish. Rely on
548 // the generic parse error instead.
549 err.cancel();
550 *self = snapshot;
551 }
552 }
553 }
554 }
555
556 /// When a turbofish with multiple type parameters is written without the leading `::`,
557 /// the parser hits a parse error at the first `,`; this attempts to recover from that.
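/// Illustrative sketch, not from the upstream source (`f` and its arguments are
/// placeholders):
/// ```ignore (diagnostic)
/// let x = f<u32, i32>(1, 2);
///          ^ help: use `::<...>` instead of `<...>` to specify type arguments
/// ```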
558 pub(super) fn check_mistyped_turbofish_with_multiple_type_params(
559 &mut self,
560 mut e: DiagnosticBuilder<'a>,
561 expr: &mut P<Expr>,
562 ) -> PResult<'a, ()> {
563 if let ExprKind::Binary(binop, _, _) = &expr.kind {
564 if let ast::BinOpKind::Lt = binop.node {
565 if self.eat(&token::Comma) {
566 let x = self.parse_seq_to_before_end(
567 &token::Gt,
568 SeqSep::trailing_allowed(token::Comma),
569 |p| p.parse_ty(),
570 );
571 match x {
572 Ok((_, _, false)) => {
573 self.bump(); // `>`
574 match self.parse_expr() {
575 Ok(_) => {
576 e.span_suggestion_verbose(
577 binop.span.shrink_to_lo(),
578 "use `::<...>` instead of `<...>` to specify type arguments",
579 "::".to_string(),
580 Applicability::MaybeIncorrect,
581 );
582 e.emit();
583 *expr = self.mk_expr_err(expr.span.to(self.prev_token.span));
584 return Ok(());
585 }
586 Err(mut err) => {
587 err.cancel();
588 }
589 }
590 }
591 Err(mut err) => {
592 err.cancel();
593 }
594 _ => {}
595 }
596 }
597 }
598 }
599 Err(e)
600 }
601
602 /// Check to see if a pair of chained operators looks like an attempt at chained comparison,
603 /// e.g. `1 < x <= 3`. If so, suggest either splitting the comparison into two, or
604 /// parenthesising the leftmost comparison.
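/// Illustrative sketch, not from the upstream source, of the two shapes targeted here:
/// ```ignore (diagnostic)
/// 1 < x <= 3;
///       ^^ help: split the comparison into two: `1 < x && x <= 3`
/// a == b < c;
///        ^ help: parenthesize the comparison: `a == (b < c)`
/// ```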
605 fn attempt_chained_comparison_suggestion(
606 &mut self,
607 err: &mut DiagnosticBuilder<'_>,
608 inner_op: &Expr,
609 outer_op: &Spanned<AssocOp>,
610 ) -> bool /* advanced the cursor */ {
611 if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
612 if let ExprKind::Field(_, ident) = l1.kind {
613 if ident.as_str().parse::<i32>().is_err() && !matches!(r1.kind, ExprKind::Lit(_)) {
614 // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
615 // suggestion being the only one to apply is high.
616 return false;
617 }
618 }
619 let mut enclose = |left: Span, right: Span| {
620 err.multipart_suggestion(
621 "parenthesize the comparison",
622 vec![
623 (left.shrink_to_lo(), "(".to_string()),
624 (right.shrink_to_hi(), ")".to_string()),
625 ],
626 Applicability::MaybeIncorrect,
627 );
628 };
629 return match (op.node, &outer_op.node) {
630 // `x == y == z`
631 (BinOpKind::Eq, AssocOp::Equal) |
632 // `x < y < z` and friends.
633 (BinOpKind::Lt, AssocOp::Less | AssocOp::LessEqual) |
634 (BinOpKind::Le, AssocOp::LessEqual | AssocOp::Less) |
635 // `x > y > z` and friends.
636 (BinOpKind::Gt, AssocOp::Greater | AssocOp::GreaterEqual) |
637 (BinOpKind::Ge, AssocOp::GreaterEqual | AssocOp::Greater) => {
638 let expr_to_str = |e: &Expr| {
639 self.span_to_snippet(e.span)
640 .unwrap_or_else(|_| pprust::expr_to_string(&e))
641 };
642 err.span_suggestion_verbose(
643 inner_op.span.shrink_to_hi(),
644 "split the comparison into two",
645 format!(" && {}", expr_to_str(&r1)),
646 Applicability::MaybeIncorrect,
647 );
648 false // Keep the current parse behavior, where the AST is `(x < y) < z`.
649 }
650 // `x == y < z`
651 (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
652 // Consume `z`/outer-op-rhs.
653 let snapshot = self.clone();
654 match self.parse_expr() {
655 Ok(r2) => {
656 // We are sure that outer-op-rhs could be consumed, the suggestion is
657 // likely correct.
658 enclose(r1.span, r2.span);
659 true
660 }
661 Err(mut expr_err) => {
662 expr_err.cancel();
663 *self = snapshot;
664 false
665 }
666 }
667 }
668 // `x > y == z`
669 (BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge, AssocOp::Equal) => {
670 let snapshot = self.clone();
671 // At this point it is always valid to enclose the lhs in parentheses, no
672 // further checks are necessary.
673 match self.parse_expr() {
674 Ok(_) => {
675 enclose(l1.span, r1.span);
676 true
677 }
678 Err(mut expr_err) => {
679 expr_err.cancel();
680 *self = snapshot;
681 false
682 }
683 }
684 }
685 _ => false,
686 };
687 }
688 false
689 }
690
691 /// Produces an error if comparison operators are chained (RFC #558).
692 /// We only need to check the LHS, not the RHS, because all comparison ops have the same
693 /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`).
694 ///
695 /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
696 /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the
697 /// case.
698 ///
699 /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left
700 /// associative we can infer that we have:
701 ///
702 /// ```text
703 /// outer_op
704 /// / \
705 /// inner_op r2
706 /// / \
707 /// l1 r1
708 /// ```
709 pub(super) fn check_no_chained_comparison(
710 &mut self,
711 inner_op: &Expr,
712 outer_op: &Spanned<AssocOp>,
713 ) -> PResult<'a, Option<P<Expr>>> {
714 debug_assert!(
715 outer_op.node.is_comparison(),
716 "check_no_chained_comparison: {:?} is not comparison",
717 outer_op.node,
718 );
719
720 let mk_err_expr =
721 |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err, AttrVec::new())));
722
723 match inner_op.kind {
724 ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
725 let mut err = self.struct_span_err(
726 vec![op.span, self.prev_token.span],
727 "comparison operators cannot be chained",
728 );
729
730 let suggest = |err: &mut DiagnosticBuilder<'_>| {
731 err.span_suggestion_verbose(
732 op.span.shrink_to_lo(),
733 TURBOFISH,
734 "::".to_string(),
735 Applicability::MaybeIncorrect,
736 );
737 };
738
739 // Include `<` to provide this recommendation even in a case like
740 // `Foo<Bar<Baz<Qux, ()>>>`
741 if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Less
742 || outer_op.node == AssocOp::Greater
743 {
744 if outer_op.node == AssocOp::Less {
745 let snapshot = self.clone();
746 self.bump();
747 // So far we have parsed `foo<bar<`, consume the rest of the type args.
748 let modifiers =
749 [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
750 self.consume_tts(1, &modifiers[..]);
751
752 if !&[token::OpenDelim(token::Paren), token::ModSep]
753 .contains(&self.token.kind)
754 {
755 // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
756 // parser and bail out.
757 *self = snapshot.clone();
758 }
759 }
760 return if token::ModSep == self.token.kind {
761 // We have some certainty that this was a bad turbofish at this point.
762 // `foo< bar >::`
763 suggest(&mut err);
764
765 let snapshot = self.clone();
766 self.bump(); // `::`
767
768 // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
769 match self.parse_expr() {
770 Ok(_) => {
771 // 99% certain that the suggestion is correct, continue parsing.
772 err.emit();
773 // FIXME: actually check that the two expressions in the binop are
774 // paths and resynthesize new fn call expression instead of using
775 // `ExprKind::Err` placeholder.
776 mk_err_expr(self, inner_op.span.to(self.prev_token.span))
777 }
778 Err(mut expr_err) => {
779 expr_err.cancel();
780 // Not entirely sure now, but we bubble the error up with the
781 // suggestion.
782 *self = snapshot;
783 Err(err)
784 }
785 }
786 } else if token::OpenDelim(token::Paren) == self.token.kind {
787 // We have high certainty that this was a bad turbofish at this point.
788 // `foo< bar >(`
789 suggest(&mut err);
790 // Consume the fn call arguments.
791 match self.consume_fn_args() {
792 Err(()) => Err(err),
793 Ok(()) => {
794 err.emit();
795 // FIXME: actually check that the two expressions in the binop are
796 // paths and resynthesize new fn call expression instead of using
797 // `ExprKind::Err` placeholder.
798 mk_err_expr(self, inner_op.span.to(self.prev_token.span))
799 }
800 }
801 } else {
802 if !matches!(l1.kind, ExprKind::Lit(_))
803 && !matches!(r1.kind, ExprKind::Lit(_))
804 {
805 // All we know is that this is `foo < bar >` and *nothing* else. Try to
806 // be helpful, but don't attempt to recover.
807 err.help(TURBOFISH);
808 err.help("or use `(...)` if you meant to specify fn arguments");
809 }
810
811 // If it looks like a genuine attempt to chain operators (as opposed to a
812 // misformatted turbofish, for instance), suggest a correct form.
813 if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
814 {
815 err.emit();
816 mk_err_expr(self, inner_op.span.to(self.prev_token.span))
817 } else {
818 // These cases cause too many knock-down errors, bail out (#61329).
819 Err(err)
820 }
821 };
822 }
823 let recover =
824 self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
825 err.emit();
826 if recover {
827 return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
828 }
829 }
830 _ => {}
831 }
832 Ok(None)
833 }
834
835 fn consume_fn_args(&mut self) -> Result<(), ()> {
836 let snapshot = self.clone();
837 self.bump(); // `(`
838
839 // Consume the fn call arguments.
840 let modifiers =
841 [(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)];
842 self.consume_tts(1, &modifiers[..]);
843
844 if self.token.kind == token::Eof {
845 // Not entirely sure that what we consumed were fn arguments, rollback.
846 *self = snapshot;
847 Err(())
848 } else {
849 // 99% certain that the suggestion is correct, continue parsing.
850 Ok(())
851 }
852 }
853
854 pub(super) fn maybe_report_ambiguous_plus(
855 &mut self,
856 allow_plus: AllowPlus,
857 impl_dyn_multi: bool,
858 ty: &Ty,
859 ) {
860 if matches!(allow_plus, AllowPlus::No) && impl_dyn_multi {
861 let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
862 self.struct_span_err(ty.span, "ambiguous `+` in a type")
863 .span_suggestion(
864 ty.span,
865 "use parentheses to disambiguate",
866 sum_with_parens,
867 Applicability::MachineApplicable,
868 )
869 .emit();
870 }
871 }
872
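/// Illustrative sketch, not from the upstream source, of the E0178 case recovered below:
/// ```ignore (diagnostic)
/// let x: &Copy + 'static;
///        ^^^^^^^^^^^^^^^ help: try adding parentheses: `&(Copy + 'static)`
/// ```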
873 pub(super) fn maybe_recover_from_bad_type_plus(
874 &mut self,
875 allow_plus: AllowPlus,
876 ty: &Ty,
877 ) -> PResult<'a, ()> {
878 // Do not add `+` to expected tokens.
879 if matches!(allow_plus, AllowPlus::No) || !self.token.is_like_plus() {
880 return Ok(());
881 }
882
883 self.bump(); // `+`
884 let bounds = self.parse_generic_bounds(None)?;
885 let sum_span = ty.span.to(self.prev_token.span);
886
887 let mut err = struct_span_err!(
888 self.sess.span_diagnostic,
889 sum_span,
890 E0178,
891 "expected a path on the left-hand side of `+`, not `{}`",
892 pprust::ty_to_string(ty)
893 );
894
895 match ty.kind {
896 TyKind::Rptr(ref lifetime, ref mut_ty) => {
897 let sum_with_parens = pprust::to_string(|s| {
898 s.s.word("&");
899 s.print_opt_lifetime(lifetime);
900 s.print_mutability(mut_ty.mutbl, false);
901 s.popen();
902 s.print_type(&mut_ty.ty);
903 s.print_type_bounds(" +", &bounds);
904 s.pclose()
905 });
906 err.span_suggestion(
907 sum_span,
908 "try adding parentheses",
909 sum_with_parens,
910 Applicability::MachineApplicable,
911 );
912 }
913 TyKind::Ptr(..) | TyKind::BareFn(..) => {
914 err.span_label(sum_span, "perhaps you forgot parentheses?");
915 }
916 _ => {
917 err.span_label(sum_span, "expected a path");
918 }
919 }
920 err.emit();
921 Ok(())
922 }
923
924 /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
925 /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
926 /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
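/// Illustrative sketch, not from the upstream source (`buf` is a placeholder):
/// ```ignore (diagnostic)
/// let n = [u8]::len(&buf);
///         ^^^^^^^^^ help: try: `<[u8]>::len`
/// ```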
927 pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
928 &mut self,
929 base: P<T>,
930 allow_recovery: bool,
931 ) -> PResult<'a, P<T>> {
932 // Do not add `::` to expected tokens.
933 if allow_recovery && self.token == token::ModSep {
934 if let Some(ty) = base.to_ty() {
935 return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
936 }
937 }
938 Ok(base)
939 }
940
941 /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
942 /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
943 pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
944 &mut self,
945 ty_span: Span,
946 ty: P<Ty>,
947 ) -> PResult<'a, P<T>> {
948 self.expect(&token::ModSep)?;
949
950 let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP, tokens: None };
951 self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
952 path.span = ty_span.to(self.prev_token.span);
953
954 let ty_str = self.span_to_snippet(ty_span).unwrap_or_else(|_| pprust::ty_to_string(&ty));
955 self.struct_span_err(path.span, "missing angle brackets in associated item path")
956 .span_suggestion(
957 // This is a best-effort recovery.
958 path.span,
959 "try",
960 format!("<{}>::{}", ty_str, pprust::path_to_string(&path)),
961 Applicability::MaybeIncorrect,
962 )
963 .emit();
964
965 let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
966 Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
967 }
968
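/// Illustrative sketch, not from the upstream source: a stray `;` after an item is
/// consumed and reported here.
/// ```ignore (diagnostic)
/// struct Foo { x: u32 };
///                      ^ help: remove this semicolon
/// ```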
969 pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
970 if self.eat(&token::Semi) {
971 let mut err = self.struct_span_err(self.prev_token.span, "expected item, found `;`");
972 err.span_suggestion_short(
973 self.prev_token.span,
974 "remove this semicolon",
975 String::new(),
976 Applicability::MachineApplicable,
977 );
978 if !items.is_empty() {
979 let previous_item = &items[items.len() - 1];
980 let previous_item_kind_name = match previous_item.kind {
981 // Say "braced struct" because tuple-structs and
982 // braceless-empty-struct declarations do take a semicolon.
983 ItemKind::Struct(..) => Some("braced struct"),
984 ItemKind::Enum(..) => Some("enum"),
985 ItemKind::Trait(..) => Some("trait"),
986 ItemKind::Union(..) => Some("union"),
987 _ => None,
988 };
989 if let Some(name) = previous_item_kind_name {
990 err.help(&format!("{} declarations are not followed by a semicolon", name));
991 }
992 }
993 err.emit();
994 true
995 } else {
996 false
997 }
998 }
999
1000 /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
1001 /// closing delimiter.
1002 pub(super) fn unexpected_try_recover(
1003 &mut self,
1004 t: &TokenKind,
1005 ) -> PResult<'a, bool /* recovered */> {
1006 let token_str = pprust::token_kind_to_string(t);
1007 let this_token_str = super::token_descr(&self.token);
1008 let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
1009 // Point at the end of the macro call when reaching end of macro arguments.
1010 (token::Eof, Some(_)) => {
1011 let sp = self.sess.source_map().next_point(self.token.span);
1012 (sp, sp)
1013 }
1014 // We don't want to point at the following span after DUMMY_SP.
1015 // This happens when the parser finds an empty TokenStream.
1016 _ if self.prev_token.span == DUMMY_SP => (self.token.span, self.token.span),
1017 // EOF, don't want to point at the following char, but rather the last token.
1018 (token::Eof, None) => (self.prev_token.span, self.token.span),
1019 _ => (self.prev_token.span.shrink_to_hi(), self.token.span),
1020 };
1021 let msg = format!(
1022 "expected `{}`, found {}",
1023 token_str,
1024 match (&self.token.kind, self.subparser_name) {
1025 (token::Eof, Some(origin)) => format!("end of {}", origin),
1026 _ => this_token_str,
1027 },
1028 );
1029 let mut err = self.struct_span_err(sp, &msg);
1030 let label_exp = format!("expected `{}`", token_str);
1031 match self.recover_closing_delimiter(&[t.clone()], err) {
1032 Err(e) => err = e,
1033 Ok(recovered) => {
1034 return Ok(recovered);
1035 }
1036 }
1037 let sm = self.sess.source_map();
1038 if !sm.is_multiline(prev_sp.until(sp)) {
1039 // When the spans are in the same line, it means that the only content
1040 // between them is whitespace, point only at the found token.
1041 err.span_label(sp, label_exp);
1042 } else {
1043 err.span_label(prev_sp, label_exp);
1044 err.span_label(sp, "unexpected token");
1045 }
1046 Err(err)
1047 }
1048
1049 pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
1050 if self.eat(&token::Semi) {
1051 return Ok(());
1052 }
1053 let sm = self.sess.source_map();
1054 let msg = format!("expected `;`, found {}", super::token_descr(&self.token));
1055 let appl = Applicability::MachineApplicable;
1056 if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
1057 // Likely inside a macro, can't provide meaningful suggestions.
1058 return self.expect(&token::Semi).map(drop);
1059 } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
1060 // The current token is in the same line as the prior token, not recoverable.
1061 } else if [token::Comma, token::Colon].contains(&self.token.kind)
1062 && self.prev_token.kind == token::CloseDelim(token::Paren)
1063 {
1064 // Likely typo: The current token is on a new line and is expected to be
1065 // `.`, `;`, `?`, or an operator after a close delimiter token.
1066 //
1067 // let a = std::process::Command::new("echo")
1068 // .arg("1")
1069 // ,arg("2")
1070 // ^
1071 // https://github.com/rust-lang/rust/issues/72253
1072 self.expect(&token::Semi)?;
1073 return Ok(());
1074 } else if self.look_ahead(1, |t| {
1075 t == &token::CloseDelim(token::Brace) || t.can_begin_expr() && t.kind != token::Colon
1076 }) && [token::Comma, token::Colon].contains(&self.token.kind)
1077 {
1078 // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
1079 // either `,` or `:`, and the next token could either start a new statement or is a
1080 // block close. For example:
1081 //
1082 // let x = 32:
1083 // let y = 42;
1084 self.bump();
1085 let sp = self.prev_token.span;
1086 self.struct_span_err(sp, &msg)
1087 .span_suggestion_short(sp, "change this to `;`", ";".to_string(), appl)
1088 .emit();
1089 return Ok(());
1090 } else if self.look_ahead(0, |t| {
1091 t == &token::CloseDelim(token::Brace)
1092 || (
1093 t.can_begin_expr() && t != &token::Semi && t != &token::Pound
1094 // Avoid triggering with too many trailing `#` in raw string.
1095 )
1096 }) {
1097 // Missing semicolon typo. This is triggered if the next token could either start a
1098 // new statement or is a block close. For example:
1099 //
1100 // let x = 32
1101 // let y = 42;
1102 let sp = self.prev_token.span.shrink_to_hi();
1103 self.struct_span_err(sp, &msg)
1104 .span_label(self.token.span, "unexpected token")
1105 .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl)
1106 .emit();
1107 return Ok(());
1108 }
1109 self.expect(&token::Semi).map(drop) // Error unconditionally
1110 }
1111
1112 /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
1113 /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
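/// Illustrative sketch, not from the upstream source (`fut` is a placeholder):
/// ```ignore (diagnostic)
/// await!(fut);
/// await fut;
/// ^^^^^^^^^ help: `await` is a postfix operation: `fut.await`
/// ```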
1114 pub(super) fn recover_incorrect_await_syntax(
1115 &mut self,
1116 lo: Span,
1117 await_sp: Span,
1118 attrs: AttrVec,
1119 ) -> PResult<'a, P<Expr>> {
1120 let (hi, expr, is_question) = if self.token == token::Not {
1121 // Handle `await!(<expr>)`.
1122 self.recover_await_macro()?
1123 } else {
1124 self.recover_await_prefix(await_sp)?
1125 };
1126 let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
1127 let expr = self.mk_expr(lo.to(sp), ExprKind::Await(expr), attrs);
1128 self.maybe_recover_from_bad_qpath(expr, true)
1129 }
1130
1131 fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
1132 self.expect(&token::Not)?;
1133 self.expect(&token::OpenDelim(token::Paren))?;
1134 let expr = self.parse_expr()?;
1135 self.expect(&token::CloseDelim(token::Paren))?;
1136 Ok((self.prev_token.span, expr, false))
1137 }
1138
1139 fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
1140 let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
1141 let expr = if self.token == token::OpenDelim(token::Brace) {
1142 // Handle `await { <expr> }`.
1143 // This needs to be handled separately from the next arm to avoid
1144 // interpreting `await { <expr> }?` as `<expr>?.await`.
1145 self.parse_block_expr(None, self.token.span, BlockCheckMode::Default, AttrVec::new())
1146 } else {
1147 self.parse_expr()
1148 }
1149 .map_err(|mut err| {
1150 err.span_label(await_sp, "while parsing this incorrect await expression");
1151 err
1152 })?;
1153 Ok((expr.span, expr, is_question))
1154 }
1155
1156 fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
1157 let expr_str =
1158 self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(&expr));
1159 let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
1160 let sp = lo.to(hi);
1161 let app = match expr.kind {
1162 ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
1163 _ => Applicability::MachineApplicable,
1164 };
1165 self.struct_span_err(sp, "incorrect use of `await`")
1166 .span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
1167 .emit();
1168 sp
1169 }
1170
1171 /// If encountering `future.await()`, consumes and emits an error.
1172 pub(super) fn recover_from_await_method_call(&mut self) {
1173 if self.token == token::OpenDelim(token::Paren)
1174 && self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
1175 {
1176 // future.await()
1177 let lo = self.token.span;
1178 self.bump(); // (
1179 let sp = lo.to(self.token.span);
1180 self.bump(); // )
1181 self.struct_span_err(sp, "incorrect use of `await`")
1182 .span_suggestion(
1183 sp,
1184 "`await` is not a method call, remove the parentheses",
1185 String::new(),
1186 Applicability::MachineApplicable,
1187 )
1188 .emit();
1189 }
1190 }
1191
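/// Illustrative sketch, not from the upstream source (`parse()` is a placeholder):
/// ```ignore (diagnostic)
/// let x = try!(parse());
///         ^^^^^^^^^^^^^ error: use of deprecated `try` macro
///                       help: you can use the `?` operator instead: `parse()?`
/// ```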
1192 pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
1193 let is_try = self.token.is_keyword(kw::Try);
1194 let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
1195 let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(token::Paren)); //check for (
1196
1197 if is_try && is_questionmark && is_open {
1198 let lo = self.token.span;
1199 self.bump(); //remove try
1200 self.bump(); //remove !
1201 let try_span = lo.to(self.token.span); //we take the try!( span
1202 self.bump(); //remove (
1203 let is_empty = self.token == token::CloseDelim(token::Paren); //check if the block is empty
1204 self.consume_block(token::Paren, ConsumeClosingDelim::No); //eat the block
1205 let hi = self.token.span;
1206 self.bump(); //remove )
1207 let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro");
1208 err.note("in the 2018 edition `try` is a reserved keyword, and the `try!()` macro is deprecated");
1209 let prefix = if is_empty { "" } else { "alternatively, " };
1210 if !is_empty {
1211 err.multipart_suggestion(
1212 "you can use the `?` operator instead",
1213 vec![(try_span, "".to_owned()), (hi, "?".to_owned())],
1214 Applicability::MachineApplicable,
1215 );
1216 }
1217 err.span_suggestion(lo.shrink_to_lo(), &format!("{}you can still access the deprecated `try!()` macro using the \"raw identifier\" syntax", prefix), "r#".to_string(), Applicability::MachineApplicable);
1218 err.emit();
1219 Ok(self.mk_expr_err(lo.to(hi)))
1220 } else {
1221 Err(self.expected_expression_found()) // The user isn't trying to invoke the try! macro
1222 }
1223 }
1224
1225 /// Recovers a situation like `for ( $pat in $expr )`
1226 /// and suggests writing `for $pat in $expr` instead.
1227 ///
1228 /// This should be called before parsing the `$block`.
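/// Illustrative sketch, not from the upstream source:
/// ```ignore (diagnostic)
/// for (x in 0..10) {}
///                ^ help: remove parenthesis in `for` loop: `x in 0..10`
/// ```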
1229 pub(super) fn recover_parens_around_for_head(
1230 &mut self,
1231 pat: P<Pat>,
1232 expr: &Expr,
1233 begin_paren: Option<Span>,
1234 ) -> P<Pat> {
1235 match (&self.token.kind, begin_paren) {
1236 (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
1237 self.bump();
1238
1239 let pat_str = self
1240 // Remove the `(` from the span of the pattern:
1241 .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
1242 .unwrap_or_else(|_| pprust::pat_to_string(&pat));
1243
1244 self.struct_span_err(self.prev_token.span, "unexpected closing `)`")
1245 .span_label(begin_par_sp, "opening `(`")
1246 .span_suggestion(
1247 begin_par_sp.to(self.prev_token.span),
1248 "remove parenthesis in `for` loop",
1249 format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
1250 // With e.g. `for (x) in y)` this would replace `(x) in y)`
1251 // with `x) in y)` which is syntactically invalid.
1252 // However, this is prevented before we get here.
1253 Applicability::MachineApplicable,
1254 )
1255 .emit();
1256
1257 // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
1258 pat.and_then(|pat| match pat.kind {
1259 PatKind::Paren(pat) => pat,
1260 _ => P(pat),
1261 })
1262 }
1263 _ => pat,
1264 }
1265 }
1266
1267 pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
1268 (self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish.
1269 self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()))
1270 || self.token.is_ident() &&
1271 match node {
1272 // `foo::` → `foo:` or `foo.bar::` → `foo.bar:`
1273 ast::ExprKind::Path(..) | ast::ExprKind::Field(..) => true,
1274 _ => false,
1275 } &&
1276 !self.token.is_reserved_ident() && // v `foo:bar(baz)`
1277 self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren))
1278 || self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) // `foo:bar {`
1279 || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
1280 self.look_ahead(2, |t| t == &token::Lt) &&
1281 self.look_ahead(3, |t| t.is_ident())
1282 || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
1283 self.look_ahead(2, |t| t.is_ident())
1284 || self.look_ahead(1, |t| t == &token::ModSep)
1285 && (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz`
1286 self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
1287 }
1288
1289 pub(super) fn recover_seq_parse_error(
1290 &mut self,
1291 delim: token::DelimToken,
1292 lo: Span,
1293 result: PResult<'a, P<Expr>>,
1294 ) -> P<Expr> {
1295 match result {
1296 Ok(x) => x,
1297 Err(mut err) => {
1298 err.emit();
1299 // Recover from parse error, callers expect the closing delim to be consumed.
1300 self.consume_block(delim, ConsumeClosingDelim::Yes);
1301 self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err, AttrVec::new())
1302 }
1303 }
1304 }
1305
1306 pub(super) fn recover_closing_delimiter(
1307 &mut self,
1308 tokens: &[TokenKind],
1309 mut err: DiagnosticBuilder<'a>,
1310 ) -> PResult<'a, bool> {
1311 let mut pos = None;
1312 // We want to use the last closing delim that would apply.
1313 for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
1314 if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
1315 && Some(self.token.span) > unmatched.unclosed_span
1316 {
1317 pos = Some(i);
1318 }
1319 }
1320 match pos {
1321 Some(pos) => {
1322 // Recover and assume that the detected unclosed delimiter was meant for
1323 // this location. Emit the diagnostic and act as if the delimiter was
1324 // present for the parser's sake.
1325
1326 // Don't attempt to recover from this unclosed delimiter more than once.
1327 let unmatched = self.unclosed_delims.remove(pos);
1328 let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
1329 if unmatched.found_delim.is_none() {
1330 // We encountered `Eof`, set this fact here to avoid complaining about missing
1331 // `fn main()` when we found place to suggest the closing brace.
1332 *self.sess.reached_eof.borrow_mut() = true;
1333 }
1334
1335 // We want to suggest the inclusion of the closing delimiter where it makes
1336 // the most sense, which is immediately after the last token:
1337 //
1338 // {foo(bar {}}
1339 // - ^
1340 // | |
1341 // | help: `)` may belong here
1342 // |
1343 // unclosed delimiter
1344 if let Some(sp) = unmatched.unclosed_span {
1345 err.span_label(sp, "unclosed delimiter");
1346 }
1347 // Backticks should be removed to apply suggestions.
1348 let mut delim = delim.to_string();
1349 delim.retain(|c| c != '`');
1350 err.span_suggestion_short(
1351 self.prev_token.span.shrink_to_hi(),
1352 &format!("`{}` may belong here", delim),
1353 delim,
1354 Applicability::MaybeIncorrect,
1355 );
1356 if unmatched.found_delim.is_none() {
1357 // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
1358 // errors which would be emitted elsewhere in the parser and let other error
1359 // recovery consume the rest of the file.
1360 Err(err)
1361 } else {
1362 err.emit();
1363 self.expected_tokens.clear(); // Reduce the number of errors.
1364 Ok(true)
1365 }
1366 }
1367 _ => Err(err),
1368 }
1369 }
1370
1371 /// Eats tokens until we can be relatively sure we reached the end of the
1372 /// statement. This is something of a best-effort heuristic.
1373 ///
1374 /// We terminate when we find an unmatched `}` (without consuming it).
1375 pub(super) fn recover_stmt(&mut self) {
1376 self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
1377 }
1378
1379 /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
1380 /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
1381 /// approximate -- it can mean we break too early due to macros, but that
1382 /// should only lead to sub-optimal recovery, not inaccurate parsing).
1383 ///
1384 /// If `break_on_block` is `Break`, then we will stop consuming tokens
1385 /// after finding (and consuming) a brace-delimited block.
1386 pub(super) fn recover_stmt_(
1387 &mut self,
1388 break_on_semi: SemiColonMode,
1389 break_on_block: BlockMode,
1390 ) {
1391 let mut brace_depth = 0;
1392 let mut bracket_depth = 0;
1393 let mut in_block = false;
1394 debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", break_on_semi, break_on_block);
1395 loop {
1396 debug!("recover_stmt_ loop {:?}", self.token);
1397 match self.token.kind {
1398 token::OpenDelim(token::DelimToken::Brace) => {
1399 brace_depth += 1;
1400 self.bump();
1401 if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
1402 {
1403 in_block = true;
1404 }
1405 }
1406 token::OpenDelim(token::DelimToken::Bracket) => {
1407 bracket_depth += 1;
1408 self.bump();
1409 }
1410 token::CloseDelim(token::DelimToken::Brace) => {
1411 if brace_depth == 0 {
1412 debug!("recover_stmt_ return - close delim {:?}", self.token);
1413 break;
1414 }
1415 brace_depth -= 1;
1416 self.bump();
1417 if in_block && bracket_depth == 0 && brace_depth == 0 {
1418 debug!("recover_stmt_ return - block end {:?}", self.token);
1419 break;
1420 }
1421 }
1422 token::CloseDelim(token::DelimToken::Bracket) => {
1423 bracket_depth -= 1;
1424 if bracket_depth < 0 {
1425 bracket_depth = 0;
1426 }
1427 self.bump();
1428 }
1429 token::Eof => {
1430 debug!("recover_stmt_ return - Eof");
1431 break;
1432 }
1433 token::Semi => {
1434 self.bump();
1435 if break_on_semi == SemiColonMode::Break
1436 && brace_depth == 0
1437 && bracket_depth == 0
1438 {
1439 debug!("recover_stmt_ return - Semi");
1440 break;
1441 }
1442 }
1443 token::Comma
1444 if break_on_semi == SemiColonMode::Comma
1445 && brace_depth == 0
1446 && bracket_depth == 0 =>
1447 {
1448 debug!("recover_stmt_ return - Comma");
1449 break;
1450 }
1451 _ => self.bump(),
1452 }
1453 }
1454 }
1455
1456 pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
1457 if self.eat_keyword(kw::In) {
1458 // a common typo: `for _ in in bar {}`
1459 self.struct_span_err(self.prev_token.span, "expected iterable, found keyword `in`")
1460 .span_suggestion_short(
1461 in_span.until(self.prev_token.span),
1462 "remove the duplicated `in`",
1463 String::new(),
1464 Applicability::MachineApplicable,
1465 )
1466 .emit();
1467 }
1468 }
1469
1470 pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
1471 let token_str = super::token_descr(&self.token);
1472 let msg = &format!("expected `;` or `{{`, found {}", token_str);
1473 let mut err = self.struct_span_err(self.token.span, msg);
1474 err.span_label(self.token.span, "expected `;` or `{`");
1475 Err(err)
1476 }
1477
1478 pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
1479 if let token::DocComment(..) = self.token.kind {
1480 self.struct_span_err(
1481 self.token.span,
1482 "documentation comments cannot be applied to a function parameter's type",
1483 )
1484 .span_label(self.token.span, "doc comments are not allowed here")
1485 .emit();
1486 self.bump();
1487 } else if self.token == token::Pound
1488 && self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket))
1489 {
1490 let lo = self.token.span;
1491 // Skip every token until next possible arg.
1492 while self.token != token::CloseDelim(token::Bracket) {
1493 self.bump();
1494 }
1495 let sp = lo.to(self.token.span);
1496 self.bump();
1497 self.struct_span_err(sp, "attributes cannot be applied to a function parameter's type")
1498 .span_label(sp, "attributes are not allowed here")
1499 .emit();
1500 }
1501 }
1502
1503 pub(super) fn parameter_without_type(
1504 &mut self,
1505 err: &mut DiagnosticBuilder<'_>,
1506 pat: P<ast::Pat>,
1507 require_name: bool,
1508 first_param: bool,
1509 ) -> Option<Ident> {
1510 // If we find a pattern followed by an identifier, it could be an (incorrect)
1511 // C-style parameter declaration.
1512 if self.check_ident()
1513 && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren))
1514 {
1515 // `fn foo(String s) {}`
1516 let ident = self.parse_ident().unwrap();
1517 let span = pat.span.with_hi(ident.span.hi());
1518
1519 err.span_suggestion(
1520 span,
1521 "declare the type after the parameter binding",
1522 String::from("<identifier>: <type>"),
1523 Applicability::HasPlaceholders,
1524 );
1525 return Some(ident);
1526 } else if let PatKind::Ident(_, ident, _) = pat.kind {
1527 if require_name
1528 && (self.token == token::Comma
1529 || self.token == token::Lt
1530 || self.token == token::CloseDelim(token::Paren))
1531 {
1532 // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}`
1533 if first_param {
1534 err.span_suggestion(
1535 pat.span,
1536 "if this is a `self` type, give it a parameter name",
1537 format!("self: {}", ident),
1538 Applicability::MaybeIncorrect,
1539 );
1540 }
1541 // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to
1542 // `fn foo(HashMap: TypeName<u32>)`.
1543 if self.token != token::Lt {
1544 err.span_suggestion(
1545 pat.span,
1546 "if this is a parameter name, give it a type",
1547 format!("{}: TypeName", ident),
1548 Applicability::HasPlaceholders,
1549 );
1550 }
1551 err.span_suggestion(
1552 pat.span,
1553 "if this is a type, explicitly ignore the parameter name",
1554 format!("_: {}", ident),
1555 Applicability::MachineApplicable,
1556 );
1557 err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
1558
1559 // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name.
1560 return if self.token == token::Lt { None } else { Some(ident) };
1561 }
1562 }
1563 None
1564 }
1565
1566 pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
1567 let pat = self.parse_pat(Some("argument name"))?;
1568 self.expect(&token::Colon)?;
1569 let ty = self.parse_ty()?;
1570
1571 struct_span_err!(
1572 self.diagnostic(),
1573 pat.span,
1574 E0642,
1575 "patterns aren't allowed in methods without bodies",
1576 )
1577 .span_suggestion_short(
1578 pat.span,
1579 "give this argument a name or use an underscore to ignore it",
1580 "_".to_owned(),
1581 Applicability::MachineApplicable,
1582 )
1583 .emit();
1584
1585 // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
1586 let pat =
1587 P(Pat { kind: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID, tokens: None });
1588 Ok((pat, ty))
1589 }
1590
1591 pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
1592 let sp = param.pat.span;
1593 param.ty.kind = TyKind::Err;
1594 self.struct_span_err(sp, "unexpected `self` parameter in function")
1595 .span_label(sp, "must be the first parameter of an associated function")
1596 .emit();
1597 Ok(param)
1598 }
1599
1600 pub(super) fn consume_block(
1601 &mut self,
1602 delim: token::DelimToken,
1603 consume_close: ConsumeClosingDelim,
1604 ) {
1605 let mut brace_depth = 0;
1606 loop {
1607 if self.eat(&token::OpenDelim(delim)) {
1608 brace_depth += 1;
1609 } else if self.check(&token::CloseDelim(delim)) {
1610 if brace_depth == 0 {
1611 if let ConsumeClosingDelim::Yes = consume_close {
1612 // Some of the callers of this method expect to be able to parse the
1613 // closing delimiter themselves, so we leave it alone. Otherwise we advance
1614 // the parser.
1615 self.bump();
1616 }
1617 return;
1618 } else {
1619 self.bump();
1620 brace_depth -= 1;
1621 continue;
1622 }
1623 } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
1624 return;
1625 } else {
1626 self.bump();
1627 }
1628 }
1629 }
1630
1631 pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
1632 let (span, msg) = match (&self.token.kind, self.subparser_name) {
1633 (&token::Eof, Some(origin)) => {
1634 let sp = self.sess.source_map().next_point(self.token.span);
1635 (sp, format!("expected expression, found end of {}", origin))
1636 }
1637 _ => (
1638 self.token.span,
1639 format!("expected expression, found {}", super::token_descr(&self.token),),
1640 ),
1641 };
1642 let mut err = self.struct_span_err(span, &msg);
1643 let sp = self.sess.source_map().start_point(self.token.span);
1644 if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
1645 self.sess.expr_parentheses_needed(&mut err, *sp, None);
1646 }
1647 err.span_label(span, "expected expression");
1648 err
1649 }
1650
1651 fn consume_tts(
1652 &mut self,
1653 mut acc: i64, // `i64` because malformed code can have more closing delims than opening.
1654 // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`.
1655 modifier: &[(token::TokenKind, i64)],
1656 ) {
1657 while acc > 0 {
1658 if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
1659 acc += *val;
1660 }
1661 if self.token.kind == token::Eof {
1662 break;
1663 }
1664 self.bump();
1665 }
1666 }
1667
1668 /// Replace duplicated recovered parameters with `_` pattern to avoid unnecessary errors.
1669 ///
1670 /// This is necessary because at this point we don't know whether we parsed a function with
1671 /// anonymous parameters or a function with names but no types. In order to minimize
1672 /// unnecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
1673 /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
1674 /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
1675 /// we deduplicate them to not complain about duplicated parameter names.
1676 pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
1677 let mut seen_inputs = FxHashSet::default();
1678 for input in fn_inputs.iter_mut() {
1679 let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
1680 (&input.pat.kind, &input.ty.kind)
1681 {
1682 Some(*ident)
1683 } else {
1684 None
1685 };
1686 if let Some(ident) = opt_ident {
1687 if seen_inputs.contains(&ident) {
1688 input.pat.kind = PatKind::Wild;
1689 }
1690 seen_inputs.insert(ident);
1691 }
1692 }
1693 }
1694 }