// src/librustc_parse/parser/diagnostics.rs
1 use super::ty::AllowPlus;
2 use super::{BlockMode, Parser, PathStyle, SemiColonMode, SeqSep, TokenExpectType, TokenType};
3
4 use rustc_ast::ptr::P;
5 use rustc_ast::token::{self, Lit, LitKind, TokenKind};
6 use rustc_ast::util::parser::AssocOp;
7 use rustc_ast::{
8 self as ast, AngleBracketedArgs, AttrVec, BinOpKind, BindingMode, BlockCheckMode, Expr,
9 ExprKind, Item, ItemKind, Mutability, Param, Pat, PatKind, PathSegment, QSelf, Ty, TyKind,
10 };
11 use rustc_ast_pretty::pprust;
12 use rustc_data_structures::fx::FxHashSet;
13 use rustc_errors::{pluralize, struct_span_err};
14 use rustc_errors::{Applicability, DiagnosticBuilder, Handler, PResult};
15 use rustc_span::source_map::Spanned;
16 use rustc_span::symbol::{kw, Ident};
17 use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};
18
19 use tracing::{debug, trace};
20
21 const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments";
22
23 /// Creates a placeholder argument.
24 pub(super) fn dummy_arg(ident: Ident) -> Param {
25 let pat = P(Pat {
26 id: ast::DUMMY_NODE_ID,
27 kind: PatKind::Ident(BindingMode::ByValue(Mutability::Not), ident, None),
28 span: ident.span,
29 tokens: None,
30 });
31 let ty = Ty { kind: TyKind::Err, span: ident.span, id: ast::DUMMY_NODE_ID };
32 Param {
33 attrs: AttrVec::default(),
34 id: ast::DUMMY_NODE_ID,
35 pat,
36 span: ident.span,
37 ty: P(ty),
38 is_placeholder: false,
39 }
40 }
41
42 pub enum Error {
43 UselessDocComment,
44 }
45
46 impl Error {
47 fn span_err(self, sp: impl Into<MultiSpan>, handler: &Handler) -> DiagnosticBuilder<'_> {
48 match self {
49 Error::UselessDocComment => {
50 let mut err = struct_span_err!(
51 handler,
52 sp,
53 E0585,
54 "found a documentation comment that doesn't document anything",
55 );
56 err.help(
57 "doc comments must come before what they document, maybe a comment was \
58 intended with `//`?",
59 );
60 err
61 }
62 }
63 }
64 }
65
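/// Recovery trait used by `maybe_recover_from_bad_qpath` and
/// `maybe_recover_from_bad_qpath_stage_2`: converts the already-parsed base node
/// into a type (when possible) and rebuilds it from the recovered `<Ty>::AssocItem` path.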
66 pub(super) trait RecoverQPath: Sized + 'static {
67 const PATH_STYLE: PathStyle = PathStyle::Expr;
68 fn to_ty(&self) -> Option<P<Ty>>;
69 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
70 }
71
72 impl RecoverQPath for Ty {
73 const PATH_STYLE: PathStyle = PathStyle::Type;
74 fn to_ty(&self) -> Option<P<Ty>> {
75 Some(P(self.clone()))
76 }
77 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
78 Self { span: path.span, kind: TyKind::Path(qself, path), id: ast::DUMMY_NODE_ID }
79 }
80 }
81
82 impl RecoverQPath for Pat {
83 fn to_ty(&self) -> Option<P<Ty>> {
84 self.to_ty()
85 }
86 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
87 Self {
88 span: path.span,
89 kind: PatKind::Path(qself, path),
90 id: ast::DUMMY_NODE_ID,
91 tokens: None,
92 }
93 }
94 }
95
96 impl RecoverQPath for Expr {
97 fn to_ty(&self) -> Option<P<Ty>> {
98 self.to_ty()
99 }
100 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
101 Self {
102 span: path.span,
103 kind: ExprKind::Path(qself, path),
104 attrs: AttrVec::new(),
105 id: ast::DUMMY_NODE_ID,
106 tokens: None,
107 }
108 }
109 }
110
111 /// Control whether the closing delimiter should be consumed when calling `Parser::consume_block`.
112 crate enum ConsumeClosingDelim {
113 Yes,
114 No,
115 }
116
117 impl<'a> Parser<'a> {
118 pub(super) fn span_fatal_err<S: Into<MultiSpan>>(
119 &self,
120 sp: S,
121 err: Error,
122 ) -> DiagnosticBuilder<'a> {
123 err.span_err(sp, self.diagnostic())
124 }
125
126 pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
127 self.sess.span_diagnostic.struct_span_err(sp, m)
128 }
129
130 pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
131 self.sess.span_diagnostic.span_bug(sp, m)
132 }
133
134 pub(super) fn diagnostic(&self) -> &'a Handler {
135 &self.sess.span_diagnostic
136 }
137
138 pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
139 self.sess.source_map().span_to_snippet(span)
140 }
141
142 pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
143 let mut err = self.struct_span_err(
144 self.token.span,
145 &format!("expected identifier, found {}", super::token_descr(&self.token)),
146 );
147 let valid_follow = &[
148 TokenKind::Eq,
149 TokenKind::Colon,
150 TokenKind::Comma,
151 TokenKind::Semi,
152 TokenKind::ModSep,
153 TokenKind::OpenDelim(token::DelimToken::Brace),
154 TokenKind::OpenDelim(token::DelimToken::Paren),
155 TokenKind::CloseDelim(token::DelimToken::Brace),
156 TokenKind::CloseDelim(token::DelimToken::Paren),
157 ];
158 match self.token.ident() {
159 Some((ident, false))
160 if ident.is_raw_guess()
161 && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) =>
162 {
163 err.span_suggestion(
164 ident.span,
165 "you can escape reserved keywords to use them as identifiers",
166 format!("r#{}", ident.name),
167 Applicability::MaybeIncorrect,
168 );
169 }
170 _ => {}
171 }
172 if let Some(token_descr) = super::token_descr_opt(&self.token) {
173 err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
174 } else {
175 err.span_label(self.token.span, "expected identifier");
176 if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
177 err.span_suggestion(
178 self.token.span,
179 "remove this comma",
180 String::new(),
181 Applicability::MachineApplicable,
182 );
183 }
184 }
185 err
186 }
187
188 pub(super) fn expected_one_of_not_found(
189 &mut self,
190 edible: &[TokenKind],
191 inedible: &[TokenKind],
192 ) -> PResult<'a, bool /* recovered */> {
193 fn tokens_to_string(tokens: &[TokenType]) -> String {
194 let mut i = tokens.iter();
195 // This might be a sign we need a connect method on `Iterator`.
196 let b = i.next().map_or(String::new(), |t| t.to_string());
197 i.enumerate().fold(b, |mut b, (i, a)| {
198 if tokens.len() > 2 && i == tokens.len() - 2 {
199 b.push_str(", or ");
200 } else if tokens.len() == 2 && i == tokens.len() - 2 {
201 b.push_str(" or ");
202 } else {
203 b.push_str(", ");
204 }
205 b.push_str(&a.to_string());
206 b
207 })
208 }
209
210 let mut expected = edible
211 .iter()
212 .map(|x| TokenType::Token(x.clone()))
213 .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
214 .chain(self.expected_tokens.iter().cloned())
215 .collect::<Vec<_>>();
216 expected.sort_by_cached_key(|x| x.to_string());
217 expected.dedup();
218 let expect = tokens_to_string(&expected[..]);
219 let actual = super::token_descr(&self.token);
220 let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
221 let short_expect = if expected.len() > 6 {
222 format!("{} possible tokens", expected.len())
223 } else {
224 expect.clone()
225 };
226 (
227 format!("expected one of {}, found {}", expect, actual),
228 (self.prev_token.span.shrink_to_hi(), format!("expected one of {}", short_expect)),
229 )
230 } else if expected.is_empty() {
231 (
232 format!("unexpected token: {}", actual),
233 (self.prev_token.span, "unexpected token after this".to_string()),
234 )
235 } else {
236 (
237 format!("expected {}, found {}", expect, actual),
238 (self.prev_token.span.shrink_to_hi(), format!("expected {}", expect)),
239 )
240 };
241 self.last_unexpected_token_span = Some(self.token.span);
242 let mut err = self.struct_span_err(self.token.span, &msg_exp);
243 let sp = if self.token == token::Eof {
244 // This is EOF; don't want to point at the following char, but rather the last token.
245 self.prev_token.span
246 } else {
247 label_sp
248 };
249 match self.recover_closing_delimiter(
250 &expected
251 .iter()
252 .filter_map(|tt| match tt {
253 TokenType::Token(t) => Some(t.clone()),
254 _ => None,
255 })
256 .collect::<Vec<_>>(),
257 err,
258 ) {
259 Err(e) => err = e,
260 Ok(recovered) => {
261 return Ok(recovered);
262 }
263 }
264
265 if self.check_too_many_raw_str_terminators(&mut err) {
266 return Err(err);
267 }
268
269 let sm = self.sess.source_map();
270 if self.prev_token.span == DUMMY_SP {
271 // Account for macro context where the previous span might not be
272 // available to avoid incorrect output (#54841).
273 err.span_label(self.token.span, label_exp);
274 } else if !sm.is_multiline(self.token.span.shrink_to_hi().until(sp.shrink_to_lo())) {
275 // When the spans are in the same line, it means that the only content between
276 // them is whitespace, point at the found token in that case:
277 //
278 // X | () => { syntax error };
279 // | ^^^^^ expected one of 8 possible tokens here
280 //
281 // instead of having:
282 //
283 // X | () => { syntax error };
284 // | -^^^^^ unexpected token
285 // | |
286 // | expected one of 8 possible tokens here
287 err.span_label(self.token.span, label_exp);
288 } else {
289 err.span_label(sp, label_exp);
290 err.span_label(self.token.span, "unexpected token");
291 }
292 self.maybe_annotate_with_ascription(&mut err, false);
293 Err(err)
294 }
295
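/// Detects a raw string terminated with more `#`s than it started with, e.g. the
/// illustrative `r#"foo"##`, and suggests removing the extra `#`.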
296 fn check_too_many_raw_str_terminators(&mut self, err: &mut DiagnosticBuilder<'_>) -> bool {
297 match (&self.prev_token.kind, &self.token.kind) {
298 (
299 TokenKind::Literal(Lit {
300 kind: LitKind::StrRaw(n_hashes) | LitKind::ByteStrRaw(n_hashes),
301 ..
302 }),
303 TokenKind::Pound,
304 ) => {
305 err.set_primary_message("too many `#` when terminating raw string");
306 err.span_suggestion(
307 self.token.span,
308 "remove the extra `#`",
309 String::new(),
310 Applicability::MachineApplicable,
311 );
312 err.note(&format!("the raw string started with {} `#`s", n_hashes));
313 true
314 }
315 _ => false,
316 }
317 }
318
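/// If the last expression parsed ended in a type ascription (`<expr>: <type>`), adds
/// context to `err`: suggests `::` when the `:` was likely a mistyped path separator,
/// or a `;` when the ascription sits at the end of a line and `maybe_expected_semicolon`
/// is set.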
319 pub fn maybe_annotate_with_ascription(
320 &mut self,
321 err: &mut DiagnosticBuilder<'_>,
322 maybe_expected_semicolon: bool,
323 ) {
324 if let Some((sp, likely_path)) = self.last_type_ascription.take() {
325 let sm = self.sess.source_map();
326 let next_pos = sm.lookup_char_pos(self.token.span.lo());
327 let op_pos = sm.lookup_char_pos(sp.hi());
328
329 let allow_unstable = self.sess.unstable_features.is_nightly_build();
330
331 if likely_path {
332 err.span_suggestion(
333 sp,
334 "maybe write a path separator here",
335 "::".to_string(),
336 if allow_unstable {
337 Applicability::MaybeIncorrect
338 } else {
339 Applicability::MachineApplicable
340 },
341 );
342 self.sess.type_ascription_path_suggestions.borrow_mut().insert(sp);
343 } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
344 err.span_suggestion(
345 sp,
346 "try using a semicolon",
347 ";".to_string(),
348 Applicability::MaybeIncorrect,
349 );
350 } else if allow_unstable {
351 err.span_label(sp, "tried to parse a type due to this type ascription");
352 } else {
353 err.span_label(sp, "tried to parse a type due to this");
354 }
355 if allow_unstable {
356 // Give extra information about type ascription only if it's a nightly compiler.
357 err.note(
358 "`#![feature(type_ascription)]` lets you annotate an expression with a type: \
359 `<expr>: <type>`",
360 );
361 if !likely_path {
362 // Avoid giving too much info when it was likely an unrelated typo.
363 err.note(
364 "see issue #23416 <https://github.com/rust-lang/rust/issues/23416> \
365 for more information",
366 );
367 }
368 }
369 }
370 }
371
372 /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
373 /// passes through any errors encountered. Used for error recovery.
374 pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
375 if let Err(ref mut err) =
376 self.parse_seq_to_before_tokens(kets, SeqSep::none(), TokenExpectType::Expect, |p| {
377 Ok(p.parse_token_tree())
378 })
379 {
380 err.cancel();
381 }
382 }
383
384 /// This function checks if there are trailing angle brackets and produces
385 /// a diagnostic to suggest removing them.
386 ///
387 /// ```ignore (diagnostic)
388 /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
389 /// ^^ help: remove extra angle brackets
390 /// ```
391 ///
392 /// If `true` is returned, then trailing brackets were recovered, tokens were consumed
393 /// up until one of the tokens in `end` was encountered, and an error was emitted.
394 pub(super) fn check_trailing_angle_brackets(
395 &mut self,
396 segment: &PathSegment,
397 end: &[&TokenKind],
398 ) -> bool {
399 // This function is intended to be invoked after parsing a path segment where there are two
400 // cases:
401 //
402 // 1. A specific token is expected after the path segment.
403 // e.g. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
404 // `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
405 // 2. No specific token is expected after the path segment.
406 // e.g. `x.foo` (field access)
407 //
408 // This function is called after parsing `.foo` and before parsing the token `end` (if
409 // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
410 // `Foo::<Bar>`.
411
412 // We only care about trailing angle brackets if we previously parsed angle bracket
413 // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
414 // removed in this case:
415 //
416 // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
417 //
418 // This case is particularly tricky as we won't notice it just looking at the tokens -
419 // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
420 // have already been parsed):
421 //
422 // `x.foo::<u32>>>(3)`
423 let parsed_angle_bracket_args =
424 segment.args.as_ref().map(|args| args.is_angle_bracketed()).unwrap_or(false);
425
426 debug!(
427 "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
428 parsed_angle_bracket_args,
429 );
430 if !parsed_angle_bracket_args {
431 return false;
432 }
433
434 // Keep the span at the start so we can highlight the sequence of `>` characters to be
435 // removed.
436 let lo = self.token.span;
437
438 // We need to look-ahead to see if we have `>` characters without moving the cursor forward
439 // (since we might have the field access case and the characters we're eating are
440 // actual operators and not trailing characters - i.e. `x.foo >> 3`).
441 let mut position = 0;
442
443 // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
444 // many of each (so we can correctly pluralize our error messages) and continue to
445 // advance.
446 let mut number_of_shr = 0;
447 let mut number_of_gt = 0;
448 while self.look_ahead(position, |t| {
449 trace!("check_trailing_angle_brackets: t={:?}", t);
450 if *t == token::BinOp(token::BinOpToken::Shr) {
451 number_of_shr += 1;
452 true
453 } else if *t == token::Gt {
454 number_of_gt += 1;
455 true
456 } else {
457 false
458 }
459 }) {
460 position += 1;
461 }
462
463 // If we didn't find any trailing `>` characters, then we have nothing to error about.
464 debug!(
465 "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
466 number_of_gt, number_of_shr,
467 );
468 if number_of_gt < 1 && number_of_shr < 1 {
469 return false;
470 }
471
472 // Finally, double check that we have our end token as otherwise this is the
473 // second case.
474 if self.look_ahead(position, |t| {
475 trace!("check_trailing_angle_brackets: t={:?}", t);
476 end.contains(&&t.kind)
477 }) {
478 // Eat from where we started until the end token so that parsing can continue
479 // as if we didn't have those extra angle brackets.
480 self.eat_to_tokens(end);
481 let span = lo.until(self.token.span);
482
483 let total_num_of_gt = number_of_gt + number_of_shr * 2;
484 self.struct_span_err(
485 span,
486 &format!("unmatched angle bracket{}", pluralize!(total_num_of_gt)),
487 )
488 .span_suggestion(
489 span,
490 &format!("remove extra angle bracket{}", pluralize!(total_num_of_gt)),
491 String::new(),
492 Applicability::MachineApplicable,
493 )
494 .emit();
495 return true;
496 }
497 false
498 }
499
500 /// Check if a method call with an intended turbofish has been written without surrounding
501 /// angle brackets.
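///
/// For example (an illustrative rendering of the recovery below):
///
/// ```ignore (diagnostic)
/// foo.collect::Vec<_>();
///              ^^^^^^ help: surround the type parameters with angle brackets
/// ```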
502 pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut PathSegment) {
503 if token::ModSep == self.token.kind && segment.args.is_none() {
504 let snapshot = self.clone();
505 self.bump();
506 let lo = self.token.span;
507 match self.parse_angle_args() {
508 Ok(args) => {
509 let span = lo.to(self.prev_token.span);
510 // Detect trailing `>` like in `x.collect::Vec<_>>()`.
511 let mut trailing_span = self.prev_token.span.shrink_to_hi();
512 while self.token.kind == token::BinOp(token::Shr)
513 || self.token.kind == token::Gt
514 {
515 trailing_span = trailing_span.to(self.token.span);
516 self.bump();
517 }
518 if self.token.kind == token::OpenDelim(token::Paren) {
519 // Recover from bad turbofish: `foo.collect::Vec<_>()`.
520 let args = AngleBracketedArgs { args, span }.into();
521 segment.args = args;
522
523 self.struct_span_err(
524 span,
525 "generic parameters without surrounding angle brackets",
526 )
527 .multipart_suggestion(
528 "surround the type parameters with angle brackets",
529 vec![
530 (span.shrink_to_lo(), "<".to_string()),
531 (trailing_span, ">".to_string()),
532 ],
533 Applicability::MachineApplicable,
534 )
535 .emit();
536 } else {
537 // This doesn't look like an invalid turbofish, so we can't recover the parse state.
538 *self = snapshot;
539 }
540 }
541 Err(mut err) => {
542 // We couldn't parse generic parameters, so this is unlikely to be a turbofish. Rely on
543 // the generic parse error instead.
544 err.cancel();
545 *self = snapshot;
546 }
547 }
548 }
549 }
550
551 /// Checks whether a pair of chained operators looks like an attempt at chained comparison,
552 /// e.g. `1 < x <= 3`. If so, suggests either splitting the comparison into two or
553 /// parenthesizing the leftmost comparison.
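///
/// For example (illustrative): `1 < x <= 3` gets a "split the comparison into two"
/// suggestion (`1 < x && x <= 3`), while `x == y < z` gets a parenthesization
/// suggestion (`x == (y < z)`).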
554 fn attempt_chained_comparison_suggestion(
555 &mut self,
556 err: &mut DiagnosticBuilder<'_>,
557 inner_op: &Expr,
558 outer_op: &Spanned<AssocOp>,
559 ) -> bool /* advanced the cursor */ {
560 if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
561 if let ExprKind::Field(_, ident) = l1.kind {
562 if ident.as_str().parse::<i32>().is_err() && !matches!(r1.kind, ExprKind::Lit(_)) {
563 // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
564 // suggestion being the only one to apply is high.
565 return false;
566 }
567 }
568 let mut enclose = |left: Span, right: Span| {
569 err.multipart_suggestion(
570 "parenthesize the comparison",
571 vec![
572 (left.shrink_to_lo(), "(".to_string()),
573 (right.shrink_to_hi(), ")".to_string()),
574 ],
575 Applicability::MaybeIncorrect,
576 );
577 };
578 return match (op.node, &outer_op.node) {
579 // `x == y == z`
580 (BinOpKind::Eq, AssocOp::Equal) |
581 // `x < y < z` and friends.
582 (BinOpKind::Lt, AssocOp::Less | AssocOp::LessEqual) |
583 (BinOpKind::Le, AssocOp::LessEqual | AssocOp::Less) |
584 // `x > y > z` and friends.
585 (BinOpKind::Gt, AssocOp::Greater | AssocOp::GreaterEqual) |
586 (BinOpKind::Ge, AssocOp::GreaterEqual | AssocOp::Greater) => {
587 let expr_to_str = |e: &Expr| {
588 self.span_to_snippet(e.span)
589 .unwrap_or_else(|_| pprust::expr_to_string(&e))
590 };
591 err.span_suggestion_verbose(
592 inner_op.span.shrink_to_hi(),
593 "split the comparison into two",
594 format!(" && {}", expr_to_str(&r1)),
595 Applicability::MaybeIncorrect,
596 );
597 false // Keep the current parse behavior, where the AST is `(x < y) < z`.
598 }
599 // `x == y < z`
600 (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
601 // Consume `z`/outer-op-rhs.
602 let snapshot = self.clone();
603 match self.parse_expr() {
604 Ok(r2) => {
605 // We are sure that outer-op-rhs could be consumed, the suggestion is
606 // likely correct.
607 enclose(r1.span, r2.span);
608 true
609 }
610 Err(mut expr_err) => {
611 expr_err.cancel();
612 *self = snapshot;
613 false
614 }
615 }
616 }
617 // `x > y == z`
618 (BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge, AssocOp::Equal) => {
619 let snapshot = self.clone();
620 // At this point it is always valid to enclose the lhs in parentheses, no
621 // further checks are necessary.
622 match self.parse_expr() {
623 Ok(_) => {
624 enclose(l1.span, r1.span);
625 true
626 }
627 Err(mut expr_err) => {
628 expr_err.cancel();
629 *self = snapshot;
630 false
631 }
632 }
633 }
634 _ => false,
635 };
636 }
637 false
638 }
639
640 /// Produces an error if comparison operators are chained (RFC #558).
641 /// We only need to check the LHS, not the RHS, because all comparison ops have the same
642 /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`).
643 ///
644 /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
645 /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the
646 /// case.
647 ///
648 /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left
649 /// associative we can infer that we have:
650 ///
651 /// ```text
652 /// outer_op
653 /// / \
654 /// inner_op r2
655 /// / \
656 /// l1 r1
657 /// ```
658 pub(super) fn check_no_chained_comparison(
659 &mut self,
660 inner_op: &Expr,
661 outer_op: &Spanned<AssocOp>,
662 ) -> PResult<'a, Option<P<Expr>>> {
663 debug_assert!(
664 outer_op.node.is_comparison(),
665 "check_no_chained_comparison: {:?} is not comparison",
666 outer_op.node,
667 );
668
669 let mk_err_expr =
670 |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err, AttrVec::new())));
671
672 match inner_op.kind {
673 ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
674 let mut err = self.struct_span_err(
675 vec![op.span, self.prev_token.span],
676 "comparison operators cannot be chained",
677 );
678
679 let suggest = |err: &mut DiagnosticBuilder<'_>| {
680 err.span_suggestion_verbose(
681 op.span.shrink_to_lo(),
682 TURBOFISH,
683 "::".to_string(),
684 Applicability::MaybeIncorrect,
685 );
686 };
687
688 // Include `<` to provide this recommendation even in a case like
689 // `Foo<Bar<Baz<Qux, ()>>>`
690 if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Less
691 || outer_op.node == AssocOp::Greater
692 {
693 if outer_op.node == AssocOp::Less {
694 let snapshot = self.clone();
695 self.bump();
696 // So far we have parsed `foo<bar<`, consume the rest of the type args.
697 let modifiers =
698 [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
699 self.consume_tts(1, &modifiers[..]);
700
701 if !&[token::OpenDelim(token::Paren), token::ModSep]
702 .contains(&self.token.kind)
703 {
704 // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
705 // parser and bail out.
706 *self = snapshot.clone();
707 }
708 }
709 return if token::ModSep == self.token.kind {
710 // We have some certainty that this was a bad turbofish at this point.
711 // `foo< bar >::`
712 suggest(&mut err);
713
714 let snapshot = self.clone();
715 self.bump(); // `::`
716
717 // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
718 match self.parse_expr() {
719 Ok(_) => {
720 // 99% certain that the suggestion is correct, continue parsing.
721 err.emit();
722 // FIXME: actually check that the two expressions in the binop are
723 // paths and resynthesize new fn call expression instead of using
724 // `ExprKind::Err` placeholder.
725 mk_err_expr(self, inner_op.span.to(self.prev_token.span))
726 }
727 Err(mut expr_err) => {
728 expr_err.cancel();
729 // Not entirely sure now, but we bubble the error up with the
730 // suggestion.
731 *self = snapshot;
732 Err(err)
733 }
734 }
735 } else if token::OpenDelim(token::Paren) == self.token.kind {
736 // We have high certainty that this was a bad turbofish at this point.
737 // `foo< bar >(`
738 suggest(&mut err);
739 // Consume the fn call arguments.
740 match self.consume_fn_args() {
741 Err(()) => Err(err),
742 Ok(()) => {
743 err.emit();
744 // FIXME: actually check that the two expressions in the binop are
745 // paths and resynthesize new fn call expression instead of using
746 // `ExprKind::Err` placeholder.
747 mk_err_expr(self, inner_op.span.to(self.prev_token.span))
748 }
749 }
750 } else {
751 if !matches!(l1.kind, ExprKind::Lit(_))
752 && !matches!(r1.kind, ExprKind::Lit(_))
753 {
754 // All we know is that this is `foo < bar >` and *nothing* else. Try to
755 // be helpful, but don't attempt to recover.
756 err.help(TURBOFISH);
757 err.help("or use `(...)` if you meant to specify fn arguments");
758 }
759
760 // If it looks like a genuine attempt to chain operators (as opposed to a
761 // misformatted turbofish, for instance), suggest a correct form.
762 if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
763 {
764 err.emit();
765 mk_err_expr(self, inner_op.span.to(self.prev_token.span))
766 } else {
767 // These cases cause too many knock-down errors, bail out (#61329).
768 Err(err)
769 }
770 };
771 }
772 let recover =
773 self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
774 err.emit();
775 if recover {
776 return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
777 }
778 }
779 _ => {}
780 }
781 Ok(None)
782 }
783
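/// Consumes what looks like a parenthesized argument list, from the current `(` up to
/// the matching `)`. Rolls the parser back and returns `Err(())` if `Eof` is reached
/// before the arguments are closed.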
784 fn consume_fn_args(&mut self) -> Result<(), ()> {
785 let snapshot = self.clone();
786 self.bump(); // `(`
787
788 // Consume the fn call arguments.
789 let modifiers =
790 [(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)];
791 self.consume_tts(1, &modifiers[..]);
792
793 if self.token.kind == token::Eof {
794 // Not entirely sure that what we consumed were fn arguments, so roll back.
795 *self = snapshot;
796 Err(())
797 } else {
798 // 99% certain that the suggestion is correct, continue parsing.
799 Ok(())
800 }
801 }
802
803 pub(super) fn maybe_report_ambiguous_plus(
804 &mut self,
805 allow_plus: AllowPlus,
806 impl_dyn_multi: bool,
807 ty: &Ty,
808 ) {
809 if matches!(allow_plus, AllowPlus::No) && impl_dyn_multi {
810 let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
811 self.struct_span_err(ty.span, "ambiguous `+` in a type")
812 .span_suggestion(
813 ty.span,
814 "use parentheses to disambiguate",
815 sum_with_parens,
816 Applicability::MachineApplicable,
817 )
818 .emit();
819 }
820 }
821
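/// Recovers from a `+` following a type that is not a bare path, e.g. the illustrative
/// `&Foo + Send`, by parsing the bounds and suggesting parentheses such as
/// `&(Foo + Send)` (error E0178).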
822 pub(super) fn maybe_recover_from_bad_type_plus(
823 &mut self,
824 allow_plus: AllowPlus,
825 ty: &Ty,
826 ) -> PResult<'a, ()> {
827 // Do not add `+` to expected tokens.
828 if matches!(allow_plus, AllowPlus::No) || !self.token.is_like_plus() {
829 return Ok(());
830 }
831
832 self.bump(); // `+`
833 let bounds = self.parse_generic_bounds(None)?;
834 let sum_span = ty.span.to(self.prev_token.span);
835
836 let mut err = struct_span_err!(
837 self.sess.span_diagnostic,
838 sum_span,
839 E0178,
840 "expected a path on the left-hand side of `+`, not `{}`",
841 pprust::ty_to_string(ty)
842 );
843
844 match ty.kind {
845 TyKind::Rptr(ref lifetime, ref mut_ty) => {
846 let sum_with_parens = pprust::to_string(|s| {
847 s.s.word("&");
848 s.print_opt_lifetime(lifetime);
849 s.print_mutability(mut_ty.mutbl, false);
850 s.popen();
851 s.print_type(&mut_ty.ty);
852 s.print_type_bounds(" +", &bounds);
853 s.pclose()
854 });
855 err.span_suggestion(
856 sum_span,
857 "try adding parentheses",
858 sum_with_parens,
859 Applicability::MachineApplicable,
860 );
861 }
862 TyKind::Ptr(..) | TyKind::BareFn(..) => {
863 err.span_label(sum_span, "perhaps you forgot parentheses?");
864 }
865 _ => {
866 err.span_label(sum_span, "expected a path");
867 }
868 }
869 err.emit();
870 Ok(())
871 }
872
873 /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
874 /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
875 /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
876 pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
877 &mut self,
878 base: P<T>,
879 allow_recovery: bool,
880 ) -> PResult<'a, P<T>> {
881 // Do not add `::` to expected tokens.
882 if allow_recovery && self.token == token::ModSep {
883 if let Some(ty) = base.to_ty() {
884 return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
885 }
886 }
887 Ok(base)
888 }
889
890 /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
891 /// combines them into a `<Ty>::AssocItem` expression/pattern/type.
892 pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
893 &mut self,
894 ty_span: Span,
895 ty: P<Ty>,
896 ) -> PResult<'a, P<T>> {
897 self.expect(&token::ModSep)?;
898
899 let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP };
900 self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
901 path.span = ty_span.to(self.prev_token.span);
902
903 let ty_str = self.span_to_snippet(ty_span).unwrap_or_else(|_| pprust::ty_to_string(&ty));
904 self.struct_span_err(path.span, "missing angle brackets in associated item path")
905 .span_suggestion(
906 // This is a best-effort recovery.
907 path.span,
908 "try",
909 format!("<{}>::{}", ty_str, pprust::path_to_string(&path)),
910 Applicability::MaybeIncorrect,
911 )
912 .emit();
913
914 let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
915 Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
916 }
917
918 pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
919 if self.eat(&token::Semi) {
920 let mut err = self.struct_span_err(self.prev_token.span, "expected item, found `;`");
921 err.span_suggestion_short(
922 self.prev_token.span,
923 "remove this semicolon",
924 String::new(),
925 Applicability::MachineApplicable,
926 );
927 if !items.is_empty() {
928 let previous_item = &items[items.len() - 1];
929 let previous_item_kind_name = match previous_item.kind {
930 // Say "braced struct" because tuple-structs and
931 // braceless-empty-struct declarations do take a semicolon.
932 ItemKind::Struct(..) => Some("braced struct"),
933 ItemKind::Enum(..) => Some("enum"),
934 ItemKind::Trait(..) => Some("trait"),
935 ItemKind::Union(..) => Some("union"),
936 _ => None,
937 };
938 if let Some(name) = previous_item_kind_name {
939 err.help(&format!("{} declarations are not followed by a semicolon", name));
940 }
941 }
942 err.emit();
943 true
944 } else {
945 false
946 }
947 }
948
949 /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
950 /// closing delimiter.
951 pub(super) fn unexpected_try_recover(
952 &mut self,
953 t: &TokenKind,
954 ) -> PResult<'a, bool /* recovered */> {
955 let token_str = pprust::token_kind_to_string(t);
956 let this_token_str = super::token_descr(&self.token);
957 let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
958 // Point at the end of the macro call when reaching end of macro arguments.
959 (token::Eof, Some(_)) => {
960 let sp = self.sess.source_map().next_point(self.token.span);
961 (sp, sp)
962 }
963 // We don't want to point at the following span after DUMMY_SP.
964 // This happens when the parser finds an empty TokenStream.
965 _ if self.prev_token.span == DUMMY_SP => (self.token.span, self.token.span),
966 // EOF, don't want to point at the following char, but rather the last token.
967 (token::Eof, None) => (self.prev_token.span, self.token.span),
968 _ => (self.prev_token.span.shrink_to_hi(), self.token.span),
969 };
970 let msg = format!(
971 "expected `{}`, found {}",
972 token_str,
973 match (&self.token.kind, self.subparser_name) {
974 (token::Eof, Some(origin)) => format!("end of {}", origin),
975 _ => this_token_str,
976 },
977 );
978 let mut err = self.struct_span_err(sp, &msg);
979 let label_exp = format!("expected `{}`", token_str);
980 match self.recover_closing_delimiter(&[t.clone()], err) {
981 Err(e) => err = e,
982 Ok(recovered) => {
983 return Ok(recovered);
984 }
985 }
986 let sm = self.sess.source_map();
987 if !sm.is_multiline(prev_sp.until(sp)) {
988 // When the spans are in the same line, it means that the only content
989 // between them is whitespace, point only at the found token.
990 err.span_label(sp, label_exp);
991 } else {
992 err.span_label(prev_sp, label_exp);
993 err.span_label(sp, "unexpected token");
994 }
995 Err(err)
996 }
997
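/// Expects a `;`, recovering from common mistakes: a `,` or `:` typed in place of `;`
/// (e.g. `let x = 32:`), a stray `,`/`:` starting a new line after a method-call chain,
/// or a `;` missing entirely at the end of a statement (`let x = 32` followed by
/// `let y = 42;`).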
998 pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
999 if self.eat(&token::Semi) {
1000 return Ok(());
1001 }
1002 let sm = self.sess.source_map();
1003 let msg = format!("expected `;`, found {}", super::token_descr(&self.token));
1004 let appl = Applicability::MachineApplicable;
1005 if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
1006 // Likely inside a macro, can't provide meaningful suggestions.
1007 return self.expect(&token::Semi).map(drop);
1008 } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
1009 // The current token is in the same line as the prior token, not recoverable.
1010 } else if [token::Comma, token::Colon].contains(&self.token.kind)
1011 && self.prev_token.kind == token::CloseDelim(token::Paren)
1012 {
1013 // Likely typo: The current token is on a new line and is expected to be
1014 // `.`, `;`, `?`, or an operator after a close delimiter token.
1015 //
1016 // let a = std::process::Command::new("echo")
1017 // .arg("1")
1018 // ,arg("2")
1019 // ^
1020 // https://github.com/rust-lang/rust/issues/72253
1021 self.expect(&token::Semi)?;
1022 return Ok(());
1023 } else if self.look_ahead(1, |t| {
1024 t == &token::CloseDelim(token::Brace) || t.can_begin_expr() && t.kind != token::Colon
1025 }) && [token::Comma, token::Colon].contains(&self.token.kind)
1026 {
1027 // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
1028 // either `,` or `:`, and the next token could either start a new statement or is a
1029 // block close. For example:
1030 //
1031 // let x = 32:
1032 // let y = 42;
1033 self.bump();
1034 let sp = self.prev_token.span;
1035 self.struct_span_err(sp, &msg)
1036 .span_suggestion_short(sp, "change this to `;`", ";".to_string(), appl)
1037 .emit();
1038 return Ok(());
1039 } else if self.look_ahead(0, |t| {
1040 t == &token::CloseDelim(token::Brace)
1041 || (
1042 t.can_begin_expr() && t != &token::Semi && t != &token::Pound
1043 // Avoid triggering with too many trailing `#` in raw string.
1044 )
1045 }) {
1046 // Missing semicolon typo. This is triggered if the next token could either start a
1047 // new statement or is a block close. For example:
1048 //
1049 // let x = 32
1050 // let y = 42;
1051 let sp = self.prev_token.span.shrink_to_hi();
1052 self.struct_span_err(sp, &msg)
1053 .span_label(self.token.span, "unexpected token")
1054 .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl)
1055 .emit();
1056 return Ok(());
1057 }
1058 self.expect(&token::Semi).map(drop) // Error unconditionally
1059 }
1060
1061 /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
1062 /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
1063 pub(super) fn recover_incorrect_await_syntax(
1064 &mut self,
1065 lo: Span,
1066 await_sp: Span,
1067 attrs: AttrVec,
1068 ) -> PResult<'a, P<Expr>> {
1069 let (hi, expr, is_question) = if self.token == token::Not {
1070 // Handle `await!(<expr>)`.
1071 self.recover_await_macro()?
1072 } else {
1073 self.recover_await_prefix(await_sp)?
1074 };
1075 let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
1076 let expr = self.mk_expr(lo.to(sp), ExprKind::Await(expr), attrs);
1077 self.maybe_recover_from_bad_qpath(expr, true)
1078 }
1079
1080 fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
1081 self.expect(&token::Not)?;
1082 self.expect(&token::OpenDelim(token::Paren))?;
1083 let expr = self.parse_expr()?;
1084 self.expect(&token::CloseDelim(token::Paren))?;
1085 Ok((self.prev_token.span, expr, false))
1086 }
1087
1088 fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
1089 let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
1090 let expr = if self.token == token::OpenDelim(token::Brace) {
1091 // Handle `await { <expr> }`.
1092 // This needs to be handled separately from the next arm to avoid
1093 // interpreting `await { <expr> }?` as `<expr>?.await`.
1094 self.parse_block_expr(None, self.token.span, BlockCheckMode::Default, AttrVec::new())
1095 } else {
1096 self.parse_expr()
1097 }
1098 .map_err(|mut err| {
1099 err.span_label(await_sp, "while parsing this incorrect await expression");
1100 err
1101 })?;
1102 Ok((expr.span, expr, is_question))
1103 }
1104
1105 fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
1106 let expr_str =
1107 self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(&expr));
1108 let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
1109 let sp = lo.to(hi);
1110 let app = match expr.kind {
1111 ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
1112 _ => Applicability::MachineApplicable,
1113 };
1114 self.struct_span_err(sp, "incorrect use of `await`")
1115 .span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
1116 .emit();
1117 sp
1118 }
1119
1120 /// If encountering `future.await()`, consumes and emits an error.
1121 pub(super) fn recover_from_await_method_call(&mut self) {
1122 if self.token == token::OpenDelim(token::Paren)
1123 && self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
1124 {
1125 // future.await()
1126 let lo = self.token.span;
1127 self.bump(); // (
1128 let sp = lo.to(self.token.span);
1129 self.bump(); // )
1130 self.struct_span_err(sp, "incorrect use of `await`")
1131 .span_suggestion(
1132 sp,
1133 "`await` is not a method call, remove the parentheses",
1134 String::new(),
1135 Applicability::MachineApplicable,
1136 )
1137 .emit();
1138 }
1139 }
1140
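/// Recovers from a 2015-edition `try!(<expr>)` invocation, where `try` is now a reserved
/// keyword: suggests the postfix `?` operator and, alternatively, the raw-identifier
/// form `r#try!(...)` to keep using the deprecated macro.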
1141 pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
1142 let is_try = self.token.is_keyword(kw::Try);
1143 let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
1144 let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(token::Paren)); //check for (
1145
1146 if is_try && is_questionmark && is_open {
1147 let lo = self.token.span;
1148 self.bump(); //remove try
1149 self.bump(); //remove !
1150 let try_span = lo.to(self.token.span); //we take the try!( span
1151 self.bump(); //remove (
1152 let is_empty = self.token == token::CloseDelim(token::Paren); //check if the block is empty
1153 self.consume_block(token::Paren, ConsumeClosingDelim::No); //eat the block
1154 let hi = self.token.span;
1155 self.bump(); //remove )
1156 let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro");
1157 err.note("in the 2018 edition `try` is a reserved keyword, and the `try!()` macro is deprecated");
1158 let prefix = if is_empty { "" } else { "alternatively, " };
1159 if !is_empty {
1160 err.multipart_suggestion(
1161 "you can use the `?` operator instead",
1162 vec![(try_span, "".to_owned()), (hi, "?".to_owned())],
1163 Applicability::MachineApplicable,
1164 );
1165 }
1166 err.span_suggestion(lo.shrink_to_lo(), &format!("{}you can still access the deprecated `try!()` macro using the \"raw identifier\" syntax", prefix), "r#".to_string(), Applicability::MachineApplicable);
1167 err.emit();
1168 Ok(self.mk_expr_err(lo.to(hi)))
1169 } else {
1170 Err(self.expected_expression_found()) // The user isn't trying to invoke the try! macro
1171 }
1172 }
1173
1174 /// Recovers a situation like `for ( $pat in $expr )`
1175 /// and suggests writing `for $pat in $expr` instead.
1176 ///
1177 /// This should be called before parsing the `$block`.
1178 pub(super) fn recover_parens_around_for_head(
1179 &mut self,
1180 pat: P<Pat>,
1181 expr: &Expr,
1182 begin_paren: Option<Span>,
1183 ) -> P<Pat> {
1184 match (&self.token.kind, begin_paren) {
1185 (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
1186 self.bump();
1187
1188 let pat_str = self
1189 // Remove the `(` from the span of the pattern:
1190 .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
1191 .unwrap_or_else(|_| pprust::pat_to_string(&pat));
1192
1193 self.struct_span_err(self.prev_token.span, "unexpected closing `)`")
1194 .span_label(begin_par_sp, "opening `(`")
1195 .span_suggestion(
1196 begin_par_sp.to(self.prev_token.span),
1197 "remove parenthesis in `for` loop",
1198 format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
1199 // With e.g. `for (x) in y)` this would replace `(x) in y)`
1200 // with `x) in y)` which is syntactically invalid.
1201 // However, this is prevented before we get here.
1202 Applicability::MachineApplicable,
1203 )
1204 .emit();
1205
1206 // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
1207 pat.and_then(|pat| match pat.kind {
1208 PatKind::Paren(pat) => pat,
1209 _ => P(pat),
1210 })
1211 }
1212 _ => pat,
1213 }
1214 }
1215
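/// Heuristically determines whether a parsed type ascription was more likely an attempt
/// at writing a path, e.g. `foo:bar(baz)`, `foo:bar::<baz>`, or `foo:bar::baz`, where
/// `::` was intended instead of `:`.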
1216 pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
1217 (self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish.
1218 self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()))
1219 || self.token.is_ident() &&
1220 match node {
1221 // `foo::` → `foo:` or `foo.bar::` → `foo.bar:`
1222 ast::ExprKind::Path(..) | ast::ExprKind::Field(..) => true,
1223 _ => false,
1224 } &&
1225 !self.token.is_reserved_ident() && // v `foo:bar(baz)`
1226 self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren))
1227 || self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) // `foo:bar {`
1228 || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
1229 self.look_ahead(2, |t| t == &token::Lt) &&
1230 self.look_ahead(3, |t| t.is_ident())
1231 || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
1232 self.look_ahead(2, |t| t.is_ident())
1233 || self.look_ahead(1, |t| t == &token::ModSep)
1234 && (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz`
1235 self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
1236 }
1237
1238 pub(super) fn recover_seq_parse_error(
1239 &mut self,
1240 delim: token::DelimToken,
1241 lo: Span,
1242 result: PResult<'a, P<Expr>>,
1243 ) -> P<Expr> {
1244 match result {
1245 Ok(x) => x,
1246 Err(mut err) => {
1247 err.emit();
1248 // Recover from parse error, callers expect the closing delim to be consumed.
1249 self.consume_block(delim, ConsumeClosingDelim::Yes);
1250 self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err, AttrVec::new())
1251 }
1252 }
1253 }
1254
1255 pub(super) fn recover_closing_delimiter(
1256 &mut self,
1257 tokens: &[TokenKind],
1258 mut err: DiagnosticBuilder<'a>,
1259 ) -> PResult<'a, bool> {
1260 let mut pos = None;
1261 // We want to use the last closing delim that would apply.
1262 for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
1263 if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
1264 && Some(self.token.span) > unmatched.unclosed_span
1265 {
1266 pos = Some(i);
1267 }
1268 }
1269 match pos {
1270 Some(pos) => {
1271 // Recover and assume that the detected unclosed delimiter was meant for
1272 // this location. Emit the diagnostic and act as if the delimiter was
1273 // present for the parser's sake.
1274
1275 // Don't attempt to recover from this unclosed delimiter more than once.
1276 let unmatched = self.unclosed_delims.remove(pos);
1277 let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
1278 if unmatched.found_delim.is_none() {
1279 // We encountered `Eof`, set this fact here to avoid complaining about missing
1280 // `fn main()` when we found a place to suggest the closing brace.
1281 *self.sess.reached_eof.borrow_mut() = true;
1282 }
1283
1284 // We want to suggest the inclusion of the closing delimiter where it makes
1285 // the most sense, which is immediately after the last token:
1286 //
1287 // {foo(bar {}}
1288 // - ^
1289 // | |
1290 // | help: `)` may belong here
1291 // |
1292 // unclosed delimiter
1293 if let Some(sp) = unmatched.unclosed_span {
1294 err.span_label(sp, "unclosed delimiter");
1295 }
1296 // Backticks should be removed to apply suggestions.
1297 let mut delim = delim.to_string();
1298 delim.retain(|c| c != '`');
1299 err.span_suggestion_short(
1300 self.prev_token.span.shrink_to_hi(),
1301 &format!("`{}` may belong here", delim),
1302 delim,
1303 Applicability::MaybeIncorrect,
1304 );
1305 if unmatched.found_delim.is_none() {
1306 // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
1307 // errors which would be emitted elsewhere in the parser and let other error
1308 // recovery consume the rest of the file.
1309 Err(err)
1310 } else {
1311 err.emit();
1312 self.expected_tokens.clear(); // Reduce the number of errors.
1313 Ok(true)
1314 }
1315 }
1316 _ => Err(err),
1317 }
1318 }
1319
1320 /// Eats tokens until we can be relatively sure we reached the end of the
1321 /// statement. This is something of a best-effort heuristic.
1322 ///
1323 /// We terminate when we find an unmatched `}` (without consuming it).
1324 pub(super) fn recover_stmt(&mut self) {
1325 self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
1326 }
1327
1328 /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
1329 /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
1330 /// approximate -- it can mean we break too early due to macros, but that
1331 /// should only lead to sub-optimal recovery, not inaccurate parsing).
1332 ///
1333 /// If `break_on_block` is `Break`, then we will stop consuming tokens
1334 /// after finding (and consuming) a brace-delimited block.
1335 pub(super) fn recover_stmt_(
1336 &mut self,
1337 break_on_semi: SemiColonMode,
1338 break_on_block: BlockMode,
1339 ) {
1340 let mut brace_depth = 0;
1341 let mut bracket_depth = 0;
1342 let mut in_block = false;
1343 debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", break_on_semi, break_on_block);
1344 loop {
1345 debug!("recover_stmt_ loop {:?}", self.token);
1346 match self.token.kind {
1347 token::OpenDelim(token::DelimToken::Brace) => {
1348 brace_depth += 1;
1349 self.bump();
1350 if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
1351 {
1352 in_block = true;
1353 }
1354 }
1355 token::OpenDelim(token::DelimToken::Bracket) => {
1356 bracket_depth += 1;
1357 self.bump();
1358 }
1359 token::CloseDelim(token::DelimToken::Brace) => {
1360 if brace_depth == 0 {
1361 debug!("recover_stmt_ return - close delim {:?}", self.token);
1362 break;
1363 }
1364 brace_depth -= 1;
1365 self.bump();
1366 if in_block && bracket_depth == 0 && brace_depth == 0 {
1367 debug!("recover_stmt_ return - block end {:?}", self.token);
1368 break;
1369 }
1370 }
1371 token::CloseDelim(token::DelimToken::Bracket) => {
1372 bracket_depth -= 1;
1373 if bracket_depth < 0 {
1374 bracket_depth = 0;
1375 }
1376 self.bump();
1377 }
1378 token::Eof => {
1379 debug!("recover_stmt_ return - Eof");
1380 break;
1381 }
1382 token::Semi => {
1383 self.bump();
1384 if break_on_semi == SemiColonMode::Break
1385 && brace_depth == 0
1386 && bracket_depth == 0
1387 {
1388 debug!("recover_stmt_ return - Semi");
1389 break;
1390 }
1391 }
1392 token::Comma
1393 if break_on_semi == SemiColonMode::Comma
1394 && brace_depth == 0
1395 && bracket_depth == 0 =>
1396 {
1397 debug!("recover_stmt_ return - Semi");
1398 break;
1399 }
1400 _ => self.bump(),
1401 }
1402 }
1403 }
1404
1405 pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
1406 if self.eat_keyword(kw::In) {
1407 // a common typo: `for _ in in bar {}`
1408 self.struct_span_err(self.prev_token.span, "expected iterable, found keyword `in`")
1409 .span_suggestion_short(
1410 in_span.until(self.prev_token.span),
1411 "remove the duplicated `in`",
1412 String::new(),
1413 Applicability::MachineApplicable,
1414 )
1415 .emit();
1416 }
1417 }
1418
1419 pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> {
1420 let token_str = super::token_descr(&self.token);
1421 let msg = &format!("expected `;` or `{{`, found {}", token_str);
1422 let mut err = self.struct_span_err(self.token.span, msg);
1423 err.span_label(self.token.span, "expected `;` or `{`");
1424 Err(err)
1425 }
1426
1427 pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
1428 if let token::DocComment(..) = self.token.kind {
1429 self.struct_span_err(
1430 self.token.span,
1431 "documentation comments cannot be applied to a function parameter's type",
1432 )
1433 .span_label(self.token.span, "doc comments are not allowed here")
1434 .emit();
1435 self.bump();
1436 } else if self.token == token::Pound
1437 && self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket))
1438 {
1439 let lo = self.token.span;
1440 // Skip every token until next possible arg.
1441 while self.token != token::CloseDelim(token::Bracket) {
1442 self.bump();
1443 }
1444 let sp = lo.to(self.token.span);
1445 self.bump();
1446 self.struct_span_err(sp, "attributes cannot be applied to a function parameter's type")
1447 .span_label(sp, "attributes are not allowed here")
1448 .emit();
1449 }
1450 }
1451
1452 pub(super) fn parameter_without_type(
1453 &mut self,
1454 err: &mut DiagnosticBuilder<'_>,
1455 pat: P<ast::Pat>,
1456 require_name: bool,
1457 first_param: bool,
1458 ) -> Option<Ident> {
1459 // If we find a pattern followed by an identifier, it could be an (incorrect)
1460 // C-style parameter declaration.
1461 if self.check_ident()
1462 && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren))
1463 {
1464 // `fn foo(String s) {}`
1465 let ident = self.parse_ident().unwrap();
1466 let span = pat.span.with_hi(ident.span.hi());
1467
1468 err.span_suggestion(
1469 span,
1470 "declare the type after the parameter binding",
1471 String::from("<identifier>: <type>"),
1472 Applicability::HasPlaceholders,
1473 );
1474 return Some(ident);
1475 } else if let PatKind::Ident(_, ident, _) = pat.kind {
1476 if require_name
1477 && (self.token == token::Comma
1478 || self.token == token::Lt
1479 || self.token == token::CloseDelim(token::Paren))
1480 {
1481 // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}`
1482 if first_param {
1483 err.span_suggestion(
1484 pat.span,
1485 "if this is a `self` type, give it a parameter name",
1486 format!("self: {}", ident),
1487 Applicability::MaybeIncorrect,
1488 );
1489 }
1490 // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to
1491 // `fn foo(HashMap: TypeName<u32>)`.
1492 if self.token != token::Lt {
1493 err.span_suggestion(
1494 pat.span,
1495 "if this is a parameter name, give it a type",
1496 format!("{}: TypeName", ident),
1497 Applicability::HasPlaceholders,
1498 );
1499 }
1500 err.span_suggestion(
1501 pat.span,
1502 "if this is a type, explicitly ignore the parameter name",
1503 format!("_: {}", ident),
1504 Applicability::MachineApplicable,
1505 );
1506 err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
1507
1508 // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name.
1509 return if self.token == token::Lt { None } else { Some(ident) };
1510 }
1511 }
1512 None
1513 }
1514
1515 pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
1516 let pat = self.parse_pat(Some("argument name"))?;
1517 self.expect(&token::Colon)?;
1518 let ty = self.parse_ty()?;
1519
1520 struct_span_err!(
1521 self.diagnostic(),
1522 pat.span,
1523 E0642,
1524 "patterns aren't allowed in methods without bodies",
1525 )
1526 .span_suggestion_short(
1527 pat.span,
1528 "give this argument a name or use an underscore to ignore it",
1529 "_".to_owned(),
1530 Applicability::MachineApplicable,
1531 )
1532 .emit();
1533
1534 // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
1535 let pat =
1536 P(Pat { kind: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID, tokens: None });
1537 Ok((pat, ty))
1538 }
1539
1540 pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
1541 let sp = param.pat.span;
1542 param.ty.kind = TyKind::Err;
1543 self.struct_span_err(sp, "unexpected `self` parameter in function")
1544 .span_label(sp, "must be the first parameter of an associated function")
1545 .emit();
1546 Ok(param)
1547 }
1548
1549 pub(super) fn consume_block(
1550 &mut self,
1551 delim: token::DelimToken,
1552 consume_close: ConsumeClosingDelim,
1553 ) {
1554 let mut brace_depth = 0;
1555 loop {
1556 if self.eat(&token::OpenDelim(delim)) {
1557 brace_depth += 1;
1558 } else if self.check(&token::CloseDelim(delim)) {
1559 if brace_depth == 0 {
1560 if let ConsumeClosingDelim::Yes = consume_close {
1561 // Some of the callers of this method expect to be able to parse the
1562 // closing delimiter themselves, so we leave it alone. Otherwise we advance
1563 // the parser.
1564 self.bump();
1565 }
1566 return;
1567 } else {
1568 self.bump();
1569 brace_depth -= 1;
1570 continue;
1571 }
1572 } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
1573 return;
1574 } else {
1575 self.bump();
1576 }
1577 }
1578 }
1579
1580 pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
1581 let (span, msg) = match (&self.token.kind, self.subparser_name) {
1582 (&token::Eof, Some(origin)) => {
1583 let sp = self.sess.source_map().next_point(self.token.span);
1584 (sp, format!("expected expression, found end of {}", origin))
1585 }
1586 _ => (
1587 self.token.span,
1588 format!("expected expression, found {}", super::token_descr(&self.token),),
1589 ),
1590 };
1591 let mut err = self.struct_span_err(span, &msg);
1592 let sp = self.sess.source_map().start_point(self.token.span);
1593 if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
1594 self.sess.expr_parentheses_needed(&mut err, *sp, None);
1595 }
1596 err.span_label(span, "expected expression");
1597 err
1598 }
1599
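/// Consumes tokens while the running depth `acc` stays positive, stopping at `Eof`.
/// Each entry in `modifier` maps a token kind to the amount it changes the depth by,
/// e.g. `[(token::Lt, 1), (token::Gt, -1)]` when skipping over generic arguments.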
1600 fn consume_tts(
1601 &mut self,
1602 mut acc: i64, // `i64` because malformed code can have more closing delims than opening.
1603 // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`.
1604 modifier: &[(token::TokenKind, i64)],
1605 ) {
1606 while acc > 0 {
1607 if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
1608 acc += *val;
1609 }
1610 if self.token.kind == token::Eof {
1611 break;
1612 }
1613 self.bump();
1614 }
1615 }
1616
1617 /// Replaces duplicated recovered parameters with the `_` pattern to avoid unnecessary errors.
1618 ///
1619 /// This is necessary because at this point we don't know whether we parsed a function with
1620 /// anonymous parameters or a function with names but no types. In order to minimize
1621 /// unnecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
1622 /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
1623 /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
1624 /// we deduplicate them to not complain about duplicated parameter names.
1625 pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
1626 let mut seen_inputs = FxHashSet::default();
1627 for input in fn_inputs.iter_mut() {
1628 let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
1629 (&input.pat.kind, &input.ty.kind)
1630 {
1631 Some(*ident)
1632 } else {
1633 None
1634 };
1635 if let Some(ident) = opt_ident {
1636 if seen_inputs.contains(&ident) {
1637 input.pat.kind = PatKind::Wild;
1638 }
1639 seen_inputs.insert(ident);
1640 }
1641 }
1642 }
1643 }