]> git.proxmox.com Git - rustc.git/blob - src/librustc_mir/build/matches/mod.rs
New upstream version 1.32.0~beta.2+dfsg1
[rustc.git] / src / librustc_mir / build / matches / mod.rs
1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! Code related to match expressions. These are sufficiently complex
12 //! to warrant their own module and submodules. :) This main module
13 //! includes the high-level algorithm, the submodules contain the
14 //! details.
15
16 use build::scope::{CachedBlock, DropKind};
17 use build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
18 use build::{BlockAnd, BlockAndExtension, Builder};
19 use build::{GuardFrame, GuardFrameLocal, LocalsForNode};
20 use hair::*;
21 use hair::pattern::PatternTypeProjections;
22 use rustc::hir;
23 use rustc::mir::*;
24 use rustc::ty::{self, Ty};
25 use rustc::ty::layout::VariantIdx;
26 use rustc_data_structures::bit_set::BitSet;
27 use rustc_data_structures::fx::FxHashMap;
28 use syntax::ast::{Name, NodeId};
29 use syntax_pos::Span;
30
31 // helper functions, broken out by category:
32 mod simplify;
33 mod test;
34 mod util;
35
36 use std::convert::TryFrom;
37
/// `ArmHasGuard` is isomorphic to a boolean flag. It indicates whether
/// a match arm has a guard expression attached to it (`pat if guard => ...`).
/// Guards affect how bindings are declared (see `ForGuard`).
#[derive(Copy, Clone, Debug)]
pub(crate) struct ArmHasGuard(pub bool);
42
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
    /// Lowers a `match` expression into MIR: evaluates the discriminant
    /// into a place, builds the decision tree that selects an arm, writes
    /// each arm body into `destination`, and joins all arms at a single
    /// `end_block`, which is returned.
    pub fn match_expr(
        &mut self,
        destination: &Place<'tcx>,
        span: Span,
        mut block: BasicBlock,
        discriminant: ExprRef<'tcx>,
        arms: Vec<Arm<'tcx>>,
    ) -> BlockAnd<()> {
        let tcx = self.hir.tcx();
        let discriminant_span = discriminant.span();
        let discriminant_place = unpack!(block = self.as_place(block, discriminant));

        // Matching on a `discriminant_place` with an uninhabited type doesn't
        // generate any memory reads by itself, and so if the place "expression"
        // contains unsafe operations like raw pointer dereferences or union
        // field projections, we wouldn't know to require an `unsafe` block
        // around a `match` equivalent to `std::intrinsics::unreachable()`.
        // See issue #47412 for this hole being discovered in the wild.
        //
        // HACK(eddyb) Work around the above issue by adding a dummy inspection
        // of `discriminant_place`, specifically by applying `ReadForMatch`.
        //
        // NOTE: ReadForMatch also checks that the discriminant is initialized.
        // This is currently needed to not allow matching on an uninitialized,
        // uninhabited value. If we get never patterns, those will check that
        // the place is initialized, and so this read would only be used to
        // check safety.

        let source_info = self.source_info(discriminant_span);
        self.cfg.push(block, Statement {
            source_info,
            kind: StatementKind::FakeRead(
                FakeReadCause::ForMatchedPlace,
                discriminant_place.clone(),
            ),
        });

        // One fresh block per arm; candidates branch here on a successful match.
        let mut arm_blocks = ArmBlocks {
            blocks: arms.iter().map(|_| self.cfg.start_new_block()).collect(),
        };

        // Get the arm bodies and their scopes, while declaring bindings.
        let arm_bodies: Vec<_> = arms.iter()
            .map(|arm| {
                // BUG: use arm lint level
                let body = self.hir.mirror(arm.body.clone());
                let scope = self.declare_bindings(
                    None,
                    body.span,
                    LintLevel::Inherited,
                    &arm.patterns[..],
                    ArmHasGuard(arm.guard.is_some()),
                    Some((Some(&discriminant_place), discriminant_span)),
                );
                (body, scope.unwrap_or(self.source_scope))
            })
            .collect();

        // Create one pre-binding block per candidate (plus a trailing one),
        // so candidates can be linked together with false edges. There is
        // one candidate per pattern, so an arm with an or-pattern
        // contributes several.
        let candidate_count = arms.iter().fold(0, |ac, c| ac + c.patterns.len());
        let pre_binding_blocks: Vec<_> = (0..candidate_count + 1)
            .map(|_| self.cfg.start_new_block())
            .collect();

        let mut has_guard = false;

        // assemble a list of candidates: there is one candidate per
        // pattern, which means there may be more than one candidate
        // *per arm*. These candidates are kept sorted such that the
        // highest priority candidate comes first in the list.
        // (i.e. same order as in source)

        let candidates: Vec<_> = arms.iter()
            .enumerate()
            .flat_map(|(arm_index, arm)| {
                arm.patterns
                    .iter()
                    .enumerate()
                    .map(move |(pat_index, pat)| (arm_index, pat_index, pat, arm.guard.clone()))
            })
            // Pair each candidate with its pre-binding block and the
            // *next* candidate's pre-binding block (for false edges).
            .zip(
                pre_binding_blocks
                    .iter()
                    .zip(pre_binding_blocks.iter().skip(1)),
            )
            .map(
                |(
                    (arm_index, pat_index, pattern, guard),
                    (pre_binding_block, next_candidate_pre_binding_block)
                )| {
                    has_guard |= guard.is_some();

                    // One might ask: why not build up the match pair such that it
                    // matches via `borrowed_input_temp.deref()` instead of
                    // using the `discriminant_place` directly, as it is doing here?
                    //
                    // The basic answer is that if you do that, then you end up with
                    // accesses to a shared borrow of the input and that conflicts with
                    // any arms that look like e.g.
                    //
                    // match Some(&4) {
                    //     ref mut foo => {
                    //         ... /* mutate `foo` in arm body */ ...
                    //     }
                    // }
                    //
                    // (Perhaps we could further revise the MIR
                    // construction here so that it only does a
                    // shared borrow at the outset and delays doing
                    // the mutable borrow until after the pattern is
                    // matched *and* the guard (if any) for the arm
                    // has been run.)

                    Candidate {
                        span: pattern.span,
                        match_pairs: vec![MatchPair::new(discriminant_place.clone(), pattern)],
                        bindings: vec![],
                        ascriptions: vec![],
                        guard,
                        arm_index,
                        pat_index,
                        pre_binding_block: *pre_binding_block,
                        next_candidate_pre_binding_block: *next_candidate_pre_binding_block,
                    }
                },
            )
            .collect();

        // The trailing pre-binding block has no successor candidate;
        // it can only be reached if no pattern matched, which is
        // impossible for an exhaustive match.
        let outer_source_info = self.source_info(span);
        self.cfg.terminate(
            *pre_binding_blocks.last().unwrap(),
            outer_source_info,
            TerminatorKind::Unreachable,
        );

        // Maps a place to the kind of Fake borrow that we want to perform on
        // it: either Shallow or Shared, depending on whether the place is
        // bound in the match, or just switched on.
        // If there are no match guards then we don't need any fake borrows,
        // so don't track them.
        let mut fake_borrows = if has_guard && tcx.generate_borrow_of_any_match_input() {
            Some(FxHashMap::default())
        } else {
            None
        };

        // Record (block, span) per candidate before `candidates` is consumed;
        // needed later by `add_fake_borrows`.
        let pre_binding_blocks: Vec<_> = candidates
            .iter()
            .map(|cand| (cand.pre_binding_block, cand.span))
            .collect();

        // this will generate code to test discriminant_place and
        // branch to the appropriate arm block
        let otherwise = self.match_candidates(
            discriminant_span,
            &mut arm_blocks,
            candidates,
            block,
            &mut fake_borrows,
        );

        if !otherwise.is_empty() {
            // All matches are exhaustive. However, because some matches
            // only have exponentially-large exhaustive decision trees, we
            // sometimes generate an inexhaustive decision tree.
            //
            // In that case, the inexhaustive tips of the decision tree
            // can't be reached - terminate them with an `unreachable`.
            let source_info = self.source_info(span);

            let mut otherwise = otherwise;
            otherwise.sort();
            otherwise.dedup(); // variant switches can introduce duplicate target blocks
            for block in otherwise {
                self.cfg
                    .terminate(block, source_info, TerminatorKind::Unreachable);
            }
        }

        if let Some(fake_borrows) = fake_borrows {
            self.add_fake_borrows(&pre_binding_blocks, fake_borrows, source_info, block);
        }

        // all the arm blocks will rejoin here
        let end_block = self.cfg.start_new_block();

        let outer_source_info = self.source_info(span);
        for (arm_index, (body, source_scope)) in arm_bodies.into_iter().enumerate() {
            let mut arm_block = arm_blocks.blocks[arm_index];
            // Re-enter the source scope we created the bindings in.
            self.source_scope = source_scope;
            unpack!(arm_block = self.into(destination, arm_block, body));
            self.cfg.terminate(
                arm_block,
                outer_source_info,
                TerminatorKind::Goto { target: end_block },
            );
        }
        self.source_scope = outer_source_info.scope;

        end_block.unit()
    }

    /// Lowers `let <irrefutable_pat> = <initializer>`. Fast paths exist for
    /// plain by-value bindings (`let x = ...`) and ascribed by-value
    /// bindings (`let x: T = ...`), which write the initializer directly
    /// into the binding's place; everything else falls back to
    /// `place_into_pattern`.
    pub(super) fn expr_into_pattern(
        &mut self,
        mut block: BasicBlock,
        irrefutable_pat: Pattern<'tcx>,
        initializer: ExprRef<'tcx>,
    ) -> BlockAnd<()> {
        match *irrefutable_pat.kind {
            // Optimize the case of `let x = ...` to write directly into `x`
            PatternKind::Binding {
                mode: BindingMode::ByValue,
                var,
                subpattern: None,
                ..
            } => {
                let place =
                    self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
                unpack!(block = self.into(&place, block, initializer));


                // Inject a fake read, see comments on `FakeReadCause::ForLet`.
                let source_info = self.source_info(irrefutable_pat.span);
                self.cfg.push(
                    block,
                    Statement {
                        source_info,
                        kind: StatementKind::FakeRead(FakeReadCause::ForLet, place),
                    },
                );

                self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
                block.unit()
            }

            // Optimize the case of `let x: T = ...` to write directly
            // into `x` and then require that `T == typeof(x)`.
            //
            // Weirdly, this is needed to prevent the
            // `intrinsic-move-val.rs` test case from crashing. That
            // test works with uninitialized values in a rather
            // dubious way, so it may be that the test is kind of
            // broken.
            PatternKind::AscribeUserType {
                subpattern: Pattern {
                    kind: box PatternKind::Binding {
                        mode: BindingMode::ByValue,
                        var,
                        subpattern: None,
                        ..
                    },
                    ..
                },
                user_ty: pat_ascription_ty,
                user_ty_span,
            } => {
                let place =
                    self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
                unpack!(block = self.into(&place, block, initializer));

                // Inject a fake read, see comments on `FakeReadCause::ForLet`.
                let pattern_source_info = self.source_info(irrefutable_pat.span);
                self.cfg.push(
                    block,
                    Statement {
                        source_info: pattern_source_info,
                        kind: StatementKind::FakeRead(FakeReadCause::ForLet, place.clone()),
                    },
                );

                // Emit the type ascription so region inference sees the
                // user-supplied type (invariantly) for this place.
                let ty_source_info = self.source_info(user_ty_span);
                self.cfg.push(
                    block,
                    Statement {
                        source_info: ty_source_info,
                        kind: StatementKind::AscribeUserType(
                            place,
                            ty::Variance::Invariant,
                            box pat_ascription_ty.user_ty(),
                        ),
                    },
                );

                self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
                block.unit()
            }
            _ => {
                // General case: evaluate the initializer into a place,
                // then destructure it through the pattern.
                let place = unpack!(block = self.as_place(block, initializer));
                self.place_into_pattern(block, irrefutable_pat, &place, true)
            }
        }
    }

    /// Destructures the value in `initializer` through `irrefutable_pat`,
    /// binding its variables. `set_match_place` records `initializer` as the
    /// matched place on each binding's `VarBindingForm` (used by borrowck
    /// diagnostics); it is true for `let`, where the place doesn't exist
    /// until this assignment.
    pub fn place_into_pattern(
        &mut self,
        mut block: BasicBlock,
        irrefutable_pat: Pattern<'tcx>,
        initializer: &Place<'tcx>,
        set_match_place: bool,
    ) -> BlockAnd<()> {
        // create a dummy candidate
        let mut candidate = Candidate {
            span: irrefutable_pat.span,
            match_pairs: vec![MatchPair::new(initializer.clone(), &irrefutable_pat)],
            bindings: vec![],
            ascriptions: vec![],
            guard: None,

            // since we don't call `match_candidates`, these fields are unused
            arm_index: 0,
            pat_index: 0,
            pre_binding_block: block,
            next_candidate_pre_binding_block: block,
        };

        // Simplify the candidate. Since the pattern is irrefutable, this should
        // always convert all match-pairs into bindings.
        unpack!(block = self.simplify_candidate(block, &mut candidate));

        if !candidate.match_pairs.is_empty() {
            span_bug!(
                candidate.match_pairs[0].pattern.span,
                "match pairs {:?} remaining after simplifying \
                 irrefutable pattern",
                candidate.match_pairs
            );
        }

        // for matches and function arguments, the place that is being matched
        // can be set when creating the variables. But the place for
        // let PATTERN = ... might not even exist until we do the assignment.
        // so we set it here instead
        if set_match_place {
            for binding in &candidate.bindings {
                let local = self.var_local_id(binding.var_id, OutsideGuard);

                if let Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
                    opt_match_place: Some((ref mut match_place, _)),
                    ..
                }))) = self.local_decls[local].is_user_variable
                {
                    *match_place = Some(initializer.clone());
                } else {
                    bug!("Let binding to non-user variable.")
                }
            }
        }

        self.ascribe_types(block, &candidate.ascriptions);

        // now apply the bindings, which will also declare the variables
        self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);

        block.unit()
    }

    /// Declares the bindings of the given patterns and returns the visibility
    /// scope for the bindings in these patterns, if such a scope had to be
    /// created. NOTE: Declaring the bindings should always be done in their
    /// drop scope.
    ///
    /// All patterns in `patterns` are assumed to bind the same set of
    /// variables (`num_patterns` copies of each local are declared, one
    /// per pattern); only the first pattern is actually walked.
    pub fn declare_bindings(
        &mut self,
        mut visibility_scope: Option<SourceScope>,
        scope_span: Span,
        lint_level: LintLevel,
        patterns: &[Pattern<'tcx>],
        has_guard: ArmHasGuard,
        opt_match_place: Option<(Option<&Place<'tcx>>, Span)>,
    ) -> Option<SourceScope> {
        assert!(
            !(visibility_scope.is_some() && lint_level.is_explicit()),
            "can't have both a visibility and a lint scope at the same time"
        );
        let mut scope = self.source_scope;
        let num_patterns = patterns.len();
        self.visit_bindings(
            &patterns[0],
            &PatternTypeProjections::none(),
            &mut |this, mutability, name, mode, var, span, ty, user_ty| {
                // Lazily create the visibility scope on the first binding,
                // so patterns without bindings return `None`.
                if visibility_scope.is_none() {
                    visibility_scope =
                        Some(this.new_source_scope(scope_span, LintLevel::Inherited, None));
                    // If we have lints, create a new source scope
                    // that marks the lints for the locals. See the comment
                    // on the `source_info` field for why this is needed.
                    if lint_level.is_explicit() {
                        scope = this.new_source_scope(scope_span, lint_level, None);
                    }
                }
                let source_info = SourceInfo { span, scope };
                let visibility_scope = visibility_scope.unwrap();
                this.declare_binding(
                    source_info,
                    visibility_scope,
                    mutability,
                    name,
                    mode,
                    num_patterns,
                    var,
                    ty,
                    user_ty,
                    has_guard,
                    opt_match_place.map(|(x, y)| (x.cloned(), y)),
                    patterns[0].span,
                );
            },
        );
        visibility_scope
    }

    /// Emits `StorageLive` for the local backing the binding `var`
    /// (resolved via `for_guard`), schedules the corresponding storage-dead
    /// drop at the variable's region scope, and returns the local as a
    /// `Place`.
    pub fn storage_live_binding(
        &mut self,
        block: BasicBlock,
        var: NodeId,
        span: Span,
        for_guard: ForGuard,
    ) -> Place<'tcx> {
        let local_id = self.var_local_id(var, for_guard);
        let source_info = self.source_info(span);
        self.cfg.push(
            block,
            Statement {
                source_info,
                kind: StatementKind::StorageLive(local_id),
            },
        );
        let place = Place::Local(local_id);
        let var_ty = self.local_decls[local_id].ty;
        let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
        let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
        self.schedule_drop(span, region_scope, &place, var_ty, DropKind::Storage);
        place
    }

    /// Schedules a *value* drop (as opposed to the storage drop scheduled by
    /// `storage_live_binding`) for the local backing `var`, at the
    /// variable's region scope.
    pub fn schedule_drop_for_binding(&mut self, var: NodeId, span: Span, for_guard: ForGuard) {
        let local_id = self.var_local_id(var, for_guard);
        let var_ty = self.local_decls[local_id].ty;
        let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
        let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
        self.schedule_drop(
            span,
            region_scope,
            &Place::Local(local_id),
            var_ty,
            DropKind::Value {
                cached_block: CachedBlock::default(),
            },
        );
    }

    /// Walks `pattern`, invoking `f` once for every variable binding it
    /// contains, threading through the user-type projections
    /// (`pattern_user_ty`) accumulated from enclosing ascriptions so each
    /// binding knows its user-supplied type, if any.
    pub(super) fn visit_bindings(
        &mut self,
        pattern: &Pattern<'tcx>,
        pattern_user_ty: &PatternTypeProjections<'tcx>,
        f: &mut impl FnMut(
            &mut Self,
            Mutability,
            Name,
            BindingMode,
            NodeId,
            Span,
            Ty<'tcx>,
            &PatternTypeProjections<'tcx>,
        ),
    ) {
        match *pattern.kind {
            PatternKind::Binding {
                mutability,
                name,
                mode,
                var,
                ty,
                ref subpattern,
                ..
            } => {
                let pattern_ref_binding; // sidestep temp lifetime limitations.
                let binding_user_ty = match mode {
                    BindingMode::ByValue => { pattern_user_ty }
                    BindingMode::ByRef(..) => {
                        // If this is a `ref` binding (e.g., `let ref
                        // x: T = ..`), then the type of `x` is not
                        // `T` but rather `&T`.
                        pattern_ref_binding = pattern_user_ty.ref_binding();
                        &pattern_ref_binding
                    }
                };

                f(self, mutability, name, mode, var, pattern.span, ty, binding_user_ty);
                if let Some(subpattern) = subpattern.as_ref() {
                    // `x @ subpat`: recurse into the subpattern too.
                    self.visit_bindings(subpattern, pattern_user_ty, f);
                }
            }
            PatternKind::Array {
                ref prefix,
                ref slice,
                ref suffix,
            }
            | PatternKind::Slice {
                ref prefix,
                ref slice,
                ref suffix,
            } => {
                let from = u32::try_from(prefix.len()).unwrap();
                let to = u32::try_from(suffix.len()).unwrap();
                for subpattern in prefix {
                    self.visit_bindings(subpattern, &pattern_user_ty.index(), f);
                }
                for subpattern in slice {
                    // The `..` part binds the subslice between prefix and suffix.
                    self.visit_bindings(subpattern, &pattern_user_ty.subslice(from, to), f);
                }
                for subpattern in suffix {
                    self.visit_bindings(subpattern, &pattern_user_ty.index(), f);
                }
            }
            PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {}
            PatternKind::Deref { ref subpattern } => {
                self.visit_bindings(subpattern, &pattern_user_ty.deref(), f);
            }
            PatternKind::AscribeUserType { ref subpattern, ref user_ty, user_ty_span } => {
                // This corresponds to something like
                //
                // ```
                // let A::<'a>(_): A<'static> = ...;
                // ```
                let subpattern_user_ty = pattern_user_ty.add_user_type(user_ty, user_ty_span);
                self.visit_bindings(subpattern, &subpattern_user_ty, f)
            }

            PatternKind::Leaf { ref subpatterns } => {
                for subpattern in subpatterns {
                    let subpattern_user_ty = pattern_user_ty.leaf(subpattern.field);
                    self.visit_bindings(&subpattern.pattern, &subpattern_user_ty, f);
                }
            }

            PatternKind::Variant { adt_def, substs: _, variant_index, ref subpatterns } => {
                for subpattern in subpatterns {
                    let subpattern_user_ty = pattern_user_ty.variant(
                        adt_def, variant_index, subpattern.field);
                    self.visit_bindings(&subpattern.pattern, &subpattern_user_ty, f);
                }
            }
        }
    }
}
590
/// List of blocks for each arm (and potentially other metadata in the
/// future). `blocks[i]` is the entry block for the body of arm `i`;
/// candidates branch here after their bindings and guard succeed.
struct ArmBlocks {
    blocks: Vec<BasicBlock>,
}
596
/// A single pattern from a match arm, together with everything that must
/// hold for it to apply. An arm with several `|`-patterns produces several
/// candidates, all sharing the same `arm_index`.
#[derive(Clone, Debug)]
pub struct Candidate<'pat, 'tcx: 'pat> {
    // span of the original pattern that gave rise to this candidate
    span: Span,

    // all of these must be satisfied...
    match_pairs: Vec<MatchPair<'pat, 'tcx>>,

    // ...these bindings established...
    bindings: Vec<Binding<'tcx>>,

    // ...these types asserted...
    ascriptions: Vec<Ascription<'tcx>>,

    // ...and the guard must be evaluated...
    guard: Option<Guard<'tcx>>,

    // ...and then we branch to arm with this index.
    arm_index: usize,

    // ...and the blocks used to add false edges between candidates
    pre_binding_block: BasicBlock,
    next_candidate_pre_binding_block: BasicBlock,

    // This uniquely identifies this candidate *within* the arm.
    pat_index: usize,
}
624
/// A variable binding established by a pattern: the named variable
/// `var_id`/`name` is bound to the place `source` with the given
/// mutability and binding mode (by-value or by-ref).
#[derive(Clone, Debug)]
struct Binding<'tcx> {
    span: Span,
    source: Place<'tcx>,
    name: Name,
    var_id: NodeId,
    var_ty: Ty<'tcx>,
    mutability: Mutability,
    binding_mode: BindingMode<'tcx>,
}
635
/// Indicates that the type of `source` must be a subtype of the
/// user-given type `user_ty`; this is basically a no-op but can
/// influence region inference.
#[derive(Clone, Debug)]
struct Ascription<'tcx> {
    span: Span,
    source: Place<'tcx>,
    user_ty: PatternTypeProjection<'tcx>,
}
645
/// One unresolved obligation of a candidate: a place paired with the
/// pattern it still has to match.
#[derive(Clone, Debug)]
pub struct MatchPair<'pat, 'tcx: 'pat> {
    // this place...
    place: Place<'tcx>,

    // ... must match this pattern.
    pattern: &'pat Pattern<'tcx>,

    // HACK(eddyb) This is used to toggle whether a Slice pattern
    // has had its length checked. This is only necessary because
    // the "rest" part of the pattern right now has type &[T] and
    // as such, it requires an Rvalue::Slice to be generated.
    // See RFC 495 / issue #23121 for the eventual (proper) solution.
    slice_len_checked: bool,
}
661
/// The kind of runtime test needed to make progress on a candidate's
/// match pair. Built by the `test` submodule.
#[derive(Clone, Debug, PartialEq)]
enum TestKind<'tcx> {
    // test which variant of an enum the place holds
    Switch {
        adt_def: &'tcx ty::AdtDef,
        variants: BitSet<VariantIdx>,
    },

    // test which of a set of integer constants the place is equal to
    // (`indices` maps each constant back to its position in `options`)
    SwitchInt {
        switch_ty: Ty<'tcx>,
        options: Vec<u128>,
        indices: FxHashMap<&'tcx ty::Const<'tcx>, usize>,
    },

    // test for equality
    Eq {
        value: &'tcx ty::Const<'tcx>,
        ty: Ty<'tcx>,
    },

    // test whether the value falls within an inclusive or exclusive range
    Range {
        lo: &'tcx ty::Const<'tcx>,
        hi: &'tcx ty::Const<'tcx>,
        ty: Ty<'tcx>,
        end: hir::RangeEnd,
    },

    // test that the length of the slice relates to `len` via `op`
    // (equality, or `>=` for variable-length slice patterns)
    Len {
        len: u64,
        op: BinOp,
    },
}
697
/// A test to perform, plus the span it should be attributed to.
#[derive(Debug)]
pub struct Test<'tcx> {
    span: Span,
    kind: TestKind<'tcx>,
}
703
704 ///////////////////////////////////////////////////////////////////////////
705 // Main matching algorithm
706
707 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
    /// The main match algorithm. It begins with a set of candidates
    /// `candidates` and has the job of generating code to determine
    /// which of these candidates, if any, is the correct one. The
    /// candidates are sorted such that the first item in the list
    /// has the highest priority. When a candidate is found to match
    /// the value, we will generate a branch to the appropriate
    /// block found in `arm_blocks`.
    ///
    /// The return value is a list of "otherwise" blocks. These are
    /// points in execution where we found that *NONE* of the
    /// candidates apply. In principle, this means that the input
    /// list was not exhaustive, though at present we sometimes are
    /// not smart enough to recognize all exhaustive inputs.
    ///
    /// It might be surprising that the input can be inexhaustive.
    /// Indeed, initially, it is not, because all matches are
    /// exhaustive in Rust. But during processing we sometimes divide
    /// up the list of candidates and recurse with a non-exhaustive
    /// list. This is important to keep the size of the generated code
    /// under control. See `test_candidates` for more details.
    ///
    /// If `fake_borrows` is `Some`, then places which need fake borrows
    /// will be added to it.
    fn match_candidates<'pat>(
        &mut self,
        span: Span,
        arm_blocks: &mut ArmBlocks,
        mut candidates: Vec<Candidate<'pat, 'tcx>>,
        mut block: BasicBlock,
        fake_borrows: &mut Option<FxHashMap<Place<'tcx>, BorrowKind>>,
    ) -> Vec<BasicBlock> {
        debug!(
            "matched_candidate(span={:?}, block={:?}, candidates={:?})",
            span, block, candidates
        );

        // Start by simplifying candidates. Once this process is
        // complete, all the match pairs which remain require some
        // form of test, whether it be a switch or pattern comparison.
        for candidate in &mut candidates {
            unpack!(block = self.simplify_candidate(block, candidate));
        }

        // The candidates are sorted by priority. Check to see
        // whether the higher priority candidates (and hence at
        // the front of the vec) have satisfied all their match
        // pairs.
        let fully_matched = candidates
            .iter()
            .take_while(|c| c.match_pairs.is_empty())
            .count();
        debug!(
            "match_candidates: {:?} candidates fully matched",
            fully_matched
        );
        // After this split, `candidates` holds only the fully-matched
        // prefix; the rest still need testing.
        let mut unmatched_candidates = candidates.split_off(fully_matched);

        // Insert a *Shared* borrow of any places that are bound.
        if let Some(fake_borrows) = fake_borrows {
            for Binding { source, .. }
                in candidates.iter().flat_map(|candidate| &candidate.bindings)
            {
                fake_borrows.insert(source.clone(), BorrowKind::Shared);
            }
        }

        // A guarded candidate can fail at runtime, so candidates after the
        // first *unguarded* fully-matched one can never be reached through
        // this path; split them off as unreachable.
        let fully_matched_with_guard = candidates.iter().take_while(|c| c.guard.is_some()).count();

        let unreachable_candidates = if fully_matched_with_guard + 1 < candidates.len() {
            candidates.split_off(fully_matched_with_guard + 1)
        } else {
            vec![]
        };

        for candidate in candidates {
            // If so, apply any bindings, test the guard (if any), and
            // branch to the arm.
            if let Some(b) = self.bind_and_guard_matched_candidate(block, arm_blocks, candidate) {
                // `Some` means the guard may fail: continue building from
                // the guard's failure block.
                block = b;
            } else {
                // if None is returned, then any remaining candidates
                // are unreachable (at least not through this path).
                // Link them with false edges.
                debug!(
                    "match_candidates: add false edges for unreachable {:?} and unmatched {:?}",
                    unreachable_candidates, unmatched_candidates
                );
                for candidate in unreachable_candidates {
                    let source_info = self.source_info(candidate.span);
                    let target = self.cfg.start_new_block();
                    if let Some(otherwise) =
                        self.bind_and_guard_matched_candidate(target, arm_blocks, candidate)
                    {
                        self.cfg
                            .terminate(otherwise, source_info, TerminatorKind::Unreachable);
                    }
                }

                if unmatched_candidates.is_empty() {
                    return vec![];
                } else {
                    // Build the rest of the decision tree on a fresh
                    // (unreachable-from-here) block, for borrowck's sake.
                    let target = self.cfg.start_new_block();
                    return self.match_candidates(
                        span,
                        arm_blocks,
                        unmatched_candidates,
                        target,
                        &mut None,
                    );
                }
            }
        }

        // If there are no candidates that still need testing, we're done.
        // Since all matches are exhaustive, execution should never reach this point.
        if unmatched_candidates.is_empty() {
            return vec![block];
        }

        // Test candidates where possible.
        let (otherwise, tested_candidates) =
            self.test_candidates(span, arm_blocks, &unmatched_candidates, block, fake_borrows);

        // If the target candidates were exhaustive, then we are done.
        // But for borrowck continue build decision tree.

        // If all candidates were sorted into `target_candidates` somewhere, then
        // the initial set was inexhaustive.
        let untested_candidates = unmatched_candidates.split_off(tested_candidates);
        if untested_candidates.len() == 0 {
            return otherwise;
        }

        // Otherwise, let's process those remaining candidates.
        let join_block = self.join_otherwise_blocks(span, otherwise);
        self.match_candidates(span, arm_blocks, untested_candidates, join_block, &mut None)
    }
845
846 fn join_otherwise_blocks(&mut self, span: Span, mut otherwise: Vec<BasicBlock>) -> BasicBlock {
847 let source_info = self.source_info(span);
848 otherwise.sort();
849 otherwise.dedup(); // variant switches can introduce duplicate target blocks
850 if otherwise.len() == 1 {
851 otherwise[0]
852 } else {
853 let join_block = self.cfg.start_new_block();
854 for block in otherwise {
855 self.cfg.terminate(
856 block,
857 source_info,
858 TerminatorKind::Goto { target: join_block },
859 );
860 }
861 join_block
862 }
863 }
864
865 /// This is the most subtle part of the matching algorithm. At
866 /// this point, the input candidates have been fully simplified,
867 /// and so we know that all remaining match-pairs require some
868 /// sort of test. To decide what test to do, we take the highest
869 /// priority candidate (last one in the list) and extract the
870 /// first match-pair from the list. From this we decide what kind
871 /// of test is needed using `test`, defined in the `test` module.
872 ///
873 /// *Note:* taking the first match pair is somewhat arbitrary, and
874 /// we might do better here by choosing more carefully what to
875 /// test.
876 ///
877 /// For example, consider the following possible match-pairs:
878 ///
879 /// 1. `x @ Some(P)` -- we will do a `Switch` to decide what variant `x` has
880 /// 2. `x @ 22` -- we will do a `SwitchInt`
881 /// 3. `x @ 3..5` -- we will do a range test
882 /// 4. etc.
883 ///
884 /// Once we know what sort of test we are going to perform, this
885 /// test may also help us with other candidates. So we walk over
886 /// the candidates (from high to low priority) and check. This
887 /// gives us, for each outcome of the test, a transformed list of
888 /// candidates. For example, if we are testing the current
889 /// variant of `x.0`, and we have a candidate `{x.0 @ Some(v), x.1
890 /// @ 22}`, then we would have a resulting candidate of `{(x.0 as
891 /// Some).0 @ v, x.1 @ 22}`. Note that the first match-pair is now
892 /// simpler (and, in fact, irrefutable).
893 ///
894 /// But there may also be candidates that the test just doesn't
895 /// apply to. The classical example involves wildcards:
896 ///
897 /// ```
898 /// # let (x, y, z) = (true, true, true);
899 /// match (x, y, z) {
900 /// (true, _, true) => true, // (0)
901 /// (_, true, _) => true, // (1)
902 /// (false, false, _) => false, // (2)
903 /// (true, _, false) => false, // (3)
904 /// }
905 /// ```
906 ///
907 /// In that case, after we test on `x`, there are 2 overlapping candidate
908 /// sets:
909 ///
910 /// - If the outcome is that `x` is true, candidates 0, 1, and 3
911 /// - If the outcome is that `x` is false, candidates 1 and 2
912 ///
913 /// Here, the traditional "decision tree" method would generate 2
914 /// separate code-paths for the 2 separate cases.
915 ///
    /// In some cases, this duplication can create an exponential amount of
    /// code. This is most easily seen by noticing that this method terminates
    /// with precisely the reachable arms being reachable - but that problem
    /// is trivially NP-complete:
    ///
    /// ```rust
    ///     match (var0, var1, var2, var3, ..) {
    ///         (true, _, _, false, true, ...) => false,
    ///         (_, true, true, false, _, ...) => false,
    ///         (false, _, false, false, _, ...) => false,
    ///         ...
    ///         _ => true
    ///     }
    /// ```
    ///
    /// Here the last arm is reachable only if there is an assignment to
    /// the variables that does not match any of the literals. Therefore,
    /// compilation would take an exponential amount of time in some cases.
    ///
    /// That kind of exponential worst-case might not occur in practice, but
    /// our simplistic treatment of constants and guards would make it occur
    /// in very common situations - for example #29740:
    ///
    /// ```rust
    /// match x {
    ///     "foo" if foo_guard => ...,
    ///     "bar" if bar_guard => ...,
    ///     "baz" if baz_guard => ...,
    ///     ...
    /// }
    /// ```
    ///
    /// Here we first test the match-pair `x @ "foo"`, which is an `Eq` test.
    ///
    /// It might seem that we would end up with 2 disjoint candidate
    /// sets, consisting of the first candidate or the other 3, but our
    /// algorithm doesn't reason about "foo" being distinct from the other
    /// constants; it considers the latter arms to potentially match after
    /// both outcomes, which obviously leads to an exponential amount
    /// of tests.
    ///
    /// To avoid these kinds of problems, our algorithm tries to ensure
    /// the amount of generated tests is linear. When we do a k-way test,
    /// we return an additional "unmatched" set alongside the obvious `k`
    /// sets. When we encounter a candidate that would be present in more
    /// than one of the sets, we put it and all candidates below it into the
    /// "unmatched" set. This ensures these `k+1` sets are disjoint.
    ///
    /// After we perform our test, we branch into the appropriate candidate
    /// set and recurse with `match_candidates`. These sub-matches are
    /// obviously inexhaustive - as we discarded our otherwise set - so
    /// we set their continuation to do `match_candidates` on the
    /// "unmatched" set (which is again inexhaustive).
    ///
    /// If you apply this to the above test, you basically wind up
    /// with an if-else-if chain, testing each candidate in turn,
    /// which is precisely what we want.
    ///
    /// In addition to avoiding exponential-time blowups, this algorithm
    /// also has nice property that each guard and arm is only generated
    /// once.
    ///
    /// Returns the blocks where control flow goes if none of the tested
    /// candidates matched (the "otherwise" edges, which the caller chains
    /// onto the remaining candidates), together with the number of
    /// candidates consumed by the performed test.
    fn test_candidates<'pat>(
        &mut self,
        span: Span,
        arm_blocks: &mut ArmBlocks,
        candidates: &[Candidate<'pat, 'tcx>],
        block: BasicBlock,
        fake_borrows: &mut Option<FxHashMap<Place<'tcx>, BorrowKind>>,
    ) -> (Vec<BasicBlock>, usize) {
        // extract the match-pair from the highest priority candidate
        let match_pair = &candidates.first().unwrap().match_pairs[0];
        let mut test = self.test(match_pair);

        // most of the time, the test to perform is simply a function
        // of the main candidate; but for a test like SwitchInt, we
        // may want to add cases based on the candidates that are
        // available
        match test.kind {
            TestKind::SwitchInt {
                switch_ty,
                ref mut options,
                ref mut indices,
            } => {
                // Gather the constants tested by the candidates, in
                // priority order, stopping at the first candidate whose
                // constant cannot be folded into this switch.
                for candidate in candidates.iter() {
                    if !self.add_cases_to_switch(
                        &match_pair.place,
                        candidate,
                        switch_ty,
                        options,
                        indices,
                    ) {
                        break;
                    }
                }
            }
            TestKind::Switch {
                adt_def: _,
                ref mut variants,
            } => {
                // Likewise for enum variants: collect the variants the
                // candidates discriminate between.
                for candidate in candidates.iter() {
                    if !self.add_variants_to_switch(&match_pair.place, candidate, variants) {
                        break;
                    }
                }
            }
            _ => {}
        }

        // Insert a Shallow borrow of any places that is switched on.
        fake_borrows.as_mut().map(|fb| {
            fb.entry(match_pair.place.clone()).or_insert(BorrowKind::Shallow)
        });

        // perform the test, branching to one of N blocks. For each of
        // those N possible outcomes, create a (initially empty)
        // vector of candidates. Those are the candidates that still
        // apply if the test has that particular outcome.
        debug!(
            "match_candidates: test={:?} match_pair={:?}",
            test, match_pair
        );
        let target_blocks = self.perform_test(block, &match_pair.place, &test);
        let mut target_candidates: Vec<_> = (0..target_blocks.len()).map(|_| vec![]).collect();

        // Sort the candidates into the appropriate vector in
        // `target_candidates`. Note that at some point we may
        // encounter a candidate where the test is not relevant; at
        // that point, we stop sorting.
        let tested_candidates = candidates
            .iter()
            .take_while(|c| {
                self.sort_candidate(&match_pair.place, &test, c, &mut target_candidates)
            })
            .count();
        assert!(tested_candidates > 0); // at least the last candidate ought to be tested
        debug!("tested_candidates: {}", tested_candidates);
        debug!(
            "untested_candidates: {}",
            candidates.len() - tested_candidates
        );

        // For each outcome of test, process the candidates that still
        // apply. Collect a list of blocks where control flow will
        // branch if one of the `target_candidate` sets is not
        // exhaustive.
        let otherwise: Vec<_> = target_blocks
            .into_iter()
            .zip(target_candidates)
            .flat_map(|(target_block, target_candidates)| {
                self.match_candidates(
                    span,
                    arm_blocks,
                    target_candidates,
                    target_block,
                    fake_borrows,
                )
            })
            .collect();

        (otherwise, tested_candidates)
    }
1077
    /// Initializes each of the bindings from the candidate by
    /// moving/copying/ref'ing the source as appropriate. Tests the
    /// guard, if any, and then branches to the arm. Returns the block
    /// for the case where the guard fails (`None` when there is no
    /// guard, since then there is no failure path).
    ///
    /// Note: we check earlier that if there is a guard, there cannot
    /// be move bindings. This isn't really important for the
    /// self-consistency of this fn, but the reason for it should be
    /// clear: after we've done the assignments, if there were move
    /// bindings, further tests would be a use-after-move (which would
    /// in turn be detected by the borrowck code that runs on the
    /// MIR).
    fn bind_and_guard_matched_candidate<'pat>(
        &mut self,
        mut block: BasicBlock,
        arm_blocks: &mut ArmBlocks,
        candidate: Candidate<'pat, 'tcx>,
    ) -> Option<BasicBlock> {
        debug!(
            "bind_and_guard_matched_candidate(block={:?}, candidate={:?})",
            block, candidate
        );

        // By this point every match-pair of the candidate must have been
        // simplified away or successfully tested.
        debug_assert!(candidate.match_pairs.is_empty());

        self.ascribe_types(block, &candidate.ascriptions);

        let arm_block = arm_blocks.blocks[candidate.arm_index];
        let candidate_source_info = self.source_info(candidate.span);

        self.cfg.terminate(
            block,
            candidate_source_info,
            TerminatorKind::Goto {
                target: candidate.pre_binding_block,
            },
        );

        // Continue in a fresh block reached from the candidate's
        // pre-binding block via a false edge; the imaginary target
        // points at the next candidate so borrowck does not assume this
        // candidate is always taken.
        block = self.cfg.start_new_block();
        self.cfg.terminate(
            candidate.pre_binding_block,
            candidate_source_info,
            TerminatorKind::FalseEdges {
                real_target: block,
                imaginary_targets: vec![candidate.next_candidate_pre_binding_block],
            },
        );

        // rust-lang/rust#27282: The `autoref` business deserves some
        // explanation here.
        //
        // The intent of the `autoref` flag is that when it is true,
        // then any pattern bindings of type T will map to a `&T`
        // within the context of the guard expression, but will
        // continue to map to a `T` in the context of the arm body. To
        // avoid surfacing this distinction in the user source code
        // (which would be a severe change to the language and require
        // far more revision to the compiler), when `autoref` is true,
        // then any occurrence of the identifier in the guard
        // expression will automatically get a deref op applied to it.
        //
        // So an input like:
        //
        // ```
        // let place = Foo::new();
        // match place { foo if inspect(foo)
        //     => feed(foo), ... }
        // ```
        //
        // will be treated as if it were really something like:
        //
        // ```
        // let place = Foo::new();
        // match place { Foo { .. } if { let tmp1 = &place; inspect(*tmp1) }
        //     => { let tmp2 = place; feed(tmp2) }, ... }
        // ```
        //
        // And an input like:
        //
        // ```
        // let place = Foo::new();
        // match place { ref mut foo if inspect(foo)
        //     => feed(foo), ... }
        // ```
        //
        // will be treated as if it were really something like:
        //
        // ```
        // let place = Foo::new();
        // match place { Foo { .. } if { let tmp1 = & &mut place; inspect(*tmp1) }
        //     => { let tmp2 = &mut place; feed(tmp2) }, ... }
        // ```
        //
        // In short, any pattern binding will always look like *some*
        // kind of `&T` within the guard at least in terms of how the
        // MIR-borrowck views it, and this will ensure that guard
        // expressions cannot mutate the match inputs via such
        // bindings. (It also ensures that guard expressions can at
        // most *copy* values from such bindings; non-Copy things
        // cannot be moved via pattern bindings in guard expressions.)
        //
        // ----
        //
        // Implementation notes (under assumption `autoref` is true).
        //
        // To encode the distinction above, we must inject the
        // temporaries `tmp1` and `tmp2`.
        //
        // There are two cases of interest: binding by-value, and binding by-ref.
        //
        // 1. Binding by-value: Things are simple.
        //
        //    * Establishing `tmp1` creates a reference into the
        //      matched place. This code is emitted by
        //      bind_matched_candidate_for_guard.
        //
        //    * `tmp2` is only initialized "lazily", after we have
        //      checked the guard. Thus, the code that can trigger
        //      moves out of the candidate can only fire after the
        //      guard evaluated to true. This initialization code is
        //      emitted by bind_matched_candidate_for_arm.
        //
        // 2. Binding by-reference: Things are tricky.
        //
        //    * Here, the guard expression wants a `&&` or `&&mut`
        //      into the original input. This means we need to borrow
        //      a reference that we do not immediately have at hand
        //      (because all we have is the places associated with the
        //      match input itself; it is up to us to create a place
        //      holding a `&` or `&mut` that we can then borrow).

        let autoref = self.hir
            .tcx()
            .all_pat_vars_are_implicit_refs_within_guards();
        if let Some(guard) = candidate.guard {
            if autoref {
                // Bind the `&T`-typed guard locals and enter a guard
                // frame so that identifier lookups inside the guard get
                // the implicit deref treatment described above.
                self.bind_matched_candidate_for_guard(
                    block,
                    candidate.pat_index,
                    &candidate.bindings,
                );
                let guard_frame = GuardFrame {
                    locals: candidate
                        .bindings
                        .iter()
                        .map(|b| GuardFrameLocal::new(b.var_id, b.binding_mode))
                        .collect(),
                };
                debug!("Entering guard building context: {:?}", guard_frame);
                self.guard_context.push(guard_frame);
            } else {
                self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
            }

            // There is currently only one kind of guard (a plain `if`
            // expression); mirror it into HAIR and lower it to an operand.
            let guard = match guard {
                Guard::If(e) => self.hir.mirror(e),
            };
            let source_info = self.source_info(guard.span);
            let cond = unpack!(block = self.as_local_operand(block, guard));
            if autoref {
                let guard_frame = self.guard_context.pop().unwrap();
                debug!(
                    "Exiting guard building context with locals: {:?}",
                    guard_frame
                );
            }

            // the block to branch to if the guard fails
            let false_edge_block = self.cfg.start_new_block();

            // We want to ensure that the matched candidates are bound
            // after we have confirmed this candidate *and* any
            // associated guard; Binding them on `block` is too soon,
            // because that would be before we've checked the result
            // from the guard.
            //
            // But binding them on `arm_block` is *too late*, because
            // then all of the candidates for a single arm would be
            // bound in the same place, that would cause a case like:
            //
            // ```rust
            // match (30, 2) {
            //     (mut x, 1) | (2, mut x) if { true } => { ... }
            //     ...                        // ^^^^^^^ (this is `arm_block`)
            // }
            // ```
            //
            // would yield a `arm_block` something like:
            //
            // ```
            // StorageLive(_4);        // _4 is `x`
            // _4 = &mut (_1.0: i32);  // this is handling `(mut x, 1)` case
            // _4 = &mut (_1.1: i32);  // this is handling `(2, mut x)` case
            // ```
            //
            // and that is clearly not correct.
            let post_guard_block = self.cfg.start_new_block();
            self.cfg.terminate(
                block,
                source_info,
                TerminatorKind::if_(self.hir.tcx(), cond, post_guard_block, false_edge_block),
            );

            if autoref {
                self.bind_matched_candidate_for_arm_body(post_guard_block, &candidate.bindings);
            }

            self.cfg.terminate(
                post_guard_block,
                source_info,
                TerminatorKind::Goto { target: arm_block },
            );

            let otherwise = self.cfg.start_new_block();

            // The guard-failure path also flows through a false edge
            // whose imaginary target is the next candidate.
            self.cfg.terminate(
                false_edge_block,
                source_info,
                TerminatorKind::FalseEdges {
                    real_target: otherwise,
                    imaginary_targets: vec![candidate.next_candidate_pre_binding_block],
                },
            );
            Some(otherwise)
        } else {
            // (Here, it is not too early to bind the matched
            // candidate on `block`, because there is no guard result
            // that we have to inspect before we bind them.)
            self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
            self.cfg.terminate(
                block,
                candidate_source_info,
                TerminatorKind::Goto { target: arm_block },
            );
            None
        }
    }
1315
1316 /// Append `AscribeUserType` statements onto the end of `block`
1317 /// for each ascription
1318 fn ascribe_types<'pat>(
1319 &mut self,
1320 block: BasicBlock,
1321 ascriptions: &[Ascription<'tcx>],
1322 ) {
1323 for ascription in ascriptions {
1324 let source_info = self.source_info(ascription.span);
1325
1326 debug!(
1327 "adding user ascription at span {:?} of place {:?} and {:?}",
1328 source_info.span,
1329 ascription.source,
1330 ascription.user_ty,
1331 );
1332
1333 self.cfg.push(
1334 block,
1335 Statement {
1336 source_info,
1337 kind: StatementKind::AscribeUserType(
1338 ascription.source.clone(),
1339 ty::Variance::Covariant,
1340 box ascription.user_ty.clone().user_ty(),
1341 ),
1342 },
1343 );
1344 }
1345 }
1346
1347 // Only called when all_pat_vars_are_implicit_refs_within_guards,
1348 // and thus all code/comments assume we are in that context.
1349 fn bind_matched_candidate_for_guard(
1350 &mut self,
1351 block: BasicBlock,
1352 pat_index: usize,
1353 bindings: &[Binding<'tcx>],
1354 ) {
1355 debug!(
1356 "bind_matched_candidate_for_guard(block={:?}, pat_index={:?}, bindings={:?})",
1357 block, pat_index, bindings
1358 );
1359
1360 // Assign each of the bindings. Since we are binding for a
1361 // guard expression, this will never trigger moves out of the
1362 // candidate.
1363 let re_empty = self.hir.tcx().types.re_empty;
1364 for binding in bindings {
1365 let source_info = self.source_info(binding.span);
1366
1367 // For each pattern ident P of type T, `ref_for_guard` is
1368 // a reference R: &T pointing to the location matched by
1369 // the pattern, and every occurrence of P within a guard
1370 // denotes *R.
1371 let ref_for_guard =
1372 self.storage_live_binding(block, binding.var_id, binding.span, RefWithinGuard);
1373 // Question: Why schedule drops if bindings are all
1374 // shared-&'s? Answer: Because schedule_drop_for_binding
1375 // also emits StorageDead's for those locals.
1376 self.schedule_drop_for_binding(binding.var_id, binding.span, RefWithinGuard);
1377 match binding.binding_mode {
1378 BindingMode::ByValue => {
1379 let rvalue = Rvalue::Ref(re_empty, BorrowKind::Shared, binding.source.clone());
1380 self.cfg
1381 .push_assign(block, source_info, &ref_for_guard, rvalue);
1382 }
1383 BindingMode::ByRef(region, borrow_kind) => {
1384 // Tricky business: For `ref id` and `ref mut id`
1385 // patterns, we want `id` within the guard to
1386 // correspond to a temp of type `& &T` or `& &mut
1387 // T` (i.e. a "borrow of a borrow") that is
1388 // implicitly dereferenced.
1389 //
1390 // To borrow a borrow, we need that inner borrow
1391 // to point to. So, create a temp for the inner
1392 // borrow, and then take a reference to it.
1393 //
1394 // Note: the temp created here is *not* the one
1395 // used by the arm body itself. This eases
1396 // observing two-phase borrow restrictions.
1397 let val_for_guard = self.storage_live_binding(
1398 block,
1399 binding.var_id,
1400 binding.span,
1401 ValWithinGuard(pat_index),
1402 );
1403 self.schedule_drop_for_binding(
1404 binding.var_id,
1405 binding.span,
1406 ValWithinGuard(pat_index),
1407 );
1408
1409 // rust-lang/rust#27282: We reuse the two-phase
1410 // borrow infrastructure so that the mutable
1411 // borrow (whose mutabilty is *unusable* within
1412 // the guard) does not conflict with the implicit
1413 // borrow of the whole match input. See additional
1414 // discussion on rust-lang/rust#49870.
1415 let borrow_kind = match borrow_kind {
1416 BorrowKind::Shared
1417 | BorrowKind::Shallow
1418 | BorrowKind::Unique => borrow_kind,
1419 BorrowKind::Mut { .. } => BorrowKind::Mut {
1420 allow_two_phase_borrow: true,
1421 },
1422 };
1423 let rvalue = Rvalue::Ref(region, borrow_kind, binding.source.clone());
1424 self.cfg
1425 .push_assign(block, source_info, &val_for_guard, rvalue);
1426 let rvalue = Rvalue::Ref(region, BorrowKind::Shared, val_for_guard);
1427 self.cfg
1428 .push_assign(block, source_info, &ref_for_guard, rvalue);
1429 }
1430 }
1431 }
1432 }
1433
1434 fn bind_matched_candidate_for_arm_body(
1435 &mut self,
1436 block: BasicBlock,
1437 bindings: &[Binding<'tcx>],
1438 ) {
1439 debug!(
1440 "bind_matched_candidate_for_arm_body(block={:?}, bindings={:?}",
1441 block, bindings
1442 );
1443
1444 // Assign each of the bindings. This may trigger moves out of the candidate.
1445 for binding in bindings {
1446 let source_info = self.source_info(binding.span);
1447 let local =
1448 self.storage_live_binding(block, binding.var_id, binding.span, OutsideGuard);
1449 self.schedule_drop_for_binding(binding.var_id, binding.span, OutsideGuard);
1450 let rvalue = match binding.binding_mode {
1451 BindingMode::ByValue => {
1452 Rvalue::Use(self.consume_by_copy_or_move(binding.source.clone()))
1453 }
1454 BindingMode::ByRef(region, borrow_kind) => {
1455 Rvalue::Ref(region, borrow_kind, binding.source.clone())
1456 }
1457 };
1458 self.cfg.push_assign(block, source_info, &local, rvalue);
1459 }
1460 }
1461
    /// Each binding (`ref mut var`/`ref var`/`mut var`/`var`, where
    /// the bound `var` has type `T` in the arm body) in a pattern
    /// maps to `2+N` locals. The first local is a binding for
    /// occurrences of `var` in the guard, which will all have type
    /// `&T`. The N locals are bindings for the `T` that is referenced
    /// by the first local; they are not used outside of the
    /// guard. The last local is a binding for occurrences of `var` in
    /// the arm body, which will have type `T`.
    ///
    /// The reason we have N locals rather than just 1 is to
    /// accommodate rust-lang/rust#51348: If the arm has N candidate
    /// patterns, then in general they can correspond to distinct
    /// parts of the matched data, and we want them to be distinct
    /// temps in order to simplify checks performed by our internal
    /// leveraging of two-phase borrows.
    ///
    /// The guard-related locals are only allocated when the arm
    /// actually has a guard (and the implicit-refs-within-guards
    /// behavior is enabled); otherwise a single local is declared.
    /// The resulting `LocalsForNode` is recorded in `var_indices`
    /// under `var_id`.
    fn declare_binding(
        &mut self,
        source_info: SourceInfo,
        visibility_scope: SourceScope,
        mutability: Mutability,
        name: Name,
        mode: BindingMode,
        num_patterns: usize,
        var_id: NodeId,
        var_ty: Ty<'tcx>,
        user_var_ty: &PatternTypeProjections<'tcx>,
        has_guard: ArmHasGuard,
        opt_match_place: Option<(Option<Place<'tcx>>, Span)>,
        pat_span: Span,
    ) {
        debug!(
            "declare_binding(var_id={:?}, name={:?}, mode={:?}, var_ty={:?}, \
             visibility_scope={:?}, source_info={:?})",
            var_id, name, mode, var_ty, visibility_scope, source_info
        );

        let tcx = self.hir.tcx();
        let binding_mode = match mode {
            BindingMode::ByValue => ty::BindingMode::BindByValue(mutability.into()),
            BindingMode::ByRef { .. } => ty::BindingMode::BindByReference(mutability.into()),
        };
        // Template declaration for the `T`-typed locals; the for-guard
        // value locals below are cloned from it.
        let local = LocalDecl::<'tcx> {
            mutability,
            ty: var_ty,
            user_ty: user_var_ty.clone().user_ty(),
            name: Some(name),
            source_info,
            visibility_scope,
            internal: false,
            is_block_tail: None,
            is_user_variable: Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
                binding_mode,
                // hypothetically, `visit_bindings` could try to unzip
                // an outermost hir::Ty as we descend, matching up
                // idents in pat; but complex w/ unclear UI payoff.
                // Instead, just abandon providing diagnostic info.
                opt_ty_info: None,
                opt_match_place,
                pat_span,
            }))),
        };
        // NOTE: the order of `local_decls.push` calls below determines
        // the `Local` indices stored in `LocalsForNode`; the arm-body
        // local is always allocated first.
        let for_arm_body = self.local_decls.push(local.clone());
        let locals = if has_guard.0 && tcx.all_pat_vars_are_implicit_refs_within_guards() {
            // One `T`-typed local per candidate pattern of the arm
            // (see doc comment above re: rust-lang/rust#51348).
            let mut vals_for_guard = Vec::with_capacity(num_patterns);
            for _ in 0..num_patterns {
                let val_for_guard_idx = self.local_decls.push(LocalDecl {
                    // This variable isn't mutated but has a name, so has to be
                    // immutable to avoid the unused mut lint.
                    mutability: Mutability::Not,
                    ..local.clone()
                });
                vals_for_guard.push(val_for_guard_idx);
            }
            // The `&T`-typed local that occurrences of `var` inside the
            // guard resolve to (implicitly dereferenced).
            let ref_for_guard = self.local_decls.push(LocalDecl::<'tcx> {
                // See previous comment.
                mutability: Mutability::Not,
                ty: tcx.mk_imm_ref(tcx.types.re_empty, var_ty),
                user_ty: UserTypeProjections::none(),
                name: Some(name),
                source_info,
                visibility_scope,
                // FIXME: should these secretly injected ref_for_guard's be marked as `internal`?
                internal: false,
                is_block_tail: None,
                is_user_variable: Some(ClearCrossCrate::Set(BindingForm::RefForGuard)),
            });
            LocalsForNode::ForGuard {
                vals_for_guard,
                ref_for_guard,
                for_arm_body,
            }
        } else {
            LocalsForNode::One(for_arm_body)
        };
        debug!("declare_binding: vars={:?}", locals);
        self.var_indices.insert(var_id, locals);
    }
1559
1560 // Determine the fake borrows that are needed to ensure that the place
1561 // will evaluate to the same thing until an arm has been chosen.
1562 fn add_fake_borrows<'pat>(
1563 &mut self,
1564 pre_binding_blocks: &[(BasicBlock, Span)],
1565 fake_borrows: FxHashMap<Place<'tcx>, BorrowKind>,
1566 source_info: SourceInfo,
1567 start_block: BasicBlock,
1568 ) {
1569 let tcx = self.hir.tcx();
1570
1571 debug!("add_fake_borrows pre_binding_blocks = {:?}, fake_borrows = {:?}",
1572 pre_binding_blocks, fake_borrows);
1573
1574 let mut all_fake_borrows = Vec::with_capacity(fake_borrows.len());
1575
1576 // Insert a Shallow borrow of the prefixes of any fake borrows.
1577 for (place, borrow_kind) in fake_borrows
1578 {
1579 {
1580 let mut prefix_cursor = &place;
1581 while let Place::Projection(box Projection { base, elem }) = prefix_cursor {
1582 if let ProjectionElem::Deref = elem {
1583 // Insert a shallow borrow after a deref. For other
1584 // projections the borrow of prefix_cursor will
1585 // conflict with any mutation of base.
1586 all_fake_borrows.push((base.clone(), BorrowKind::Shallow));
1587 }
1588 prefix_cursor = base;
1589 }
1590 }
1591
1592 all_fake_borrows.push((place, borrow_kind));
1593 }
1594
1595 // Deduplicate and ensure a deterministic order.
1596 all_fake_borrows.sort();
1597 all_fake_borrows.dedup();
1598
1599 debug!("add_fake_borrows all_fake_borrows = {:?}", all_fake_borrows);
1600
1601 // Add fake borrows to the start of the match and reads of them before
1602 // the start of each arm.
1603 let mut borrowed_input_temps = Vec::with_capacity(all_fake_borrows.len());
1604
1605 for (matched_place, borrow_kind) in all_fake_borrows {
1606 let borrowed_input =
1607 Rvalue::Ref(tcx.types.re_empty, borrow_kind, matched_place.clone());
1608 let borrowed_input_ty = borrowed_input.ty(&self.local_decls, tcx);
1609 let borrowed_input_temp = self.temp(borrowed_input_ty, source_info.span);
1610 self.cfg.push_assign(
1611 start_block,
1612 source_info,
1613 &borrowed_input_temp,
1614 borrowed_input
1615 );
1616 borrowed_input_temps.push(borrowed_input_temp);
1617 }
1618
1619 // FIXME: This could be a lot of reads (#fake borrows * #patterns).
1620 // The false edges that we currently generate would allow us to only do
1621 // this on the last Candidate, but it's possible that there might not be
1622 // so many false edges in the future, so we read for all Candidates for
1623 // now.
1624 // Another option would be to make our own block and add our own false
1625 // edges to it.
1626 if tcx.emit_read_for_match() {
1627 for &(pre_binding_block, span) in pre_binding_blocks {
1628 let pattern_source_info = self.source_info(span);
1629 for temp in &borrowed_input_temps {
1630 self.cfg.push(pre_binding_block, Statement {
1631 source_info: pattern_source_info,
1632 kind: StatementKind::FakeRead(
1633 FakeReadCause::ForMatchGuard,
1634 temp.clone(),
1635 ),
1636 });
1637 }
1638 }
1639 }
1640 }
1641 }