// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Code related to match expressions. These are sufficiently complex
//! to warrant their own module and submodules. :) This main module
//! includes the high-level algorithm, the submodules contain the
//! details.

use build::{BlockAnd, BlockAndExtension, Builder};
use build::{GuardFrame, GuardFrameLocal, LocalsForNode};
use build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::bitvec::BitVector;
use rustc::ty::{self, Ty};
use rustc::mir::*;
use rustc::hir;
use hair::*;
use syntax::ast::{Name, NodeId};
use syntax_pos::Span;

// helper functions, broken out by category:
mod simplify;
mod test;
mod util;

/// ArmHasGuard is isomorphic to a boolean flag. It indicates whether
/// a match arm has a guard expression attached to it.
#[derive(Copy, Clone, Debug)]
pub(crate) struct ArmHasGuard(pub bool);

impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
    pub fn match_expr(&mut self,
                      destination: &Place<'tcx>,
                      span: Span,
                      mut block: BasicBlock,
                      discriminant: ExprRef<'tcx>,
                      arms: Vec<Arm<'tcx>>)
                      -> BlockAnd<()> {
        let tcx = self.hir.tcx();
        let discriminant_place = unpack!(block = self.as_place(block, discriminant));

        // Matching on a `discriminant_place` with an uninhabited type doesn't
        // generate any memory reads by itself, and so if the place "expression"
        // contains unsafe operations like raw pointer dereferences or union
        // field projections, we wouldn't know to require an `unsafe` block
        // around a `match` equivalent to `std::intrinsics::unreachable()`.
        // See issue #47412 for this hole being discovered in the wild.
        //
        // HACK(eddyb) Work around the above issue by adding a dummy inspection
        // of `discriminant_place`, specifically by applying `Rvalue::Discriminant`
        // (which will work regardless of type) and storing the result in a temp.
        //
        // NOTE: Under NLL, the above issue should no longer occur because it
        // injects a borrow of the matched input, which should have the same effect
        // as eddyb's hack. Once NLL is the default, we can remove the hack.

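        // As an illustrative sketch of the hole (assumed here, not quoted
        // from the issue): matching on an uninhabited union field reads no
        // memory, yet must still require an `unsafe` block, roughly
        //
        // ```
        // union U { unit: (), never: ! }
        // let u = U { unit: () };
        // unsafe { match u.never {} }
        // ```
        //
        // The dummy `Rvalue::Discriminant` access below makes sure the place
        // is actually read in the MIR, so the usual unsafety checks apply.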
        let dummy_source_info = self.source_info(span);
        let dummy_access = Rvalue::Discriminant(discriminant_place.clone());
        let dummy_ty = dummy_access.ty(&self.local_decls, tcx);
        let dummy_temp = self.temp(dummy_ty, dummy_source_info.span);
        self.cfg.push_assign(block, dummy_source_info, &dummy_temp, dummy_access);

        let source_info = self.source_info(span);
        let borrowed_input_temp = if tcx.generate_borrow_of_any_match_input() {
            // The region is unknown at this point; we rely on NLL
            // inference to find an appropriate one. Therefore you can
            // only use this when NLL is turned on.
            assert!(tcx.use_mir_borrowck());
            let borrowed_input =
                Rvalue::Ref(tcx.types.re_empty, BorrowKind::Shared, discriminant_place.clone());
            let borrowed_input_ty = borrowed_input.ty(&self.local_decls, tcx);
            let borrowed_input_temp = self.temp(borrowed_input_ty, span);
            self.cfg.push_assign(block, source_info, &borrowed_input_temp, borrowed_input);
            Some(borrowed_input_temp)
        } else {
            None
        };

        let mut arm_blocks = ArmBlocks {
            blocks: arms.iter()
                        .map(|_| self.cfg.start_new_block())
                        .collect(),
        };

        // Get the arm bodies and their scopes, while declaring bindings.
        let arm_bodies: Vec<_> = arms.iter().map(|arm| {
            // BUG: use arm lint level
            let body = self.hir.mirror(arm.body.clone());
            let scope = self.declare_bindings(None, body.span,
                                              LintLevel::Inherited,
                                              &arm.patterns[0],
                                              ArmHasGuard(arm.guard.is_some()));
            (body, scope.unwrap_or(self.source_scope))
        }).collect();

        // Create the pre-binding blocks, used to link the candidates
        // together with false edges.
        let candidate_count = arms.iter().fold(0, |ac, c| ac + c.patterns.len());
        let pre_binding_blocks: Vec<_> = (0..candidate_count + 1)
            .map(|_| self.cfg.start_new_block()).collect();

        // assemble a list of candidates: there is one candidate per
        // pattern, which means there may be more than one candidate
        // *per arm*. These candidates are kept sorted such that the
        // highest priority candidate comes first in the list.
        // (i.e. same order as in source)

        let candidates: Vec<_> =
            arms.iter()
                .enumerate()
                .flat_map(|(arm_index, arm)| {
                    arm.patterns.iter()
                                .map(move |pat| (arm_index, pat, arm.guard.clone()))
                })
                .zip(pre_binding_blocks.iter().zip(pre_binding_blocks.iter().skip(1)))
                .map(|((arm_index, pattern, guard),
                       (pre_binding_block, next_candidate_pre_binding_block))| {

                    if let (true, Some(borrow_temp)) = (tcx.emit_read_for_match(),
                                                        borrowed_input_temp.clone()) {
                        // inject a fake read of the borrowed input at
                        // the start of each arm's pattern testing
                        // code.
                        //
                        // This should ensure that you cannot change
                        // the variant for an enum while you are in
                        // the midst of matching on it.

                        self.cfg.push(*pre_binding_block, Statement {
                            source_info,
                            kind: StatementKind::ReadForMatch(borrow_temp.clone()),
                        });
                    }

                    // One might ask: why not build up the match pair such that it
                    // matches via `borrowed_input_temp.deref()` instead of
                    // using the `discriminant_place` directly, as it is doing here?
                    //
                    // The basic answer is that if you do that, then you end up with
                    // accesses to a shared borrow of the input and that conflicts with
                    // any arms that look like e.g.
                    //
                    // match Some(&4) {
                    //     ref mut foo => {
                    //       ... /* mutate `foo` in arm body */ ...
                    //     }
                    // }
                    //
                    // (Perhaps we could further revise the MIR
                    // construction here so that it only does a
                    // shared borrow at the outset and delays doing
                    // the mutable borrow until after the pattern is
                    // matched *and* the guard (if any) for the arm
                    // has been run.)

                    Candidate {
                        span: pattern.span,
                        match_pairs: vec![MatchPair::new(discriminant_place.clone(), pattern)],
                        bindings: vec![],
                        guard,
                        arm_index,
                        pre_binding_block: *pre_binding_block,
                        next_candidate_pre_binding_block: *next_candidate_pre_binding_block,
                    }
                })
                .collect();

        let outer_source_info = self.source_info(span);
        self.cfg.terminate(*pre_binding_blocks.last().unwrap(),
                           outer_source_info, TerminatorKind::Unreachable);

        // this will generate code to test discriminant_place and
        // branch to the appropriate arm block
        let otherwise = self.match_candidates(span, &mut arm_blocks, candidates, block);

        if !otherwise.is_empty() {
            // All matches are exhaustive. However, because some matches
            // only have exponentially-large exhaustive decision trees, we
            // sometimes generate an inexhaustive decision tree.
            //
            // In that case, the inexhaustive tips of the decision tree
            // can't be reached - terminate them with an `unreachable`.
            let source_info = self.source_info(span);

            let mut otherwise = otherwise;
            otherwise.sort();
            otherwise.dedup(); // variant switches can introduce duplicate target blocks
            for block in otherwise {
                self.cfg.terminate(block, source_info, TerminatorKind::Unreachable);
            }
        }

        // all the arm blocks will rejoin here
        let end_block = self.cfg.start_new_block();

        let outer_source_info = self.source_info(span);
        for (arm_index, (body, source_scope)) in arm_bodies.into_iter().enumerate() {
            let mut arm_block = arm_blocks.blocks[arm_index];
            // Re-enter the source scope we created the bindings in.
            self.source_scope = source_scope;
            unpack!(arm_block = self.into(destination, arm_block, body));
            self.cfg.terminate(arm_block, outer_source_info,
                               TerminatorKind::Goto { target: end_block });
        }
        self.source_scope = outer_source_info.scope;

        end_block.unit()
    }

    pub fn user_assert_ty(&mut self, block: BasicBlock, hir_id: hir::HirId,
                          var: NodeId, span: Span) {
        if self.hir.tcx().sess.opts.debugging_opts.disable_nll_user_type_assert { return; }

        let local_id = self.var_local_id(var, OutsideGuard);
        let source_info = self.source_info(span);

        debug!("user_assert_ty: local_id={:?}", hir_id.local_id);
        if let Some(c_ty) = self.hir.tables.user_provided_tys().get(hir_id) {
            debug!("user_assert_ty: c_ty={:?}", c_ty);
            self.cfg.push(block, Statement {
                source_info,
                kind: StatementKind::UserAssertTy(*c_ty, local_id),
            });
        }
    }

    pub fn expr_into_pattern(&mut self,
                             mut block: BasicBlock,
                             ty: Option<hir::HirId>,
                             irrefutable_pat: Pattern<'tcx>,
                             initializer: ExprRef<'tcx>)
                             -> BlockAnd<()> {
        // optimize the case of `let x = ...`
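        // (A sketch of the two paths, for orientation: a binding like
        // `let x = f();` takes the fast path below and writes the
        // initializer directly into `x`'s local, while anything more
        // structured, e.g. `let (a, b) = pair;`, falls through to
        // `place_into_pattern`.)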
        match *irrefutable_pat.kind {
            PatternKind::Binding { mode: BindingMode::ByValue,
                                   var,
                                   subpattern: None, .. } => {
                let place = self.storage_live_binding(block, var, irrefutable_pat.span,
                                                      OutsideGuard);

                if let Some(ty) = ty {
                    self.user_assert_ty(block, ty, var, irrefutable_pat.span);
                }

                unpack!(block = self.into(&place, block, initializer));
                self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
                block.unit()
            }
            _ => {
                let place = unpack!(block = self.as_place(block, initializer));
                self.place_into_pattern(block, irrefutable_pat, &place)
            }
        }
    }

    pub fn place_into_pattern(&mut self,
                              mut block: BasicBlock,
                              irrefutable_pat: Pattern<'tcx>,
                              initializer: &Place<'tcx>)
                              -> BlockAnd<()> {
        // create a dummy candidate
        let mut candidate = Candidate {
            span: irrefutable_pat.span,
            match_pairs: vec![MatchPair::new(initializer.clone(), &irrefutable_pat)],
            bindings: vec![],
            guard: None,

            // since we don't call `match_candidates`, these fields are unused
            arm_index: 0,
            pre_binding_block: block,
            next_candidate_pre_binding_block: block
        };

        // Simplify the candidate. Since the pattern is irrefutable, this should
        // always convert all match-pairs into bindings.
        unpack!(block = self.simplify_candidate(block, &mut candidate));

        if !candidate.match_pairs.is_empty() {
            span_bug!(candidate.match_pairs[0].pattern.span,
                      "match pairs {:?} remaining after simplifying \
                       irrefutable pattern",
                      candidate.match_pairs);
        }

        // now apply the bindings, which will also declare the variables
        self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);

        block.unit()
    }

    /// Declares the bindings of the given pattern and returns the visibility scope
    /// for the bindings in this pattern, if such a scope had to be created.
    /// NOTE: Declaring the bindings should always be done in their drop scope.
    pub fn declare_bindings(&mut self,
                            mut visibility_scope: Option<SourceScope>,
                            scope_span: Span,
                            lint_level: LintLevel,
                            pattern: &Pattern<'tcx>,
                            has_guard: ArmHasGuard)
                            -> Option<SourceScope> {
        assert!(!(visibility_scope.is_some() && lint_level.is_explicit()),
                "can't have both a visibility and a lint scope at the same time");
        let mut scope = self.source_scope;
        self.visit_bindings(pattern, &mut |this, mutability, name, mode, var, span, ty| {
            if visibility_scope.is_none() {
                visibility_scope = Some(this.new_source_scope(scope_span,
                                                              LintLevel::Inherited,
                                                              None));
                // If we have lints, create a new source scope
                // that marks the lints for the locals. See the comment
                // on the `source_info` field for why this is needed.
                if lint_level.is_explicit() {
                    scope =
                        this.new_source_scope(scope_span, lint_level, None);
                }
            }
            let source_info = SourceInfo {
                span,
                scope,
            };
            let visibility_scope = visibility_scope.unwrap();
            this.declare_binding(source_info, visibility_scope, mutability, name, mode, var,
                                 ty, has_guard);
        });
        visibility_scope
    }
5bcae85e 333
83c7162d
XL
334 pub fn storage_live_binding(&mut self,
335 block: BasicBlock,
336 var: NodeId,
337 span: Span,
338 for_guard: ForGuard)
ff7c6d11 339 -> Place<'tcx>
8bb4bdeb 340 {
83c7162d 341 let local_id = self.var_local_id(var, for_guard);
8bb4bdeb
XL
342 let source_info = self.source_info(span);
343 self.cfg.push(block, Statement {
3b2f2976 344 source_info,
ea8adc8c 345 kind: StatementKind::StorageLive(local_id)
8bb4bdeb 346 });
ff7c6d11 347 Place::Local(local_id)
8bb4bdeb 348 }
5bcae85e 349
83c7162d
XL
350 pub fn schedule_drop_for_binding(&mut self,
351 var: NodeId,
352 span: Span,
353 for_guard: ForGuard) {
354 let local_id = self.var_local_id(var, for_guard);
8bb4bdeb 355 let var_ty = self.local_decls[local_id].ty;
ea8adc8c
XL
356 let hir_id = self.hir.tcx().hir.node_to_hir_id(var);
357 let region_scope = self.hir.region_scope_tree.var_scope(hir_id.local_id);
ff7c6d11 358 self.schedule_drop(span, region_scope, &Place::Local(local_id), var_ty);
8bb4bdeb
XL
359 }
360
3b2f2976 361 pub fn visit_bindings<F>(&mut self, pattern: &Pattern<'tcx>, f: &mut F)
94b46f34 362 where F: FnMut(&mut Self, Mutability, Name, BindingMode, NodeId, Span, Ty<'tcx>)
8bb4bdeb
XL
363 {
364 match *pattern.kind {
94b46f34
XL
365 PatternKind::Binding { mutability, name, mode, var, ty, ref subpattern, .. } => {
366 f(self, mutability, name, mode, var, pattern.span, ty);
5bcae85e 367 if let Some(subpattern) = subpattern.as_ref() {
8bb4bdeb 368 self.visit_bindings(subpattern, f);
5bcae85e
SL
369 }
370 }
371 PatternKind::Array { ref prefix, ref slice, ref suffix } |
372 PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
373 for subpattern in prefix.iter().chain(slice).chain(suffix) {
8bb4bdeb 374 self.visit_bindings(subpattern, f);
5bcae85e
SL
375 }
376 }
377 PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {
378 }
379 PatternKind::Deref { ref subpattern } => {
8bb4bdeb 380 self.visit_bindings(subpattern, f);
5bcae85e
SL
381 }
382 PatternKind::Leaf { ref subpatterns } |
383 PatternKind::Variant { ref subpatterns, .. } => {
384 for subpattern in subpatterns {
8bb4bdeb 385 self.visit_bindings(&subpattern.pattern, f);
5bcae85e
SL
386 }
387 }
388 }
389 }
}

/// List of blocks for each arm (and potentially other metadata in the
/// future).
struct ArmBlocks {
    blocks: Vec<BasicBlock>,
}

e9174d1e 399#[derive(Clone, Debug)]
9cc50fc6 400pub struct Candidate<'pat, 'tcx:'pat> {
54a0048b
SL
401 // span of the original pattern that gave rise to this candidate
402 span: Span,
403
e9174d1e 404 // all of these must be satisfied...
92a42be0 405 match_pairs: Vec<MatchPair<'pat, 'tcx>>,
e9174d1e
SL
406
407 // ...these bindings established...
b039eaaf 408 bindings: Vec<Binding<'tcx>>,
e9174d1e
SL
409
410 // ...and the guard must be evaluated...
b039eaaf 411 guard: Option<ExprRef<'tcx>>,
e9174d1e 412
b039eaaf
SL
413 // ...and then we branch to arm with this index.
414 arm_index: usize,
abe05a73
XL
415
416 // ...and the blocks for add false edges between candidates
417 pre_binding_block: BasicBlock,
418 next_candidate_pre_binding_block: BasicBlock,
e9174d1e
SL
419}
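
// (For orientation, an assumed example: lowering an arm like
// `Some(x) if c => ...` initially yields one candidate whose single
// match pair pairs the matched place with the whole arm pattern;
// `simplify_candidate` then peels that apart into the bindings plus
// whatever refutable tests remain.)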

#[derive(Clone, Debug)]
struct Binding<'tcx> {
    span: Span,
    source: Place<'tcx>,
    name: Name,
    var_id: NodeId,
    var_ty: Ty<'tcx>,
    mutability: Mutability,
    binding_mode: BindingMode<'tcx>,
}

#[derive(Clone, Debug)]
pub struct MatchPair<'pat, 'tcx:'pat> {
    // this place...
    place: Place<'tcx>,

    // ... must match this pattern.
    pattern: &'pat Pattern<'tcx>,

    // HACK(eddyb) This is used to toggle whether a Slice pattern
    // has had its length checked. This is only necessary because
    // the "rest" part of the pattern right now has type &[T] and
    // as such, it requires an Rvalue::Slice to be generated.
    // See RFC 495 / issue #23121 for the eventual (proper) solution.
    slice_len_checked: bool
}

#[derive(Clone, Debug, PartialEq)]
enum TestKind<'tcx> {
    // test which variant of an enum the value has
    Switch {
        adt_def: &'tcx ty::AdtDef,
        variants: BitVector,
    },

    // test which of a set of integer (or char/bool) constants the value equals
    SwitchInt {
        switch_ty: Ty<'tcx>,
        options: Vec<u128>,
        indices: FxHashMap<&'tcx ty::Const<'tcx>, usize>,
    },

    // test for equality
    Eq {
        value: &'tcx ty::Const<'tcx>,
        ty: Ty<'tcx>,
    },

    // test whether the value falls within an inclusive or exclusive range
    Range {
        lo: Literal<'tcx>,
        hi: Literal<'tcx>,
        ty: Ty<'tcx>,
        end: hir::RangeEnd,
    },

    // test the length of the slice against `len` (using the comparison `op`)
    Len {
        len: u64,
        op: BinOp,
    },
}
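
// A rough illustration of which pattern forms give rise to which test
// kinds (an assumed sketch, not tied to any particular test in the tree):
//
// ```
// match (opt, n, name) {
//     (Some(_), _, _) => 1,    // enum variant      -> TestKind::Switch
//     (_, 7, _) => 2,          // integer constant  -> TestKind::SwitchInt
//     (_, _, "foo") => 3,      // &str constant     -> TestKind::Eq
//     (_, 0 ... 9, _) => 4,    // range pattern     -> TestKind::Range
//     _ => 0,
// }
// ```
//
// Slice patterns with a fixed or minimum length additionally produce a
// `Len` test; see the `test` submodule for how each kind is lowered.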

#[derive(Debug)]
pub struct Test<'tcx> {
    span: Span,
    kind: TestKind<'tcx>,
}

///////////////////////////////////////////////////////////////////////////
// Main matching algorithm

a7813a04 493impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
92a42be0
SL
494 /// The main match algorithm. It begins with a set of candidates
495 /// `candidates` and has the job of generating code to determine
496 /// which of these candidates, if any, is the correct one. The
9cc50fc6
SL
497 /// candidates are sorted such that the first item in the list
498 /// has the highest priority. When a candidate is found to match
499 /// the value, we will generate a branch to the appropriate
92a42be0
SL
500 /// block found in `arm_blocks`.
501 ///
502 /// The return value is a list of "otherwise" blocks. These are
503 /// points in execution where we found that *NONE* of the
504 /// candidates apply. In principle, this means that the input
505 /// list was not exhaustive, though at present we sometimes are
506 /// not smart enough to recognize all exhaustive inputs.
507 ///
508 /// It might be surprising that the input can be inexhaustive.
509 /// Indeed, initially, it is not, because all matches are
510 /// exhaustive in Rust. But during processing we sometimes divide
511 /// up the list of candidates and recurse with a non-exhaustive
512 /// list. This is important to keep the size of the generated code
513 /// under control. See `test_candidates` for more details.
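    ///
    /// For orientation, a minimal sketch (assumed, not tied to any
    /// particular test in the tree):
    ///
    /// ```
    /// # let x: Option<i32> = Some(3);
    /// match x {
    ///     Some(0) => 0, // candidate 0 (highest priority)
    ///     Some(n) => n, // candidate 1
    ///     None => -1,   // candidate 2
    /// };
    /// ```
    ///
    /// The test taken from the highest-priority candidate (here, a switch
    /// on the variant of `x`) splits the candidates into per-outcome sets,
    /// and any tip of the resulting decision tree where no candidate
    /// applies ends up as an "otherwise" block in the returned list.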
    fn match_candidates<'pat>(&mut self,
                              span: Span,
                              arm_blocks: &mut ArmBlocks,
                              mut candidates: Vec<Candidate<'pat, 'tcx>>,
                              mut block: BasicBlock)
                              -> Vec<BasicBlock>
    {
        debug!("match_candidates(span={:?}, block={:?}, candidates={:?})",
               span, block, candidates);

        // Start by simplifying candidates. Once this process is
        // complete, all the match pairs which remain require some
        // form of test, whether it be a switch or pattern comparison.
        for candidate in &mut candidates {
            unpack!(block = self.simplify_candidate(block, candidate));
        }

        // The candidates are sorted by priority. Check to see
        // whether the higher priority candidates (and hence at
        // the front of the vec) have satisfied all their match
        // pairs.
        let fully_matched =
            candidates.iter().take_while(|c| c.match_pairs.is_empty()).count();
        debug!("match_candidates: {:?} candidates fully matched", fully_matched);
        let mut unmatched_candidates = candidates.split_off(fully_matched);

        let fully_matched_with_guard =
            candidates.iter().take_while(|c| c.guard.is_some()).count();

        let unreachable_candidates = if fully_matched_with_guard + 1 < candidates.len() {
            candidates.split_off(fully_matched_with_guard + 1)
        } else {
            vec![]
        };

        for candidate in candidates {
            // If so, apply any bindings, test the guard (if any), and
            // branch to the arm.
            if let Some(b) = self.bind_and_guard_matched_candidate(block, arm_blocks, candidate) {
                block = b;
            } else {
                // if None is returned, then any remaining candidates
                // are unreachable (at least not through this path).
                // Link them with false edges.
                debug!("match_candidates: add false edges for unreachable {:?} and unmatched {:?}",
                       unreachable_candidates, unmatched_candidates);
                for candidate in unreachable_candidates {
                    let source_info = self.source_info(candidate.span);
                    let target = self.cfg.start_new_block();
                    if let Some(otherwise) = self.bind_and_guard_matched_candidate(target,
                                                                                   arm_blocks,
                                                                                   candidate) {
                        self.cfg.terminate(otherwise, source_info, TerminatorKind::Unreachable);
                    }
                }

                if unmatched_candidates.is_empty() {
                    return vec![]
                } else {
                    let target = self.cfg.start_new_block();
                    return self.match_candidates(span, arm_blocks, unmatched_candidates, target);
                }
            }
        }

        // If there are no candidates that still need testing, we're done.
        // Since all matches are exhaustive, execution should never reach this point.
        if unmatched_candidates.is_empty() {
            return vec![block];
        }

        // Test candidates where possible.
        let (otherwise, tested_candidates) =
            self.test_candidates(span, arm_blocks, &unmatched_candidates, block);

        // If the target candidates were exhaustive, then we are done.
        // But for borrowck we continue building the decision tree.

        // If all candidates were sorted into `target_candidates` somewhere, then
        // the initial set was inexhaustive.
        let untested_candidates = unmatched_candidates.split_off(tested_candidates);
        if untested_candidates.len() == 0 {
            return otherwise;
        }

        // Otherwise, let's process those remaining candidates.
        let join_block = self.join_otherwise_blocks(span, otherwise);
        self.match_candidates(span, arm_blocks, untested_candidates, join_block)
    }

    fn join_otherwise_blocks(&mut self,
                             span: Span,
                             mut otherwise: Vec<BasicBlock>)
                             -> BasicBlock
    {
        let source_info = self.source_info(span);
        otherwise.sort();
        otherwise.dedup(); // variant switches can introduce duplicate target blocks
        if otherwise.len() == 1 {
            otherwise[0]
        } else {
            let join_block = self.cfg.start_new_block();
            for block in otherwise {
                self.cfg.terminate(block, source_info,
                                   TerminatorKind::Goto { target: join_block });
            }
            join_block
        }
    }

    /// This is the most subtle part of the matching algorithm. At
    /// this point, the input candidates have been fully simplified,
    /// and so we know that all remaining match-pairs require some
    /// sort of test. To decide what test to do, we take the highest
    /// priority candidate (the first one in the list) and extract the
    /// first match-pair from the list. From this we decide what kind
    /// of test is needed using `test`, defined in the `test` module.
    ///
    /// *Note:* taking the first match pair is somewhat arbitrary, and
    /// we might do better here by choosing more carefully what to
    /// test.
    ///
    /// For example, consider the following possible match-pairs:
    ///
    /// 1. `x @ Some(P)` -- we will do a `Switch` to decide what variant `x` has
    /// 2. `x @ 22` -- we will do a `SwitchInt`
    /// 3. `x @ 3..5` -- we will do a range test
    /// 4. etc.
    ///
    /// Once we know what sort of test we are going to perform, this
    /// test may also help us with other candidates. So we walk over
    /// the candidates (from high to low priority) and check. This
    /// gives us, for each outcome of the test, a transformed list of
    /// candidates. For example, if we are testing the current
    /// variant of `x.0`, and we have a candidate `{x.0 @ Some(v), x.1
    /// @ 22}`, then we would have a resulting candidate of `{(x.0 as
    /// Some).0 @ v, x.1 @ 22}`. Note that the first match-pair is now
    /// simpler (and, in fact, irrefutable).
    ///
    /// But there may also be candidates that the test just doesn't
    /// apply to. The classical example involves wildcards:
    ///
    /// ```
    /// # let (x, y, z) = (true, true, true);
    /// match (x, y, z) {
    ///     (true, _, true) => true,    // (0)
    ///     (_, true, _) => true,       // (1)
    ///     (false, false, _) => false, // (2)
    ///     (true, _, false) => false,  // (3)
    /// }
    /// ```
    ///
    /// In that case, after we test on `x`, there are 2 overlapping candidate
    /// sets:
    ///
    /// - If the outcome is that `x` is true, candidates 0, 1, and 3
    /// - If the outcome is that `x` is false, candidates 1 and 2
    ///
    /// Here, the traditional "decision tree" method would generate 2
    /// separate code-paths for the 2 separate cases.
    ///
    /// In some cases, this duplication can create an exponential amount of
    /// code. This is most easily seen by noticing that this method terminates
    /// with precisely the reachable arms being reachable - but that problem
    /// is trivially NP-complete:
    ///
    /// ```rust
    /// match (var0, var1, var2, var3, ..) {
    ///     (true, _, _, false, true, ...) => false,
    ///     (_, true, true, false, _, ...) => false,
    ///     (false, _, false, false, _, ...) => false,
    ///     ...
    ///     _ => true
    /// }
    /// ```
    ///
    /// Here the last arm is reachable only if there is an assignment to
    /// the variables that does not match any of the literals. Therefore,
    /// compilation would take an exponential amount of time in some cases.
    ///
    /// That kind of exponential worst-case might not occur in practice, but
    /// our simplistic treatment of constants and guards would make it occur
    /// in very common situations - for example #29740:
    ///
    /// ```rust
    /// match x {
    ///     "foo" if foo_guard => ...,
    ///     "bar" if bar_guard => ...,
    ///     "baz" if baz_guard => ...,
    ///     ...
    /// }
    /// ```
    ///
    /// Here we first test the match-pair `x @ "foo"`, which is an `Eq` test.
    ///
    /// It might seem that we would end up with 2 disjoint candidate
    /// sets, consisting of the first candidate or the other 3, but our
    /// algorithm doesn't reason about "foo" being distinct from the other
    /// constants; it considers the latter arms to potentially match after
    /// both outcomes, which obviously leads to an exponential amount
    /// of tests.
    ///
    /// To avoid these kinds of problems, our algorithm tries to ensure
    /// the amount of generated tests is linear. When we do a k-way test,
    /// we return an additional "unmatched" set alongside the obvious `k`
    /// sets. When we encounter a candidate that would be present in more
    /// than one of the sets, we put it and all candidates below it into the
    /// "unmatched" set. This ensures these `k+1` sets are disjoint.
    ///
    /// After we perform our test, we branch into the appropriate candidate
    /// set and recurse with `match_candidates`. These sub-matches are
    /// obviously inexhaustive - as we discarded our otherwise set - so
    /// we set their continuation to do `match_candidates` on the
    /// "unmatched" set (which is again inexhaustive).
    ///
    /// If you apply this to the above test, you basically wind up
    /// with an if-else-if chain, testing each candidate in turn,
    /// which is precisely what we want.
    ///
    /// In addition to avoiding exponential-time blowups, this algorithm
    /// also has the nice property that each guard and arm is only generated
    /// once.
    fn test_candidates<'pat>(&mut self,
                             span: Span,
                             arm_blocks: &mut ArmBlocks,
                             candidates: &[Candidate<'pat, 'tcx>],
                             block: BasicBlock)
                             -> (Vec<BasicBlock>, usize)
    {
        // extract the match-pair from the highest priority candidate
        let match_pair = &candidates.first().unwrap().match_pairs[0];
        let mut test = self.test(match_pair);

        // most of the time, the test to perform is simply a function
        // of the main candidate; but for a test like SwitchInt, we
        // may want to add cases based on the candidates that are
        // available
        match test.kind {
            TestKind::SwitchInt { switch_ty, ref mut options, ref mut indices } => {
                for candidate in candidates.iter() {
                    if !self.add_cases_to_switch(&match_pair.place,
                                                 candidate,
                                                 switch_ty,
                                                 options,
                                                 indices) {
                        break;
                    }
                }
            }
            TestKind::Switch { adt_def: _, ref mut variants} => {
                for candidate in candidates.iter() {
                    if !self.add_variants_to_switch(&match_pair.place,
                                                    candidate,
                                                    variants) {
                        break;
                    }
                }
            }
            _ => { }
        }

        // perform the test, branching to one of N blocks. For each of
        // those N possible outcomes, create an (initially empty)
        // vector of candidates. Those are the candidates that still
        // apply if the test has that particular outcome.
        debug!("match_candidates: test={:?} match_pair={:?}", test, match_pair);
        let target_blocks = self.perform_test(block, &match_pair.place, &test);
        let mut target_candidates: Vec<_> = (0..target_blocks.len()).map(|_| vec![]).collect();

        // Sort the candidates into the appropriate vector in
        // `target_candidates`. Note that at some point we may
        // encounter a candidate where the test is not relevant; at
        // that point, we stop sorting.
        let tested_candidates =
            candidates.iter()
                      .take_while(|c| self.sort_candidate(&match_pair.place,
                                                          &test,
                                                          c,
                                                          &mut target_candidates))
                      .count();
        assert!(tested_candidates > 0); // at least the last candidate ought to be tested
        debug!("tested_candidates: {}", tested_candidates);
        debug!("untested_candidates: {}", candidates.len() - tested_candidates);

        // For each outcome of test, process the candidates that still
        // apply. Collect a list of blocks where control flow will
        // branch if one of the `target_candidate` sets is not
        // exhaustive.
        let otherwise: Vec<_> =
            target_blocks.into_iter()
                         .zip(target_candidates)
                         .flat_map(|(target_block, target_candidates)| {
                             self.match_candidates(span,
                                                   arm_blocks,
                                                   target_candidates,
                                                   target_block)
                         })
                         .collect();

        (otherwise, tested_candidates)
    }

    /// Initializes each of the bindings from the candidate by
    /// moving/copying/ref'ing the source as appropriate. Tests the
    /// guard, if any, and then branches to the arm. Returns the block
    /// for the case where the guard fails.
    ///
    /// Note: we check earlier that if there is a guard, there cannot
    /// be move bindings. This isn't really important for the
    /// self-consistency of this fn, but the reason for it should be
    /// clear: after we've done the assignments, if there were move
    /// bindings, further tests would be a use-after-move (which would
    /// in turn be detected by the borrowck code that runs on the
    /// MIR).
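    ///
    /// (As an assumed illustration: an arm like `x if x.len() > 0 => x`
    /// matching on a `String` would be a by-move binding combined with a
    /// guard, which match checking rejects before we ever get here.)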
    fn bind_and_guard_matched_candidate<'pat>(&mut self,
                                              mut block: BasicBlock,
                                              arm_blocks: &mut ArmBlocks,
                                              candidate: Candidate<'pat, 'tcx>)
                                              -> Option<BasicBlock> {
        debug!("bind_and_guard_matched_candidate(block={:?}, candidate={:?})",
               block, candidate);

        debug_assert!(candidate.match_pairs.is_empty());

        let arm_block = arm_blocks.blocks[candidate.arm_index];
        let candidate_source_info = self.source_info(candidate.span);

        self.cfg.terminate(block, candidate_source_info,
                           TerminatorKind::Goto { target: candidate.pre_binding_block });

        block = self.cfg.start_new_block();
        self.cfg.terminate(candidate.pre_binding_block, candidate_source_info,
                           TerminatorKind::FalseEdges {
                               real_target: block,
                               imaginary_targets:
                                   vec![candidate.next_candidate_pre_binding_block],
                           });

        // rust-lang/rust#27282: The `autoref` business deserves some
        // explanation here.
        //
        // The intent of the `autoref` flag is that when it is true,
        // then any pattern bindings of type T will map to a `&T`
        // within the context of the guard expression, but will
        // continue to map to a `T` in the context of the arm body. To
        // avoid surfacing this distinction in the user source code
        // (which would be a severe change to the language and require
        // far more revision to the compiler), when `autoref` is true,
        // then any occurrence of the identifier in the guard
        // expression will automatically get a deref op applied to it.
        //
        // So an input like:
        //
        // ```
        // let place = Foo::new();
        // match place { foo if inspect(foo)
        //     => feed(foo), ... }
        // ```
        //
        // will be treated as if it were really something like:
        //
        // ```
        // let place = Foo::new();
        // match place { Foo { .. } if { let tmp1 = &place; inspect(*tmp1) }
        //     => { let tmp2 = place; feed(tmp2) }, ... }
        // ```
        //
        // And an input like:
        //
        // ```
        // let place = Foo::new();
        // match place { ref mut foo if inspect(foo)
        //     => feed(foo), ... }
        // ```
        //
        // will be treated as if it were really something like:
        //
        // ```
        // let place = Foo::new();
        // match place { Foo { .. } if { let tmp1 = & &mut place; inspect(*tmp1) }
        //     => { let tmp2 = &mut place; feed(tmp2) }, ... }
        // ```
        //
        // In short, any pattern binding will always look like *some*
        // kind of `&T` within the guard at least in terms of how the
        // MIR-borrowck views it, and this will ensure that guard
        // expressions cannot mutate the match inputs via such
        // bindings. (It also ensures that guard expressions can at
        // most *copy* values from such bindings; non-Copy things
        // cannot be moved via pattern bindings in guard expressions.)
        //
        // ----
        //
        // Implementation notes (under assumption `autoref` is true).
        //
        // To encode the distinction above, we must inject the
        // temporaries `tmp1` and `tmp2`.
        //
        // There are two cases of interest: binding by-value, and binding by-ref.
        //
        // 1. Binding by-value: Things are simple.
        //
        //    * Establishing `tmp1` creates a reference into the
        //      matched place. This code is emitted by
        //      bind_matched_candidate_for_guard.
        //
        //    * `tmp2` is only initialized "lazily", after we have
        //      checked the guard. Thus, the code that can trigger
        //      moves out of the candidate can only fire after the
        //      guard evaluated to true. This initialization code is
        //      emitted by bind_matched_candidate_for_arm_body.
        //
        // 2. Binding by-reference: Things are tricky.
        //
        //    * Here, the guard expression wants a `&&` or `&&mut`
        //      into the original input. This means we need to borrow
        //      a reference that we do not immediately have at hand
        //      (because all we have is the places associated with the
        //      match input itself; it is up to us to create a place
        //      holding a `&` or `&mut` that we can then borrow).

        let autoref = self.hir.tcx().all_pat_vars_are_implicit_refs_within_guards();
        if let Some(guard) = candidate.guard {
            if autoref {
                self.bind_matched_candidate_for_guard(block, &candidate.bindings);
                let guard_frame = GuardFrame {
                    locals: candidate.bindings.iter()
                        .map(|b| GuardFrameLocal::new(b.var_id, b.binding_mode))
                        .collect(),
                };
                debug!("Entering guard building context: {:?}", guard_frame);
                self.guard_context.push(guard_frame);
            } else {
                self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
            }

            // the block to branch to if the guard fails; if there is no
            // guard, this block is simply unreachable
            let guard = self.hir.mirror(guard);
            let source_info = self.source_info(guard.span);
            let cond = unpack!(block = self.as_local_operand(block, guard));
            if autoref {
                let guard_frame = self.guard_context.pop().unwrap();
                debug!("Exiting guard building context with locals: {:?}", guard_frame);
            }

            let false_edge_block = self.cfg.start_new_block();

            // We want to ensure that the matched candidates are bound
            // after we have confirmed this candidate *and* any
            // associated guard; binding them on `block` is too soon,
            // because that would be before we've checked the result
            // from the guard.
            //
            // But binding them on `arm_block` is *too late*, because
            // then all of the candidates for a single arm would be
            // bound in the same place; that would cause a case like:
            //
            // ```rust
            // match (30, 2) {
            //     (mut x, 1) | (2, mut x) if { true } => { ... }
            //     ...                        // ^^^^^^^ (this is `arm_block`)
            // }
            // ```
            //
            // would yield an `arm_block` something like:
            //
            // ```
            // StorageLive(_4);        // _4 is `x`
            // _4 = &mut (_1.0: i32);  // this is handling `(mut x, 1)` case
            // _4 = &mut (_1.1: i32);  // this is handling `(2, mut x)` case
            // ```
            //
            // and that is clearly not correct.
            let post_guard_block = self.cfg.start_new_block();
            self.cfg.terminate(block, source_info,
                               TerminatorKind::if_(self.hir.tcx(), cond, post_guard_block,
                                                   false_edge_block));

            if autoref {
                self.bind_matched_candidate_for_arm_body(post_guard_block, &candidate.bindings);
            }

            self.cfg.terminate(post_guard_block, source_info,
                               TerminatorKind::Goto { target: arm_block });

            let otherwise = self.cfg.start_new_block();

            self.cfg.terminate(false_edge_block, source_info,
                               TerminatorKind::FalseEdges {
                                   real_target: otherwise,
                                   imaginary_targets:
                                       vec![candidate.next_candidate_pre_binding_block],
                               });
            Some(otherwise)
        } else {
            // (Here, it is not too early to bind the matched
            // candidate on `block`, because there is no guard result
            // that we have to inspect before we bind them.)
            self.bind_matched_candidate_for_arm_body(block, &candidate.bindings);
            self.cfg.terminate(block, candidate_source_info,
                               TerminatorKind::Goto { target: arm_block });
            None
        }
    }

    // Only called when all_pat_vars_are_implicit_refs_within_guards,
    // and thus all code/comments assume we are in that context.
    fn bind_matched_candidate_for_guard(&mut self,
                                        block: BasicBlock,
                                        bindings: &[Binding<'tcx>]) {
        debug!("bind_matched_candidate_for_guard(block={:?}, bindings={:?})",
               block, bindings);

        // Assign each of the bindings. Since we are binding for a
        // guard expression, this will never trigger moves out of the
        // candidate.
        let re_empty = self.hir.tcx().types.re_empty;
        for binding in bindings {
            let source_info = self.source_info(binding.span);

            // For each pattern ident P of type T, `ref_for_guard` is
            // a reference R: &T pointing to the location matched by
            // the pattern, and every occurrence of P within a guard
            // denotes *R.
            let ref_for_guard = self.storage_live_binding(
                block, binding.var_id, binding.span, RefWithinGuard);
            // Question: Why schedule drops if bindings are all
            // shared-&'s? Answer: Because schedule_drop_for_binding
            // also emits StorageDead's for those locals.
            self.schedule_drop_for_binding(binding.var_id, binding.span, RefWithinGuard);
            match binding.binding_mode {
                BindingMode::ByValue => {
                    let rvalue = Rvalue::Ref(re_empty, BorrowKind::Shared, binding.source.clone());
                    self.cfg.push_assign(block, source_info, &ref_for_guard, rvalue);
                }
                BindingMode::ByRef(region, borrow_kind) => {
                    // Tricky business: For `ref id` and `ref mut id`
                    // patterns, we want `id` within the guard to
                    // correspond to a temp of type `& &T` or `& &mut
                    // T` (i.e. a "borrow of a borrow") that is
                    // implicitly dereferenced.
                    //
                    // To borrow a borrow, we first need a place for
                    // that inner borrow to live in. So, create a temp
                    // for the inner borrow, and then take a reference
                    // to it.
                    //
                    // Note: the temp created here is *not* the one
                    // used by the arm body itself. This eases
                    // observing two-phase borrow restrictions.
                    let val_for_guard = self.storage_live_binding(
                        block, binding.var_id, binding.span, ValWithinGuard);
                    self.schedule_drop_for_binding(binding.var_id, binding.span, ValWithinGuard);

                    // rust-lang/rust#27282: We reuse the two-phase
                    // borrow infrastructure so that the mutable
                    // borrow (whose mutability is *unusable* within
                    // the guard) does not conflict with the implicit
                    // borrow of the whole match input. See additional
                    // discussion on rust-lang/rust#49870.
                    let borrow_kind = match borrow_kind {
                        BorrowKind::Shared | BorrowKind::Unique => borrow_kind,
                        BorrowKind::Mut { .. } => BorrowKind::Mut { allow_two_phase_borrow: true },
                    };
                    let rvalue = Rvalue::Ref(region, borrow_kind, binding.source.clone());
                    self.cfg.push_assign(block, source_info, &val_for_guard, rvalue);
                    let rvalue = Rvalue::Ref(region, BorrowKind::Shared, val_for_guard);
                    self.cfg.push_assign(block, source_info, &ref_for_guard, rvalue);
                }
            }
        }
    }

    fn bind_matched_candidate_for_arm_body(&mut self,
                                           block: BasicBlock,
                                           bindings: &[Binding<'tcx>]) {
        debug!("bind_matched_candidate_for_arm_body(block={:?}, bindings={:?})", block, bindings);

        // Assign each of the bindings. This may trigger moves out of the candidate.
        for binding in bindings {
            let source_info = self.source_info(binding.span);
            let local = self.storage_live_binding(block, binding.var_id, binding.span,
                                                  OutsideGuard);
            self.schedule_drop_for_binding(binding.var_id, binding.span, OutsideGuard);
            let rvalue = match binding.binding_mode {
                BindingMode::ByValue => {
                    Rvalue::Use(self.consume_by_copy_or_move(binding.source.clone()))
                }
                BindingMode::ByRef(region, borrow_kind) => {
                    Rvalue::Ref(region, borrow_kind, binding.source.clone())
                }
            };
            self.cfg.push_assign(block, source_info, &local, rvalue);
        }
    }

    /// Each binding (`ref mut var`/`ref var`/`mut var`/`var`, where
    /// the bound `var` has type `T` in the arm body) in a pattern
    /// maps to *two* locals. The first local is a binding for
    /// occurrences of `var` in the guard, which will all have type
    /// `&T`. The second local is a binding for occurrences of `var`
    /// in the arm body, which will have type `T`.
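    ///
    /// As a minimal sketch (assumed, for orientation only): given
    ///
    /// ```
    /// # let opt = Some(3);
    /// match opt {
    ///     Some(x) if x > 2 => x + 1,
    ///     Some(x) => x,
    ///     None => 0,
    /// };
    /// ```
    ///
    /// occurrences of `x` inside the guard `x > 2` are compiled against the
    /// `&i32`-typed local (and implicitly dereferenced), while occurrences
    /// of `x` in the arm bodies use the `i32`-typed local.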
    fn declare_binding(&mut self,
                       source_info: SourceInfo,
                       visibility_scope: SourceScope,
                       mutability: Mutability,
                       name: Name,
                       mode: BindingMode,
                       var_id: NodeId,
                       var_ty: Ty<'tcx>,
                       has_guard: ArmHasGuard)
    {
        debug!("declare_binding(var_id={:?}, name={:?}, mode={:?}, var_ty={:?}, \
                visibility_scope={:?}, source_info={:?})",
               var_id, name, mode, var_ty, visibility_scope, source_info);

        let tcx = self.hir.tcx();
        let binding_mode = match mode {
            BindingMode::ByValue => ty::BindingMode::BindByValue(mutability.into()),
            BindingMode::ByRef { .. } => ty::BindingMode::BindByReference(mutability.into()),
        };
        let local = LocalDecl::<'tcx> {
            mutability,
            ty: var_ty.clone(),
            name: Some(name),
            source_info,
            visibility_scope,
            internal: false,
            is_user_variable: Some(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
                binding_mode,
                // hypothetically, `visit_bindings` could try to unzip
                // an outermost hir::Ty as we descend, matching up
                // idents in pat; but complex w/ unclear UI payoff.
                // Instead, just abandon providing diagnostic info.
                opt_ty_info: None,
            }))),
        };
        let for_arm_body = self.local_decls.push(local.clone());
        let locals = if has_guard.0 && tcx.all_pat_vars_are_implicit_refs_within_guards() {
            let val_for_guard = self.local_decls.push(local);
            let ref_for_guard = self.local_decls.push(LocalDecl::<'tcx> {
                mutability,
                ty: tcx.mk_imm_ref(tcx.types.re_empty, var_ty),
                name: Some(name),
                source_info,
                visibility_scope,
                // FIXME: should these secretly injected ref_for_guard's be marked as `internal`?
                internal: false,
                is_user_variable: None,
            });
            LocalsForNode::Three { val_for_guard, ref_for_guard, for_arm_body }
        } else {
            LocalsForNode::One(for_arm_body)
        };
        debug!("declare_binding: vars={:?}", locals);
        self.var_indices.insert(var_id, locals);
    }
}