// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! # Compilation of match statements
//!
//! I will endeavor to explain the code as best I can. I have only a loose
//! understanding of some parts of it.
//!
//! ## Matching
//!
//! The basic state of the code is maintained in an array `m` of `Match`
//! objects. Each `Match` describes some list of patterns, all of which must
//! match against the current list of values. If those patterns match, then
//! the arm listed in the match is the correct arm. A given arm may have
//! multiple corresponding match entries, one for each alternative that
//! remains. As we proceed, these sets of matches are adjusted by the various
//! `enter_XXX()` functions, each of which adjusts the set of options given
//! some information about the value which has been matched.
//!
//! So, initially, there is one value and N matches, each of which have one
//! constituent pattern. N here is usually the number of arms but may be
//! greater, if some arms have multiple alternatives. For example, here:
//!
//!     enum Foo { A, B(isize), C(usize, usize) }
//!     match foo {
//!         A => ...,
//!         B(x) => ...,
//!         C(1, 2) => ...,
//!         C(..) => ...
//!     }
//!
//! The value would be `foo`. There would be four matches, each of which
//! contains one pattern (and, in one case, a guard). We could collect the
//! various options and then compile the code for the case where `foo` is an
//! `A`, a `B`, and a `C`. When we generate the code for `C`, we would (1)
//! drop the two matches that do not match a `C` and (2) expand the other two
//! into two patterns each. In the first case, the two patterns would be `1`
//! and `2`, and in the second case the `..` would be expanded into `_` and
//! `_`. The two values are of course the arguments to `C`.
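//!
//! Concretely, after specializing on `C`, the remaining pattern matrix would
//! look roughly like this (an illustrative sketch of the idea, not the real
//! data structure):
//!
//!     [1, 2]  => arm for C(1, 2)
//!     [_, _]  => arm for C(..)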
//!
//! Here is a quick guide to the various functions:
//!
//! - `compile_submatch()`: The main workhorse. It takes a list of values and
//!   a list of matches and finds the various possibilities that could occur.
//!
//! - `enter_XXX()`: modifies the list of matches based on some information
//!   about the value that has been matched. For example,
//!   `enter_rec_or_struct()` adjusts the matches given that a record or
//!   struct has been matched. This is an infallible pattern, so *all* of the
//!   matches must be either wildcards or record/struct patterns. `enter_opt()`
//!   handles the fallible cases, and it is correspondingly more complex.
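//!
//! As a rough sketch of what the `enter_XXX()` transformation amounts to,
//! here is a simplified toy model (hypothetical types, not the real `Match`
//! machinery) of specializing one column against a literal:
//!
//!     #[derive(Clone, Copy)]
//!     enum Pat { Wild, Lit(i32) }
//!
//!     /// Keep only the rows whose pattern in `col` can match `lit`,
//!     /// dropping that column from each surviving row.
//!     fn specialize(rows: &[Vec<Pat>], col: usize, lit: i32) -> Vec<Vec<Pat>> {
//!         rows.iter().filter_map(|row| {
//!             match row[col] {
//!                 Pat::Wild => {}
//!                 Pat::Lit(l) if l == lit => {}
//!                 Pat::Lit(_) => return None,
//!             }
//!             let mut rest = row[..col].to_vec();
//!             rest.extend_from_slice(&row[col + 1..]);
//!             Some(rest)
//!         }).collect()
//!     }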
//!
//! ## Bindings
//!
//! We store information about the bound variables for each arm as part of the
//! per-arm `ArmData` struct. There is a mapping from identifiers to
//! `BindingInfo` structs. These structs contain the mode/id/type of the
//! binding, but they also contain an LLVM value which points at an alloca
//! called `llmatch`. For by-value bindings that are Copy, we also create
//! an extra alloca that we copy the matched value into, so that any changes
//! we make to our copy are not reflected in the original and vice versa.
//! We don't do this if it's a move, since the original value can't be used
//! afterwards, which lets us cheat and skip the extra alloca.
//!
//! The `llmatch` binding always stores a pointer into the value being matched
//! which points at the data for the binding. If the value being matched has
//! type `T`, then `llmatch` will point at an alloca of type `T*` (and hence
//! `llmatch` has type `T**`). So, if you have a pattern like:
//!
//!     let a: A = ...;
//!     let b: B = ...;
//!     match (a, b) { (ref c, d) => { ... } }
//!
//! For `c` and `d`, we would generate allocas of type `C*` and `D*`
//! respectively. These are called the `llmatch`. As we match, when we come
//! up against an identifier, we store the current pointer into the
//! corresponding alloca.
//!
//! Once a pattern is completely matched, and assuming that there is no guard,
//! we will branch to a block that leads to the body itself. For any by-value
//! bindings, this block will first load the ptr from `llmatch` (the one of
//! type `D*`) and then load a second time to get the actual value (the one of
//! type `D`). For by-ref bindings, the value of the local variable is simply
//! the first alloca.
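//!
//! In other words (a hand-written sketch of the loads involved; the names
//! here are illustrative):
//!
//!     // llmatch_d: D**  -- stack slot holding a pointer into the match input
//!     // by-value `d`:  d = **llmatch_d   (load the ptr, then load through it)
//!     // by-ref `c`:    c =  *llmatch_c   (the stored pointer itself)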
//!
//! So, for the example above, we would generate a setup kind of like this:
//!
//!        +-------+
//!        | Entry |
//!        +-------+
//!            |
//!        +--------------------------------------------+
//!        | llmatch_c = (addr of first half of tuple)  |
//!        | llmatch_d = (addr of second half of tuple) |
//!        +--------------------------------------------+
//!            |
//!        +--------------------------------------+
//!        | *llbinding_d = **llmatch_d           |
//!        +--------------------------------------+
//!
//! If there is a guard, the situation is slightly different, because we must
//! execute the guard code. Moreover, we need to do so once for each of the
//! alternatives that lead to the arm, because if the guard fails, they may
//! have different points from which to continue the search. Therefore, in that
//! case, we generate code that looks more like:
//!
//!        +-------+
//!        | Entry |
//!        +-------+
//!            |
//!        +--------------------------------------------+
//!        | llmatch_c = (addr of first half of tuple)  |
//!        | llmatch_d = (addr of second half of tuple) |
//!        +--------------------------------------------+
//!            |
//!        +-------------------------------------------------+
//!        | *llbinding_d = **llmatch_d                      |
//!        | check condition                                 |
//!        | if false { goto next case }                     |
//!        | if true { goto body }                           |
//!        +-------------------------------------------------+
//!
//! The handling for the cleanups is a bit... sensitive. Basically, the body
//! is the one that invokes `add_clean()` for each binding. During the guard
//! evaluation, we add temporary cleanups and revoke them after the guard is
//! evaluated (it could fail, after all). Note that guards and moves are
//! just plain incompatible.
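//!
//! For example (an illustrative arm; `guard` stands in for any boolean
//! expression), a single guarded arm with two alternatives may run its
//! guard once per alternative, which is why each alternative needs its own
//! continuation point:
//!
//!     match x {
//!         (0, _) | (_, 0) if guard(x) => { ... }
//!         _ => { ... }
//!     }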
//!
//! Some relevant helper functions that manage bindings:
//! - `create_bindings_map()`
//! - `insert_lllocals()`
//!
//!
//! ## Notes on vector pattern matching.
//!
//! Vector pattern matching is surprisingly tricky. The problem is that
//! the structure of the vector isn't fully known, and slice matches
//! can be done on subparts of it.
//!
//! The way that vector pattern matches are dealt with, then, is as
//! follows. First, we make the actual condition associated with a
//! vector pattern simply a vector length comparison. So the pattern
//! [1, .. x] gets the condition "vec len >= 1", and the pattern
//! [.. x] gets the condition "vec len >= 0". The problem here is that
//! the condition "vec len >= 1" holding clearly does not mean that
//! only a pattern with exactly that condition will match. It may well
//! be the case that a condition holds, but none of the patterns
//! matching that condition match; to deal with this, when doing vector
//! length matches, we have match failures proceed to the next condition
//! to check.
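//!
//! Hand-expanded, the condition lowering amounts to something like this
//! (an illustrative sketch in ordinary Rust; the function name is made up):
//!
//!     // the compiled form of `[1, .. x]`: a length test, then element tests
//!     fn matches_prefix(v: &[i32]) -> bool {
//!         v.len() >= 1 && v[0] == 1
//!     }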
//!
//! There are a couple more subtleties to deal with. While the "actual"
//! condition associated with vector length tests is simply a test on
//! the vector length, the actual vec_len Opt entry contains more
//! information used to restrict which matches are associated with it.
//! To ensure that all matches in a submatch are matching against the
//! same values from inside the vector, they are split up by how many
//! elements they match at the front and at the back of the vector. In
//! order to make sure that arms are properly checked in order, even
//! with the overmatching conditions, each vec_len Opt entry is
//! associated with a range of matches.
//! Consider the following:
//!
//!     match &[1, 2, 3] {
//!         [1, 1, .. _] => 0,
//!         [1, 2, 2, .. _] => 1,
//!         [1, 2, 3, .. _] => 2,
//!         [1, 2, .. _] => 3,
//!         _ => 4
//!     }
//!
//! The proper arm to match is arm 2, but arms 0 and 3 both have the
//! condition "len >= 2". If arm 3 were lumped in with arm 0, then the
//! wrong branch would be taken. Instead, vec_len Opts are associated
//! with a contiguous range of matches that have the same "shape".
//! This is sort of ugly and requires a bunch of special handling of
//! vec_len options.
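//!
//! For the example above, the grouping by "shape" (how many elements are
//! matched at the front and at the back) works out roughly as follows (a
//! sketch of the idea, not the actual representation):
//!
//!     [1, 1, .. _]     shape (2 front, 0 back)  -> range covering arm 0 only
//!     [1, 2, 2, .. _]  shape (3 front, 0 back)  -> range covering arms 1-2
//!     [1, 2, 3, .. _]  shape (3 front, 0 back)     (same range as arm 1)
//!     [1, 2, .. _]     shape (2 front, 0 back)  -> a separate range for arm 3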

pub use self::BranchKind::*;
pub use self::OptResult::*;
pub use self::TransBindingMode::*;
use self::Opt::*;
use self::FailureHandler::*;

use llvm::{ValueRef, BasicBlockRef};
use rustc_const_eval::check_match::{self, StaticInliner};
use rustc_const_eval::{compare_lit_exprs, eval_const_expr};
use rustc::hir::def::{Def, DefMap};
use rustc::hir::def_id::DefId;
use middle::expr_use_visitor as euv;
use rustc::infer;
use middle::lang_items::StrEqFnLangItem;
use middle::mem_categorization as mc;
use middle::mem_categorization::Categorization;
use rustc::hir::pat_util::*;
use rustc::ty::subst::Substs;
use adt;
use base::*;
use build::{AddCase, And, Br, CondBr, GEPi, InBoundsGEP, Load, PointerCast};
use build::{Not, Store, Sub, add_comment};
use build;
use callee::{Callee, ArgVals};
use cleanup::{self, CleanupMethods, DropHintMethods};
use common::*;
use consts;
use datum::*;
use debuginfo::{self, DebugLoc, ToDebugLoc};
use expr::{self, Dest};
use monomorphize;
use tvec;
use type_of;
use Disr;
use value::Value;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::traits::ProjectionMode;
use session::config::NoDebugInfo;
use util::common::indenter;
use util::nodemap::FnvHashMap;
use util::ppaux;

use std;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt;
use std::rc::Rc;
use rustc::hir::{self, PatKind};
use syntax::ast::{self, DUMMY_NODE_ID, NodeId};
use syntax::codemap::Span;
use rustc::hir::fold::Folder;
use syntax::ptr::P;

#[derive(Copy, Clone, Debug)]
struct ConstantExpr<'a>(&'a hir::Expr);

impl<'a> ConstantExpr<'a> {
    fn eq(self, other: ConstantExpr<'a>, tcx: &TyCtxt) -> bool {
        match compare_lit_exprs(tcx, self.0, other.0) {
            Some(result) => result == Ordering::Equal,
            None => bug!("compare_lit_exprs: type mismatch"),
        }
    }
}

// An option identifying a branch (either a literal, an enum variant or a range)
#[derive(Debug)]
enum Opt<'a, 'tcx> {
    ConstantValue(ConstantExpr<'a>, DebugLoc),
    ConstantRange(ConstantExpr<'a>, ConstantExpr<'a>, DebugLoc),
    Variant(Disr, Rc<adt::Repr<'tcx>>, DefId, DebugLoc),
    SliceLengthEqual(usize, DebugLoc),
    SliceLengthGreaterOrEqual(/* prefix length */ usize,
                              /* suffix length */ usize,
                              DebugLoc),
}

impl<'a, 'tcx> Opt<'a, 'tcx> {
    fn eq(&self, other: &Opt<'a, 'tcx>, tcx: &TyCtxt<'tcx>) -> bool {
        match (self, other) {
            (&ConstantValue(a, _), &ConstantValue(b, _)) => a.eq(b, tcx),
            (&ConstantRange(a1, a2, _), &ConstantRange(b1, b2, _)) => {
                a1.eq(b1, tcx) && a2.eq(b2, tcx)
            }
            (&Variant(a_disr, ref a_repr, a_def, _),
             &Variant(b_disr, ref b_repr, b_def, _)) => {
                a_disr == b_disr && *a_repr == *b_repr && a_def == b_def
            }
            (&SliceLengthEqual(a, _), &SliceLengthEqual(b, _)) => a == b,
            (&SliceLengthGreaterOrEqual(a1, a2, _),
             &SliceLengthGreaterOrEqual(b1, b2, _)) => {
                a1 == b1 && a2 == b2
            }
            _ => false
        }
    }

    fn trans<'blk>(&self, mut bcx: Block<'blk, 'tcx>) -> OptResult<'blk, 'tcx> {
        use consts::TrueConst::Yes;
        let _icx = push_ctxt("match::trans_opt");
        let ccx = bcx.ccx();
        match *self {
            ConstantValue(ConstantExpr(lit_expr), _) => {
                let lit_ty = bcx.tcx().node_id_to_type(lit_expr.id);
                let expr = consts::const_expr(ccx, &lit_expr, bcx.fcx.param_substs, None, Yes);
                let llval = match expr {
                    Ok((llval, _)) => llval,
                    Err(err) => bcx.ccx().sess().span_fatal(lit_expr.span, &err.description()),
                };
                let lit_datum = immediate_rvalue(llval, lit_ty);
                let lit_datum = unpack_datum!(bcx, lit_datum.to_appropriate_datum(bcx));
                SingleResult(Result::new(bcx, lit_datum.val))
            }
            ConstantRange(ConstantExpr(ref l1), ConstantExpr(ref l2), _) => {
                let l1 = match consts::const_expr(ccx, &l1, bcx.fcx.param_substs, None, Yes) {
                    Ok((l1, _)) => l1,
                    Err(err) => bcx.ccx().sess().span_fatal(l1.span, &err.description()),
                };
                let l2 = match consts::const_expr(ccx, &l2, bcx.fcx.param_substs, None, Yes) {
                    Ok((l2, _)) => l2,
                    Err(err) => bcx.ccx().sess().span_fatal(l2.span, &err.description()),
                };
                RangeResult(Result::new(bcx, l1), Result::new(bcx, l2))
            }
            Variant(disr_val, ref repr, _, _) => {
                SingleResult(Result::new(bcx, adt::trans_case(bcx, &repr, disr_val)))
            }
            SliceLengthEqual(length, _) => {
                SingleResult(Result::new(bcx, C_uint(ccx, length)))
            }
            SliceLengthGreaterOrEqual(prefix, suffix, _) => {
                LowerBound(Result::new(bcx, C_uint(ccx, prefix + suffix)))
            }
        }
    }

    fn debug_loc(&self) -> DebugLoc {
        match *self {
            ConstantValue(_, debug_loc) |
            ConstantRange(_, _, debug_loc) |
            Variant(_, _, _, debug_loc) |
            SliceLengthEqual(_, debug_loc) |
            SliceLengthGreaterOrEqual(_, _, debug_loc) => debug_loc
        }
    }
}

#[derive(Copy, Clone, PartialEq)]
pub enum BranchKind {
    NoBranch,
    Single,
    Switch,
    Compare,
    CompareSliceLength
}

pub enum OptResult<'blk, 'tcx: 'blk> {
    SingleResult(Result<'blk, 'tcx>),
    RangeResult(Result<'blk, 'tcx>, Result<'blk, 'tcx>),
    LowerBound(Result<'blk, 'tcx>)
}

#[derive(Clone, Copy, PartialEq)]
pub enum TransBindingMode {
    /// By-value binding for a copy type: copies from matched data
    /// into a fresh LLVM alloca.
    TrByCopy(/* llbinding */ ValueRef),

    /// By-value binding for a non-copy type where we copy into a
    /// fresh LLVM alloca; this most accurately reflects the language
    /// semantics (e.g. it properly handles overwrites of the matched
    /// input), but potentially injects an unwanted copy.
    TrByMoveIntoCopy(/* llbinding */ ValueRef),

    /// Binding a non-copy type by reference under the hood; this is
    /// a codegen optimization to avoid unnecessary memory traffic.
    TrByMoveRef,

    /// By-ref binding exposed in the original source input.
    TrByRef,
}

impl TransBindingMode {
    /// If binding by making a fresh copy, returns the alloca that it
    /// will copy into; otherwise None.
    fn alloca_if_copy(&self) -> Option<ValueRef> {
        match *self {
            TrByCopy(llbinding) | TrByMoveIntoCopy(llbinding) => Some(llbinding),
            TrByMoveRef | TrByRef => None,
        }
    }
}

/// Information about a pattern binding:
/// - `llmatch` is a pointer to a stack slot. The stack slot contains a
///   pointer into the value being matched. Hence, llmatch has type `T**`
///   where `T` is the value being matched.
/// - `trmode` is the trans binding mode
/// - `id` is the node id of the binding
/// - `ty` is the Rust type of the binding
#[derive(Clone, Copy)]
pub struct BindingInfo<'tcx> {
    pub llmatch: ValueRef,
    pub trmode: TransBindingMode,
    pub id: ast::NodeId,
    pub span: Span,
    pub ty: Ty<'tcx>,
}

type BindingsMap<'tcx> = FnvHashMap<ast::Name, BindingInfo<'tcx>>;

struct ArmData<'p, 'blk, 'tcx: 'blk> {
    bodycx: Block<'blk, 'tcx>,
    arm: &'p hir::Arm,
    bindings_map: BindingsMap<'tcx>
}

/// Info about Match.
/// If all `pats` are matched then arm `data` will be executed.
/// As we proceed, `bound_ptrs` is filled with pointers to the values to be
/// bound; these pointers are stored into the `llmatch` variables just before
/// the arm's `data` is executed.
struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> {
    pats: Vec<&'p hir::Pat>,
    data: &'a ArmData<'p, 'blk, 'tcx>,
    bound_ptrs: Vec<(ast::Name, ValueRef)>,
    // Thread along renamings done by the check_match::StaticInliner, so we can
    // map back to original NodeIds
    pat_renaming_map: Option<&'a FnvHashMap<(NodeId, Span), NodeId>>
}

impl<'a, 'p, 'blk, 'tcx> fmt::Debug for Match<'a, 'p, 'blk, 'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if ppaux::verbose() {
            // for many programs, this just takes too long to serialize
            write!(f, "{:?}", self.pats)
        } else {
            write!(f, "{} pats", self.pats.len())
        }
    }
}

fn has_nested_bindings(m: &[Match], col: usize) -> bool {
    for br in m {
        match br.pats[col].node {
            PatKind::Ident(_, _, Some(_)) => return true,
            _ => ()
        }
    }
    return false;
}

// As noted in `fn match_datum`, we should eventually pass around a
// `Datum<Lvalue>` for the `val`; but until we get to that point, this
// `MatchInput` struct will serve -- it has everything `Datum<Lvalue>`
// does except for the type field.
#[derive(Copy, Clone)]
pub struct MatchInput { val: ValueRef, lval: Lvalue }

impl<'tcx> Datum<'tcx, Lvalue> {
    pub fn match_input(&self) -> MatchInput {
        MatchInput {
            val: self.val,
            lval: self.kind,
        }
    }
}

impl fmt::Debug for MatchInput {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&Value(self.val), f)
    }
}

impl MatchInput {
    fn from_val(val: ValueRef) -> MatchInput {
        MatchInput {
            val: val,
            lval: Lvalue::new("MatchInput::from_val"),
        }
    }

    fn to_datum<'tcx>(self, ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
        Datum::new(self.val, ty, self.lval)
    }
}

fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                              m: &[Match<'a, 'p, 'blk, 'tcx>],
                                              col: usize,
                                              val: MatchInput)
                                              -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("expand_nested_bindings(bcx={}, m={:?}, col={}, val={:?})",
           bcx.to_str(), m, col, val);
    let _indenter = indenter();

    m.iter().map(|br| {
        let mut bound_ptrs = br.bound_ptrs.clone();
        let mut pat = br.pats[col];
        loop {
            pat = match pat.node {
                PatKind::Ident(_, ref path, Some(ref inner)) => {
                    bound_ptrs.push((path.node.name, val.val));
                    &inner
                },
                _ => break
            }
        }

        let mut pats = br.pats.clone();
        pats[col] = pat;
        Match {
            pats: pats,
            data: &br.data,
            bound_ptrs: bound_ptrs,
            pat_renaming_map: br.pat_renaming_map,
        }
    }).collect()
}

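/// Core helper shared by the `enter_XXX()` family: filters each match row
/// through the callback `e`, and records any name bindings that the pattern
/// in column `col` establishes for `val` along the way.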
fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
                                          dm: &RefCell<DefMap>,
                                          m: &[Match<'a, 'p, 'blk, 'tcx>],
                                          col: usize,
                                          val: MatchInput,
                                          mut e: F)
                                          -> Vec<Match<'a, 'p, 'blk, 'tcx>> where
    F: FnMut(&[&'p hir::Pat]) -> Option<Vec<&'p hir::Pat>>,
{
    debug!("enter_match(bcx={}, m={:?}, col={}, val={:?})",
           bcx.to_str(), m, col, val);
    let _indenter = indenter();

    m.iter().filter_map(|br| {
        e(&br.pats).map(|pats| {
            let this = br.pats[col];
            let mut bound_ptrs = br.bound_ptrs.clone();
            match this.node {
                PatKind::Ident(_, ref path, None) => {
                    if pat_is_binding(&dm.borrow(), &this) {
                        bound_ptrs.push((path.node.name, val.val));
                    }
                }
                PatKind::Vec(ref before, Some(ref slice), ref after) => {
                    if let PatKind::Ident(_, ref path, None) = slice.node {
                        let subslice_val = bind_subslice_pat(
                            bcx, this.id, val,
                            before.len(), after.len());
                        bound_ptrs.push((path.node.name, subslice_val));
                    }
                }
                _ => {}
            }
            Match {
                pats: pats,
                data: br.data,
                bound_ptrs: bound_ptrs,
                pat_renaming_map: br.pat_renaming_map,
            }
        })
    }).collect()
}

fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     dm: &RefCell<DefMap>,
                                     m: &[Match<'a, 'p, 'blk, 'tcx>],
                                     col: usize,
                                     val: MatchInput)
                                     -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_default(bcx={}, m={:?}, col={}, val={:?})",
           bcx.to_str(), m, col, val);
    let _indenter = indenter();

    // Collect all of the matches that can match against anything.
    enter_match(bcx, dm, m, col, val, |pats| {
        if pat_is_binding_or_wild(&dm.borrow(), &pats[col]) {
            let mut r = pats[..col].to_vec();
            r.extend_from_slice(&pats[col + 1..]);
            Some(r)
        } else {
            None
        }
    })
}

// <pcwalton> nmatsakis: what does enter_opt do?
// <pcwalton> in trans/match
// <pcwalton> trans/match.rs is like stumbling around in a dark cave
// <nmatsakis> pcwalton: the enter family of functions adjust the set of
//             patterns as needed
// <nmatsakis> yeah, at some point I kind of achieved some level of
//             understanding
// <nmatsakis> anyhow, they adjust the patterns given that something of that
//             kind has been found
// <nmatsakis> pcwalton: ok, right, so enter_XXX() adjusts the patterns, as I
//             said
// <nmatsakis> enter_match() kind of embodies the generic code
// <nmatsakis> it is provided with a function that tests each pattern to see
//             if it might possibly apply and so forth
// <nmatsakis> so, if you have a pattern like {a: _, b: _, _} and one like _
// <nmatsakis> then _ would be expanded to (_, _)
// <nmatsakis> one spot for each of the sub-patterns
// <nmatsakis> enter_opt() is one of the more complex; it covers the fallible
//             cases
// <nmatsakis> enter_rec_or_struct() or enter_tuple() are simpler, since they
//             are infallible patterns
// <nmatsakis> so all patterns must either be records (resp. tuples) or
//             wildcards

/// The above is now outdated in that enter_match() now takes a function that
/// takes the complete row of patterns rather than just the first one.
/// Also, most of the enter_() family functions have been unified with
/// the check_match specialization step.
fn enter_opt<'a, 'p, 'blk, 'tcx>(
             bcx: Block<'blk, 'tcx>,
             _: ast::NodeId,
             dm: &RefCell<DefMap>,
             m: &[Match<'a, 'p, 'blk, 'tcx>],
             opt: &Opt,
             col: usize,
             variant_size: usize,
             val: MatchInput)
             -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_opt(bcx={}, m={:?}, opt={:?}, col={}, val={:?})",
           bcx.to_str(), m, *opt, col, val);
    let _indenter = indenter();

    let ctor = match opt {
        &ConstantValue(ConstantExpr(expr), _) => check_match::ConstantValue(
            eval_const_expr(bcx.tcx(), &expr)
        ),
        &ConstantRange(ConstantExpr(lo), ConstantExpr(hi), _) => check_match::ConstantRange(
            eval_const_expr(bcx.tcx(), &lo),
            eval_const_expr(bcx.tcx(), &hi)
        ),
        &SliceLengthEqual(n, _) =>
            check_match::Slice(n),
        &SliceLengthGreaterOrEqual(before, after, _) =>
            check_match::SliceWithSubslice(before, after),
        &Variant(_, _, def_id, _) =>
            check_match::Constructor::Variant(def_id)
    };

    let param_env = bcx.tcx().empty_parameter_environment();
    let mcx = check_match::MatchCheckCtxt {
        tcx: bcx.tcx(),
        param_env: param_env,
    };
    enter_match(bcx, dm, m, col, val, |pats|
        check_match::specialize(&mcx, &pats[..], &ctor, col, variant_size)
    )
}

// Returns the options in one column of matches. An option is something that
// needs to be conditionally matched at runtime; for example, the discriminant
// on a set of enum variants or a literal.
fn get_branches<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    m: &[Match<'a, 'p, 'blk, 'tcx>],
                                    col: usize)
                                    -> Vec<Opt<'p, 'tcx>> {
    let tcx = bcx.tcx();

    let mut found: Vec<Opt> = vec![];
    for br in m {
        let cur = br.pats[col];
        let debug_loc = match br.pat_renaming_map {
            Some(pat_renaming_map) => {
                match pat_renaming_map.get(&(cur.id, cur.span)) {
                    Some(&id) => DebugLoc::At(id, cur.span),
                    None => DebugLoc::At(cur.id, cur.span),
                }
            }
            None => DebugLoc::None
        };

        let opt = match cur.node {
            PatKind::Lit(ref l) => {
                ConstantValue(ConstantExpr(&l), debug_loc)
            }
            PatKind::Ident(..) | PatKind::Path(..) |
            PatKind::TupleStruct(..) | PatKind::Struct(..) => {
                // This is either an enum variant or a variable binding.
                let opt_def = tcx.def_map.borrow().get(&cur.id).map(|d| d.full_def());
                match opt_def {
                    Some(Def::Variant(enum_id, var_id)) => {
                        let variant = tcx.lookup_adt_def(enum_id).variant_with_id(var_id);
                        Variant(Disr::from(variant.disr_val),
                                adt::represent_node(bcx, cur.id),
                                var_id,
                                debug_loc)
                    }
                    _ => continue
                }
            }
            PatKind::Range(ref l1, ref l2) => {
                ConstantRange(ConstantExpr(&l1), ConstantExpr(&l2), debug_loc)
            }
            PatKind::Vec(ref before, None, ref after) => {
                SliceLengthEqual(before.len() + after.len(), debug_loc)
            }
            PatKind::Vec(ref before, Some(_), ref after) => {
                SliceLengthGreaterOrEqual(before.len(), after.len(), debug_loc)
            }
            _ => continue
        };

        if !found.iter().any(|x| x.eq(&opt, tcx)) {
            found.push(opt);
        }
    }
    found
}

struct ExtractedBlock<'blk, 'tcx: 'blk> {
    vals: Vec<ValueRef>,
    bcx: Block<'blk, 'tcx>,
}

fn extract_variant_args<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    repr: &adt::Repr<'tcx>,
                                    disr_val: Disr,
                                    val: MatchInput)
                                    -> ExtractedBlock<'blk, 'tcx> {
    let _icx = push_ctxt("match::extract_variant_args");
    // Assume enums are always sized for now.
    let val = adt::MaybeSizedValue::sized(val.val);
    let args = (0..adt::num_args(repr, disr_val)).map(|i| {
        adt::trans_field_ptr(bcx, repr, val, disr_val, i)
    }).collect();

    ExtractedBlock { vals: args, bcx: bcx }
}

/// Helper for converting from the ValueRef that we pass around in the match code, which is always
/// an lvalue, into a Datum. Eventually we should just pass around a Datum and be done with it.
fn match_datum<'tcx>(val: MatchInput, left_ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
    val.to_datum(left_ty)
}

fn bind_subslice_pat(bcx: Block,
                     pat_id: ast::NodeId,
                     val: MatchInput,
                     offset_left: usize,
                     offset_right: usize) -> ValueRef {
    let _icx = push_ctxt("match::bind_subslice_pat");
    let vec_ty = node_id_type(bcx, pat_id);
    let vec_ty_contents = match vec_ty.sty {
        ty::TyBox(ty) => ty,
        ty::TyRef(_, mt) | ty::TyRawPtr(mt) => mt.ty,
        _ => vec_ty
    };
    let unit_ty = vec_ty_contents.sequence_element_type(bcx.tcx());
    let vec_datum = match_datum(val, vec_ty);
    let (base, len) = vec_datum.get_vec_base_and_len(bcx);

    let slice_begin = InBoundsGEP(bcx, base, &[C_uint(bcx.ccx(), offset_left)]);
    let slice_len_offset = C_uint(bcx.ccx(), offset_left + offset_right);
    let slice_len = Sub(bcx, len, slice_len_offset, DebugLoc::None);
    let slice_ty = bcx.tcx().mk_imm_ref(bcx.tcx().mk_region(ty::ReStatic),
                                        bcx.tcx().mk_slice(unit_ty));
    let scratch = rvalue_scratch_datum(bcx, slice_ty, "");
    Store(bcx, slice_begin, expr::get_dataptr(bcx, scratch.val));
    Store(bcx, slice_len, expr::get_meta(bcx, scratch.val));
    scratch.val
}

fn extract_vec_elems<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 left_ty: Ty<'tcx>,
                                 before: usize,
                                 after: usize,
                                 val: MatchInput)
                                 -> ExtractedBlock<'blk, 'tcx> {
    let _icx = push_ctxt("match::extract_vec_elems");
    let vec_datum = match_datum(val, left_ty);
    let (base, len) = vec_datum.get_vec_base_and_len(bcx);
    let mut elems = vec![];
    elems.extend((0..before).map(|i| GEPi(bcx, base, &[i])));
    elems.extend((0..after).rev().map(|i| {
        InBoundsGEP(bcx, base, &[
            Sub(bcx, len, C_uint(bcx.ccx(), i + 1), DebugLoc::None)
        ])
    }));
    ExtractedBlock { vals: elems, bcx: bcx }
}

// Macro for deciding whether any of the remaining matches fit a given kind of
// pattern. Note that, because the macro is well-typed, either ALL of the
// matches should fit that sort of pattern or NONE (however, some of the
// matches may be wildcards like _ or identifiers).
macro_rules! any_pat {
    ($m:expr, $col:expr, $pattern:pat) => (
        ($m).iter().any(|br| {
            match br.pats[$col].node {
                $pattern => true,
                _ => false
            }
        })
    )
}

fn any_uniq_pat(m: &[Match], col: usize) -> bool {
    any_pat!(m, col, PatKind::Box(_))
}

fn any_region_pat(m: &[Match], col: usize) -> bool {
    any_pat!(m, col, PatKind::Ref(..))
}

fn any_irrefutable_adt_pat(tcx: &TyCtxt, m: &[Match], col: usize) -> bool {
    m.iter().any(|br| {
        let pat = br.pats[col];
        match pat.node {
            PatKind::Tup(_) => true,
            PatKind::Struct(..) | PatKind::TupleStruct(..) |
            PatKind::Path(..) | PatKind::Ident(_, _, None) => {
                match tcx.def_map.borrow().get(&pat.id).unwrap().full_def() {
                    Def::Struct(..) | Def::TyAlias(..) => true,
                    _ => false,
                }
            }
            _ => false
        }
    })
}

/// What to do when the pattern match fails.
enum FailureHandler {
    Infallible,
    JumpToBasicBlock(BasicBlockRef),
    Unreachable
}

impl FailureHandler {
    fn is_fallible(&self) -> bool {
        match *self {
            Infallible => false,
            _ => true
        }
    }

    fn is_infallible(&self) -> bool {
        !self.is_fallible()
    }

    fn handle_fail(&self, bcx: Block) {
        match *self {
            Infallible =>
                bug!("attempted to panic in a non-panicking panic handler!"),
            JumpToBasicBlock(basic_block) =>
                Br(bcx, basic_block, DebugLoc::None),
            Unreachable =>
                build::Unreachable(bcx)
        }
    }
}

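// Picks which pattern column to specialize on next. Refutable patterns score
// 1 and irrefutable ones 0; a column whose patterns are all irrefutable is
// forced to the front (usize::MAX) since peeling it off never branches.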
fn pick_column_to_specialize(def_map: &RefCell<DefMap>, m: &[Match]) -> Option<usize> {
    fn pat_score(def_map: &RefCell<DefMap>, pat: &hir::Pat) -> usize {
        match pat.node {
            PatKind::Ident(_, _, Some(ref inner)) => pat_score(def_map, &inner),
            _ if pat_is_refutable(&def_map.borrow(), pat) => 1,
            _ => 0
        }
    }

    let column_score = |m: &[Match], col: usize| -> usize {
        let total_score = m.iter()
            .map(|row| row.pats[col])
            .map(|pat| pat_score(def_map, pat))
            .sum();

        // Irrefutable columns always go first; they'd only be duplicated in the branches.
        if total_score == 0 {
            std::usize::MAX
        } else {
            total_score
        }
    };

    let column_contains_any_nonwild_patterns = |&col: &usize| -> bool {
        m.iter().any(|row| match row.pats[col].node {
            PatKind::Wild => false,
            _ => true
        })
    };

    (0..m[0].pats.len())
        .filter(column_contains_any_nonwild_patterns)
        .map(|col| (col, column_score(m, col)))
        .max_by_key(|&(_, score)| score)
        .map(|(col, _)| col)
}

// Compiles a comparison between two things.
fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                              lhs: ValueRef,
                              rhs: ValueRef,
                              rhs_t: Ty<'tcx>,
                              debug_loc: DebugLoc)
                              -> Result<'blk, 'tcx> {
    fn compare_str<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               lhs_data: ValueRef,
                               lhs_len: ValueRef,
                               rhs_data: ValueRef,
                               rhs_len: ValueRef,
                               rhs_t: Ty<'tcx>,
                               debug_loc: DebugLoc)
                               -> Result<'blk, 'tcx> {
        let did = langcall(bcx,
                           None,
                           &format!("comparison of `{}`", rhs_t),
                           StrEqFnLangItem);
        let args = [lhs_data, lhs_len, rhs_data, rhs_len];
        Callee::def(bcx.ccx(), did, bcx.tcx().mk_substs(Substs::empty()))
            .call(bcx, debug_loc, ArgVals(&args), None)
    }

    let _icx = push_ctxt("compare_values");
    if rhs_t.is_scalar() {
        let cmp = compare_scalar_types(cx, lhs, rhs, rhs_t, hir::BiEq, debug_loc);
        return Result::new(cx, cmp);
    }

    match rhs_t.sty {
        ty::TyRef(_, mt) => match mt.ty.sty {
            ty::TyStr => {
                let lhs_data = Load(cx, expr::get_dataptr(cx, lhs));
                let lhs_len = Load(cx, expr::get_meta(cx, lhs));
                let rhs_data = Load(cx, expr::get_dataptr(cx, rhs));
                let rhs_len = Load(cx, expr::get_meta(cx, rhs));
                compare_str(cx, lhs_data, lhs_len, rhs_data, rhs_len, rhs_t, debug_loc)
            }
            ty::TyArray(ty, _) | ty::TySlice(ty) => match ty.sty {
                ty::TyUint(ast::UintTy::U8) => {
                    // NOTE: cast &[u8] and &[u8; N] to &str and abuse the str_eq lang item,
                    // which calls memcmp().
                    let pat_len = val_ty(rhs).element_type().array_length();
                    let ty_str_slice = cx.tcx().mk_static_str();

                    let rhs_data = GEPi(cx, rhs, &[0, 0]);
                    let rhs_len = C_uint(cx.ccx(), pat_len);

                    let lhs_data;
                    let lhs_len;
                    if val_ty(lhs) == val_ty(rhs) {
                        // Both the discriminant and the pattern are thin pointers
                        lhs_data = GEPi(cx, lhs, &[0, 0]);
                        lhs_len = C_uint(cx.ccx(), pat_len);
                    } else {
                        // The discriminant is a fat pointer
                        let llty_str_slice = type_of::type_of(cx.ccx(), ty_str_slice).ptr_to();
                        let lhs_str = PointerCast(cx, lhs, llty_str_slice);
                        lhs_data = Load(cx, expr::get_dataptr(cx, lhs_str));
                        lhs_len = Load(cx, expr::get_meta(cx, lhs_str));
                    }

                    compare_str(cx, lhs_data, lhs_len, rhs_data, rhs_len, rhs_t, debug_loc)
                },
                _ => bug!("only byte strings supported in compare_values"),
            },
            _ => bug!("only string and byte strings supported in compare_values"),
        },
        _ => bug!("only scalars, byte strings, and strings supported in compare_values"),
    }
}

/// For each binding in `data.bindings_map`, adds an appropriate entry into the `fcx.lllocals` map
fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                               bindings_map: &BindingsMap<'tcx>,
                               cs: Option<cleanup::ScopeId>)
                               -> Block<'blk, 'tcx> {
    for (&name, &binding_info) in bindings_map {
        let (llval, aliases_other_state) = match binding_info.trmode {
            // By-value binding for a copy type: load from the ptr
            // into the matched value and copy to our alloca
            TrByCopy(llbinding) |
            TrByMoveIntoCopy(llbinding) => {
                let llval = Load(bcx, binding_info.llmatch);
                let lvalue = match binding_info.trmode {
                    TrByCopy(..) =>
                        Lvalue::new("_match::insert_lllocals"),
                    TrByMoveIntoCopy(..) => {
                        // match_input moves from the input into a
                        // separate stack slot.
                        //
                        // E.g. consider moving the value `D(A)` out
                        // of the tuple `(D(A), D(B))` and into the
                        // local variable `x` via the pattern `(x,_)`,
                        // leaving the remainder of the tuple `(_,
                        // D(B))` still to be dropped in the future.
                        //
                        // Thus, here we must zero the place that we
                        // are moving *from*, because we do not yet
                        // track drop flags for a fragmented parent
                        // match input expression.
                        //
                        // Longer term we will be able to map the move
                        // into `(x, _)` up to the parent path that
                        // owns the whole tuple, and mark the
                        // corresponding stack-local drop-flag
                        // tracking the first component of the tuple.
                        let hint_kind = HintKind::ZeroAndMaintain;
                        Lvalue::new_with_hint("_match::insert_lllocals (match_input)",
                                              bcx, binding_info.id, hint_kind)
                    }
                    _ => bug!(),
                };
                let datum = Datum::new(llval, binding_info.ty, lvalue);
                call_lifetime_start(bcx, llbinding);
                bcx = datum.store_to(bcx, llbinding);
                if let Some(cs) = cs {
                    bcx.fcx.schedule_lifetime_end(cs, llbinding);
                }

                (llbinding, false)
            },

            // By-value move bindings: load from the ptr into the matched value
            TrByMoveRef => (Load(bcx, binding_info.llmatch), true),

            // By-ref binding: use the ptr into the matched value
            TrByRef => (binding_info.llmatch, true),
        };

        // A local that aliases some other state must be zeroed, since
        // the other state (e.g. some parent data that we matched
        // into) will still have its subcomponents (such as this
        // local) destructed at the end of the parent's scope. Longer
        // term, we will properly map such parents to the set of
        // unique drop flags for its fragments.
        let hint_kind = if aliases_other_state {
            HintKind::ZeroAndMaintain
        } else {
            HintKind::DontZeroJustUse
        };
        let lvalue = Lvalue::new_with_hint("_match::insert_lllocals (local)",
                                           bcx,
                                           binding_info.id,
                                           hint_kind);
        let datum = Datum::new(llval, binding_info.ty, lvalue);
        if let Some(cs) = cs {
            let opt_datum = lvalue.dropflag_hint(bcx);
            bcx.fcx.schedule_lifetime_end(cs, binding_info.llmatch);
            bcx.fcx.schedule_drop_and_fill_mem(cs, llval, binding_info.ty, opt_datum);
        }

        debug!("binding {} to {:?}", binding_info.id, Value(llval));
        bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum);
        debuginfo::create_match_binding_metadata(bcx, name, binding_info);
    }
    bcx
}

fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     guard_expr: &hir::Expr,
                                     data: &ArmData<'p, 'blk, 'tcx>,
                                     m: &[Match<'a, 'p, 'blk, 'tcx>],
                                     vals: &[MatchInput],
                                     chk: &FailureHandler,
                                     has_genuine_default: bool)
                                     -> Block<'blk, 'tcx> {
    debug!("compile_guard(bcx={}, guard_expr={:?}, m={:?}, vals={:?})",
           bcx.to_str(), guard_expr, m, vals);
    let _indenter = indenter();

    let mut bcx = insert_lllocals(bcx, &data.bindings_map, None);

    let val = unpack_datum!(bcx, expr::trans(bcx, guard_expr));
    let val = val.to_llbool(bcx);

    for (_, &binding_info) in &data.bindings_map {
        if let Some(llbinding) = binding_info.trmode.alloca_if_copy() {
            call_lifetime_end(bcx, llbinding)
        }
    }

    for (_, &binding_info) in &data.bindings_map {
        bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id);
    }

    with_cond(bcx, Not(bcx, val, guard_expr.debug_loc()), |bcx| {
        for (_, &binding_info) in &data.bindings_map {
            call_lifetime_end(bcx, binding_info.llmatch);
        }
        match chk {
            // If the default arm is the only one left, move on to the next
            // condition explicitly rather than (possibly) falling back to
            // the default arm.
            &JumpToBasicBlock(_) if m.len() == 1 && has_genuine_default => {
                chk.handle_fail(bcx);
            }
            _ => {
                compile_submatch(bcx, m, vals, chk, has_genuine_default);
            }
        };
        bcx
    })
}

fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                        m: &[Match<'a, 'p, 'blk, 'tcx>],
                                        vals: &[MatchInput],
                                        chk: &FailureHandler,
                                        has_genuine_default: bool) {
    debug!("compile_submatch(bcx={}, m={:?}, vals=[{:?}])",
           bcx.to_str(), m, vals);
    let _indenter = indenter();
    let _icx = push_ctxt("match::compile_submatch");
    let mut bcx = bcx;
    if m.is_empty() {
        if chk.is_fallible() {
            chk.handle_fail(bcx);
        }
        return;
    }

    let tcx = bcx.tcx();
    let def_map = &tcx.def_map;
    match pick_column_to_specialize(def_map, m) {
        Some(col) => {
            let val = vals[col];
            if has_nested_bindings(m, col) {
                let expanded = expand_nested_bindings(bcx, m, col, val);
                compile_submatch_continue(bcx,
                                          &expanded[..],
                                          vals,
                                          chk,
                                          col,
                                          val,
                                          has_genuine_default)
            } else {
                compile_submatch_continue(bcx, m, vals, chk, col, val, has_genuine_default)
            }
        }
        None => {
            let data = &m[0].data;
            for &(ref name, ref value_ptr) in &m[0].bound_ptrs {
                let binfo = *data.bindings_map.get(name).unwrap();
                call_lifetime_start(bcx, binfo.llmatch);
                if binfo.trmode == TrByRef && type_is_fat_ptr(bcx.tcx(), binfo.ty) {
                    expr::copy_fat_ptr(bcx, *value_ptr, binfo.llmatch);
                } else {
                    Store(bcx, *value_ptr, binfo.llmatch);
                }
            }
            match data.arm.guard {
                Some(ref guard_expr) => {
                    bcx = compile_guard(bcx,
                                        &guard_expr,
                                        m[0].data,
                                        &m[1..m.len()],
                                        vals,
                                        chk,
                                        has_genuine_default);
                }
                _ => ()
            }
            Br(bcx, data.bodycx.llbb, DebugLoc::None);
        }
    }
}

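// Specializes the match on column `col`: peels that column off, decides what
// kind of branch (single, switch, or comparison) is needed to test the values
// found there, and recurses via `compile_submatch` for each resulting option.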
fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                                 m: &[Match<'a, 'p, 'blk, 'tcx>],
                                                 vals: &[MatchInput],
                                                 chk: &FailureHandler,
                                                 col: usize,
                                                 val: MatchInput,
                                                 has_genuine_default: bool) {
    let fcx = bcx.fcx;
    let tcx = bcx.tcx();
    let dm = &tcx.def_map;

    let mut vals_left = vals[0..col].to_vec();
    vals_left.extend_from_slice(&vals[col + 1..]);
    let ccx = bcx.fcx.ccx;

    // Find a real id (we're adding placeholder wildcard patterns, but
    // each column is guaranteed to have at least one real pattern)
    let pat_id = m.iter().map(|br| br.pats[col].id)
                         .find(|&id| id != DUMMY_NODE_ID)
                         .unwrap_or(DUMMY_NODE_ID);

    let left_ty = if pat_id == DUMMY_NODE_ID {
        tcx.mk_nil()
    } else {
        node_id_type(bcx, pat_id)
    };

    let mcx = check_match::MatchCheckCtxt {
        tcx: bcx.tcx(),
        param_env: bcx.tcx().empty_parameter_environment(),
    };
    let adt_vals = if any_irrefutable_adt_pat(bcx.tcx(), m, col) {
        let repr = adt::represent_type(bcx.ccx(), left_ty);
        let arg_count = adt::num_args(&repr, Disr(0));
        let (arg_count, struct_val) = if type_is_sized(bcx.tcx(), left_ty) {
            (arg_count, val.val)
        } else {
            // For an unsized ADT (i.e. DST struct), we need to treat
            // the last field specially: instead of simply passing a
            // ValueRef pointing to that field, as with all the others,
            // we skip it and instead construct a 'fat ptr' below.
            (arg_count - 1, Load(bcx, expr::get_dataptr(bcx, val.val)))
        };
        let mut field_vals: Vec<ValueRef> = (0..arg_count).map(|ix|
            // By definition, these are all sized
            adt::trans_field_ptr(bcx, &repr, adt::MaybeSizedValue::sized(struct_val), Disr(0), ix)
        ).collect();

        match left_ty.sty {
            ty::TyStruct(def, substs) if !type_is_sized(bcx.tcx(), left_ty) => {
                // The last field is technically unsized but
                // since we can only ever match that field behind
                // a reference we construct a fat ptr here.
                let unsized_ty = def.struct_variant().fields.last().map(|field| {
                    monomorphize::field_ty(bcx.tcx(), substs, field)
                }).unwrap();
                let scratch = alloc_ty(bcx, unsized_ty, "__struct_field_fat_ptr");

                let meta = Load(bcx, expr::get_meta(bcx, val.val));
                let struct_val = adt::MaybeSizedValue::unsized_(struct_val, meta);

                let data = adt::trans_field_ptr(bcx, &repr, struct_val, Disr(0), arg_count);
                Store(bcx, data, expr::get_dataptr(bcx, scratch));
                Store(bcx, meta, expr::get_meta(bcx, scratch));
                field_vals.push(scratch);
            }
            _ => {}
        }
        Some(field_vals)
    } else if any_uniq_pat(m, col) || any_region_pat(m, col) {
        Some(vec!(Load(bcx, val.val)))
    } else {
        match left_ty.sty {
            ty::TyArray(_, n) => {
                let args = extract_vec_elems(bcx, left_ty, n, 0, val);
                Some(args.vals)
            }
            _ => None
        }
    };
    match adt_vals {
        Some(field_vals) => {
            let pats = enter_match(bcx, dm, m, col, val, |pats|
                check_match::specialize(&mcx, pats,
                                        &check_match::Single, col,
                                        field_vals.len())
            );
            let mut vals: Vec<_> = field_vals.into_iter()
                .map(|v| MatchInput::from_val(v))
                .collect();
            vals.extend_from_slice(&vals_left);
            compile_submatch(bcx, &pats, &vals, chk, has_genuine_default);
            return;
        }
        _ => ()
    }

    // Decide what kind of branch we need
    let opts = get_branches(bcx, m, col);
    debug!("options={:?}", opts);
    let mut kind = NoBranch;
    let mut test_val = val.val;
    debug!("test_val={:?}", Value(test_val));
    if !opts.is_empty() {
        match opts[0] {
            ConstantValue(..) | ConstantRange(..) => {
                test_val = load_if_immediate(bcx, val.val, left_ty);
                kind = if left_ty.is_integral() {
                    Switch
                } else {
                    Compare
                };
            }
            Variant(_, ref repr, _, _) => {
                let (the_kind, val_opt) = adt::trans_switch(bcx, &repr,
                                                            val.val, true);
                kind = the_kind;
                if let Some(tval) = val_opt { test_val = tval; }
            }
            SliceLengthEqual(..) | SliceLengthGreaterOrEqual(..) => {
                let (_, len) = tvec::get_base_and_len(bcx, val.val, left_ty);
                test_val = len;
                kind = Switch;
            }
        }
    }
    for o in &opts {
        match *o {
            ConstantRange(..) => { kind = Compare; break },
            SliceLengthGreaterOrEqual(..) => { kind = CompareSliceLength; break },
            _ => ()
        }
    }
    let else_cx = match kind {
        NoBranch | Single => bcx,
        _ => bcx.fcx.new_temp_block("match_else")
    };
    let sw = if kind == Switch {
        build::Switch(bcx, test_val, else_cx.llbb, opts.len())
    } else {
        C_int(ccx, 0) // Placeholder for when not using a switch
    };

    let defaults = enter_default(else_cx, dm, m, col, val);
    let exhaustive = chk.is_infallible() && defaults.is_empty();
    let len = opts.len();

    if exhaustive && kind == Switch {
        build::Unreachable(else_cx);
    }

    // Compile subtrees for each option
    for (i, opt) in opts.iter().enumerate() {
        // In some cases of range and vector pattern matching, we need to
        // override the failure case so that instead of failing, it proceeds
        // to try more matching. branch_chk, then, is the proper failure case
        // for the current conditional branch.
        let mut branch_chk = None;
        let mut opt_cx = else_cx;
        let debug_loc = opt.debug_loc();

        if kind == Switch || !exhaustive || i + 1 < len {
            opt_cx = bcx.fcx.new_temp_block("match_case");
            match kind {
                Single => Br(bcx, opt_cx.llbb, debug_loc),
                Switch => {
                    match opt.trans(bcx) {
                        SingleResult(r) => {
                            AddCase(sw, r.val, opt_cx.llbb);
                            bcx = r.bcx;
                        }
                        _ => {
                            bug!(
                                "in compile_submatch, expected \
                                 opt.trans() to return a SingleResult")
                        }
                    }
                }
                Compare | CompareSliceLength => {
                    let t = if kind == Compare {
                        left_ty
                    } else {
                        tcx.types.usize // vector length
                    };
                    let Result { bcx: after_cx, val: matches } = {
                        match opt.trans(bcx) {
                            SingleResult(Result { bcx, val }) => {
                                compare_values(bcx, test_val, val, t, debug_loc)
                            }
                            RangeResult(Result { val: vbegin, .. },
                                        Result { bcx, val: vend }) => {
                                let llge = compare_scalar_types(bcx, test_val, vbegin,
                                                                t, hir::BiGe, debug_loc);
                                let llle = compare_scalar_types(bcx, test_val, vend,
                                                                t, hir::BiLe, debug_loc);
                                Result::new(bcx, And(bcx, llge, llle, DebugLoc::None))
                            }
                            LowerBound(Result { bcx, val }) => {
                                Result::new(bcx, compare_scalar_types(bcx, test_val,
                                                                      val, t, hir::BiGe,
                                                                      debug_loc))
                            }
                        }
                    };
                    bcx = fcx.new_temp_block("compare_next");

                    // If none of the sub-cases match, and the current condition
                    // is guarded or has multiple patterns, move on to the next
                    // condition, if there is any, rather than falling back to
                    // the default.
                    let guarded = m[i].data.arm.guard.is_some();
                    let multi_pats = m[i].pats.len() > 1;
                    if i + 1 < len && (guarded || multi_pats || kind == CompareSliceLength) {
                        branch_chk = Some(JumpToBasicBlock(bcx.llbb));
                    }
                    CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb, debug_loc);
                }
                _ => ()
            }
        } else if kind == Compare || kind == CompareSliceLength {
            Br(bcx, else_cx.llbb, debug_loc);
        }

        let mut size = 0;
        let mut unpacked = Vec::new();
        match *opt {
            Variant(disr_val, ref repr, _, _) => {
                let ExtractedBlock { vals: argvals, bcx: new_bcx } =
                    extract_variant_args(opt_cx, &repr, disr_val, val);
                size = argvals.len();
                unpacked = argvals;
                opt_cx = new_bcx;
            }
            SliceLengthEqual(len, _) => {
                let args = extract_vec_elems(opt_cx, left_ty, len, 0, val);
                size = args.vals.len();
                unpacked = args.vals.clone();
                opt_cx = args.bcx;
            }
            SliceLengthGreaterOrEqual(before, after, _) => {
                let args = extract_vec_elems(opt_cx, left_ty, before, after, val);
                size = args.vals.len();
                unpacked = args.vals.clone();
                opt_cx = args.bcx;
            }
            ConstantValue(..) | ConstantRange(..) => ()
        }
        let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val);
        let mut opt_vals: Vec<_> = unpacked.into_iter()
            .map(|v| MatchInput::from_val(v))
            .collect();
        opt_vals.extend_from_slice(&vals_left[..]);
        compile_submatch(opt_cx,
                         &opt_ms[..],
                         &opt_vals[..],
                         branch_chk.as_ref().unwrap_or(chk),
                         has_genuine_default);
    }

    // Compile the fall-through case, if any
    if !exhaustive && kind != Single {
        if kind == Compare || kind == CompareSliceLength {
            Br(bcx, else_cx.llbb, DebugLoc::None);
        }
        match chk {
            // If there is only one default arm left, move on to the next
            // condition explicitly rather than (eventually) falling back to
            // the last default arm.
            &JumpToBasicBlock(_) if defaults.len() == 1 && has_genuine_default => {
                chk.handle_fail(else_cx);
            }
            _ => {
                compile_submatch(else_cx,
                                 &defaults[..],
                                 &vals_left[..],
                                 chk,
                                 has_genuine_default);
            }
        }
    }
}

pub fn trans_match<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               match_expr: &hir::Expr,
                               discr_expr: &hir::Expr,
                               arms: &[hir::Arm],
                               dest: Dest)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::trans_match");
    trans_match_inner(bcx, match_expr.id, discr_expr, arms, dest)
}

/// Checks whether the binding in `discr` is assigned to anywhere in the expression `body`
fn is_discr_reassigned(bcx: Block, discr: &hir::Expr, body: &hir::Expr) -> bool {
    let (vid, field) = match discr.node {
        hir::ExprPath(..) => match bcx.def(discr.id) {
            Def::Local(_, vid) | Def::Upvar(_, vid, _, _) => (vid, None),
            _ => return false
        },
        hir::ExprField(ref base, field) => {
            let vid = match bcx.tcx().def_map.borrow().get(&base.id).map(|d| d.full_def()) {
                Some(Def::Local(_, vid)) | Some(Def::Upvar(_, vid, _, _)) => vid,
                _ => return false
            };
            (vid, Some(mc::NamedField(field.node)))
        },
        hir::ExprTupField(ref base, field) => {
            let vid = match bcx.tcx().def_map.borrow().get(&base.id).map(|d| d.full_def()) {
                Some(Def::Local(_, vid)) | Some(Def::Upvar(_, vid, _, _)) => vid,
                _ => return false
            };
            (vid, Some(mc::PositionalField(field.node)))
        },
        _ => return false
    };

    let mut rc = ReassignmentChecker {
        node: vid,
        field: field,
        reassigned: false
    };
    {
        let infcx = infer::normalizing_infer_ctxt(bcx.tcx(),
                                                  &bcx.tcx().tables,
                                                  ProjectionMode::Any);
        let mut visitor = euv::ExprUseVisitor::new(&mut rc, &infcx);
        visitor.walk_expr(body);
    }
    rc.reassigned
}

struct ReassignmentChecker {
    node: ast::NodeId,
    field: Option<mc::FieldName>,
    reassigned: bool
}

// Determine if the expression we're matching on is reassigned to within
// the body of the match's arm.
// We only care about the `mutate` callback, since this check only matters
// for cases where the matched value is moved.
impl<'tcx> euv::Delegate<'tcx> for ReassignmentChecker {
    fn consume(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: euv::ConsumeMode) {}
    fn matched_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::MatchMode) {}
    fn consume_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::ConsumeMode) {}
    fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: ty::Region,
              _: ty::BorrowKind, _: euv::LoanCause) {}
    fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}

    fn mutate(&mut self, _: ast::NodeId, _: Span, cmt: mc::cmt, _: euv::MutateMode) {
        match cmt.cat {
            Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
            Categorization::Local(vid) => self.reassigned |= self.node == vid,
            Categorization::Interior(ref base_cmt, mc::InteriorField(field)) => {
                match base_cmt.cat {
                    Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
                    Categorization::Local(vid) => {
                        self.reassigned |= self.node == vid &&
                            (self.field.is_none() || Some(field) == self.field)
                    },
                    _ => {}
                }
            },
            _ => {}
        }
    }
}

fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &hir::Pat,
                                   discr: &hir::Expr, body: &hir::Expr)
                                   -> BindingsMap<'tcx> {
    // Create the bindings map, which is a mapping from each binding name
    // to an alloca() that will be the value for that local variable.
    // Note that we use the names because each binding will have many ids
    // from the various alternatives.
    let ccx = bcx.ccx();
    let tcx = bcx.tcx();
    let reassigned = is_discr_reassigned(bcx, discr, body);
    let mut bindings_map = FnvHashMap();
    pat_bindings(&tcx.def_map, &pat, |bm, p_id, span, path1| {
        let name = path1.node;
        let variable_ty = node_id_type(bcx, p_id);
        let llvariable_ty = type_of::type_of(ccx, variable_ty);
        let tcx = bcx.tcx();
        let param_env = tcx.empty_parameter_environment();

        let llmatch;
        let trmode;
        let moves_by_default = variable_ty.moves_by_default(&param_env, span);
        match bm {
            hir::BindByValue(_) if !moves_by_default || reassigned => {
                llmatch = alloca(bcx, llvariable_ty.ptr_to(), "__llmatch");
                let llcopy = alloca(bcx, llvariable_ty, &bcx.name(name));
                trmode = if moves_by_default {
                    TrByMoveIntoCopy(llcopy)
                } else {
                    TrByCopy(llcopy)
                };
            }
            hir::BindByValue(_) => {
                // in this case, the final type of the variable will be T,
                // but during matching we need to store a *T as explained
                // above
                llmatch = alloca(bcx, llvariable_ty.ptr_to(), &bcx.name(name));
                trmode = TrByMoveRef;
            }
            hir::BindByRef(_) => {
                llmatch = alloca(bcx, llvariable_ty, &bcx.name(name));
                trmode = TrByRef;
            }
        };
        bindings_map.insert(name, BindingInfo {
            llmatch: llmatch,
            trmode: trmode,
            id: p_id,
            span: span,
            ty: variable_ty
        });
    });
    return bindings_map;
}
1570
1571 fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
1572 match_id: ast::NodeId,
1573 discr_expr: &hir::Expr,
1574 arms: &[hir::Arm],
1575 dest: Dest) -> Block<'blk, 'tcx> {
1576 let _icx = push_ctxt("match::trans_match_inner");
1577 let fcx = scope_cx.fcx;
1578 let mut bcx = scope_cx;
1579 let tcx = bcx.tcx();
1580
1581 let discr_datum = unpack_datum!(bcx, expr::trans_to_lvalue(bcx, discr_expr,
1582 "match"));
1583 if bcx.unreachable.get() {
1584 return bcx;
1585 }
1586
1587 let t = node_id_type(bcx, discr_expr.id);
1588 let chk = if t.is_empty(tcx) {
1589 Unreachable
1590 } else {
1591 Infallible
1592 };
1593
1594 let arm_datas: Vec<ArmData> = arms.iter().map(|arm| ArmData {
1595 bodycx: fcx.new_id_block("case_body", arm.body.id),
1596 arm: arm,
1597 bindings_map: create_bindings_map(bcx, &arm.pats[0], discr_expr, &arm.body)
1598 }).collect();
1599
1600 let mut pat_renaming_map = if scope_cx.sess().opts.debuginfo != NoDebugInfo {
1601 Some(FnvHashMap())
1602 } else {
1603 None
1604 };
1605
1606 let arm_pats: Vec<Vec<P<hir::Pat>>> = {
1607 let mut static_inliner = StaticInliner::new(scope_cx.tcx(),
1608 pat_renaming_map.as_mut());
1609 arm_datas.iter().map(|arm_data| {
1610 arm_data.arm.pats.iter().map(|p| static_inliner.fold_pat((*p).clone())).collect()
1611 }).collect()
1612 };
1613
1614 let mut matches = Vec::new();
1615 for (arm_data, pats) in arm_datas.iter().zip(&arm_pats) {
1616 matches.extend(pats.iter().map(|p| Match {
1617 pats: vec![&p],
1618 data: arm_data,
1619 bound_ptrs: Vec::new(),
1620 pat_renaming_map: pat_renaming_map.as_ref()
1621 }));
1622 }
1623
    // `compile_submatch` works on one column of arm patterns at a time and
    // then peels that column off. So as we progress, it may become
    // impossible to tell whether we have a genuine default arm, i.e.
    // `_ => foo`, or not. Sometimes it is important to know this in order
    // to decide whether to move on to the next condition or to fall back
    // to the default arm.
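    // For example (a sketch): `match x { Some(0) => a(), _ => b() }` ends
    // with a genuine default arm, whereas
    // `match x { Some(_) => a(), None => b() }` does not, even though a `_`
    // occurs inside the first pattern.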
    let has_default = arms.last().map_or(false, |arm| {
        arm.pats.len() == 1
        && arm.pats.last().unwrap().node == PatKind::Wild
    });

    compile_submatch(bcx, &matches[..], &[discr_datum.match_input()], &chk, has_default);

    let mut arm_cxs = Vec::new();
    for arm_data in &arm_datas {
        let mut bcx = arm_data.bodycx;

        // insert bindings into the lllocals map and add cleanups
        let cs = fcx.push_custom_cleanup_scope();
        bcx = insert_lllocals(bcx, &arm_data.bindings_map, Some(cleanup::CustomScope(cs)));
        bcx = expr::trans_into(bcx, &arm_data.arm.body, dest);
        bcx = fcx.pop_and_trans_custom_cleanup_scope(bcx, cs);
        arm_cxs.push(bcx);
    }

    bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[..]);
    return bcx;
}

/// Generates code for a local variable declaration like `let <pat>;` or
/// `let <pat> = <opt_init_expr>`.
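///
/// For example (a sketch, with hypothetical `compute`/`make_pair`):
/// `let x = compute();` takes the `simple_name` fast path below, writing the
/// initializer's result directly into `x`'s alloca, while
/// `let (a, b) = make_pair();` goes through the general
/// `bind_irrefutable_pat` path.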
pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               local: &hir::Local)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::store_local");
    let mut bcx = bcx;
    let tcx = bcx.tcx();
    let pat = &local.pat;

    fn create_dummy_locals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                       pat: &hir::Pat)
                                       -> Block<'blk, 'tcx> {
        let _icx = push_ctxt("create_dummy_locals");
        // create dummy memory for the variables if we have no
        // value to store into them immediately
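        // (This covers declarations such as `let x;`, where the slot must
        // exist even though nothing has been stored into it yet.)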
        let tcx = bcx.tcx();
        pat_bindings(&tcx.def_map, pat, |_, p_id, _, path1| {
            let scope = cleanup::var_scope(tcx, p_id);
            bcx = mk_binding_alloca(
                bcx, p_id, path1.node, scope, (),
                "_match::store_local::create_dummy_locals",
                |(), bcx, Datum { val: llval, ty, kind }| {
                    // Dummy-locals start out uninitialized, so set their
                    // drop-flag hints (if any) to "moved."
                    if let Some(hint) = kind.dropflag_hint(bcx) {
                        let moved_hint = adt::DTOR_MOVED_HINT;
                        debug!("store moved_hint={} for hint={:?}, uninitialized dummy",
                               moved_hint, hint);
                        Store(bcx, C_u8(bcx.fcx.ccx, moved_hint), hint.to_value().value());
                    }

                    if kind.drop_flag_info.must_zero() {
                        // If there is no drop-flag hint, or the hint requires
                        // that we maintain the embedded drop-flag, then mark
                        // the embedded drop-flag(s) as moved (i.e. "already
                        // dropped").
                        drop_done_fill_mem(bcx, llval, ty);
                    }
                    bcx
                });
        });
        bcx
    }

    match local.init {
        Some(ref init_expr) => {
            // Optimize the "let x = expr" case. This just writes
            // the result of evaluating `expr` directly into the alloca
            // for `x`. Often the general path results in similar or the
            // same code post-optimization, but not always. In particular,
            // in unsafe code, you can have expressions like
            //
            //    let x = intrinsics::uninit();
            //
            // In such cases, the more general path is unsafe, because
            // it assumes it is matching against a valid value.
            match simple_name(pat) {
                Some(name) => {
                    let var_scope = cleanup::var_scope(tcx, local.id);
                    return mk_binding_alloca(
                        bcx, pat.id, name, var_scope, (),
                        "_match::store_local",
                        |(), bcx, Datum { val: v, .. }| expr::trans_into(bcx, &init_expr,
                                                                         expr::SaveIn(v)));
                }

                None => {}
            }

            // General path.
            let init_datum =
                unpack_datum!(bcx, expr::trans_to_lvalue(bcx, &init_expr, "let"));
            if bcx.sess().asm_comments() {
                add_comment(bcx, "creating zeroable ref llval");
            }
            let var_scope = cleanup::var_scope(tcx, local.id);
            bind_irrefutable_pat(bcx, pat, init_datum.match_input(), var_scope)
        }
        None => {
            create_dummy_locals(bcx, pat)
        }
    }
}

fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
                                       p_id: ast::NodeId,
                                       name: ast::Name,
                                       cleanup_scope: cleanup::ScopeId,
                                       arg: A,
                                       caller_name: &'static str,
                                       populate: F)
                                       -> Block<'blk, 'tcx> where
    F: FnOnce(A, Block<'blk, 'tcx>, Datum<'tcx, Lvalue>) -> Block<'blk, 'tcx>,
{
    let var_ty = node_id_type(bcx, p_id);

    // Allocate memory on the stack for the binding.
    let llval = alloc_ty(bcx, var_ty, &bcx.name(name));
    let lvalue = Lvalue::new_with_hint(caller_name, bcx, p_id, HintKind::DontZeroJustUse);
    let datum = Datum::new(llval, var_ty, lvalue);

    debug!("mk_binding_alloca cleanup_scope={:?} llval={:?} var_ty={:?}",
           cleanup_scope, Value(llval), var_ty);

    // Subtle: be sure that we *populate* the memory *before*
    // we schedule the cleanup.
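    // (Presumably because the drop scheduled below must not run over
    // still-uninitialized memory if unwinding happens while `populate`
    // is executing.)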
    call_lifetime_start(bcx, llval);
    let bcx = populate(arg, bcx, datum);
    bcx.fcx.schedule_lifetime_end(cleanup_scope, llval);
    bcx.fcx.schedule_drop_mem(cleanup_scope, llval, var_ty, lvalue.dropflag_hint(bcx));

    // Now that memory is initialized and has cleanup scheduled,
    // insert datum into the local variable map.
    bcx.fcx.lllocals.borrow_mut().insert(p_id, datum);
    bcx
}

/// A simple version of the pattern matching code that only handles
/// irrefutable patterns. This is used in let/argument patterns,
/// not in match statements. Unifying this code with the code above
/// sounds nice, but in practice it produces very inefficient code,
/// since the match code is so much more general. In most cases,
/// LLVM is able to optimize the code, but it causes longer compile
/// times and makes the generated code nigh impossible to read.
///
/// # Arguments
/// - bcx: starting basic block context
/// - pat: the irrefutable pattern being matched.
/// - val: the value being matched -- must be an lvalue (by ref, with cleanup)
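///
/// For example (a sketch): `let (x, ref y) = pair;` reaches this function
/// with a `PatKind::Tup` pattern; the tuple case below recurses once per
/// element, hitting the by-value `PatKind::Ident` case for `x` and the
/// by-ref one for `ref y`.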
pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                        pat: &hir::Pat,
                                        val: MatchInput,
                                        cleanup_scope: cleanup::ScopeId)
                                        -> Block<'blk, 'tcx> {
    debug!("bind_irrefutable_pat(bcx={}, pat={:?}, val={:?})",
           bcx.to_str(), pat, val);

    if bcx.sess().asm_comments() {
        add_comment(bcx, &format!("bind_irrefutable_pat(pat={:?})",
                                  pat));
    }

    let _indenter = indenter();

    let _icx = push_ctxt("match::bind_irrefutable_pat");
    let mut bcx = bcx;
    let tcx = bcx.tcx();
    let ccx = bcx.ccx();
    match pat.node {
        PatKind::Ident(pat_binding_mode, ref path1, ref inner) => {
            if pat_is_binding(&tcx.def_map.borrow(), &pat) {
                // Allocate the stack slot where the value of this
                // binding will live and place it into the appropriate
                // map.
                bcx = mk_binding_alloca(
                    bcx, pat.id, path1.node.name, cleanup_scope, (),
                    "_match::bind_irrefutable_pat",
                    |(), bcx, Datum { val: llval, ty, kind: _ }| {
                        match pat_binding_mode {
                            hir::BindByValue(_) => {
                                // By value binding: move the value that `val`
                                // points at into the binding's stack slot.
                                let d = val.to_datum(ty);
                                d.store_to(bcx, llval)
                            }

                            hir::BindByRef(_) => {
                                // By-ref binding: the value of the variable
                                // is the pointer `val` itself, or the fat
                                // pointer referenced by `val`.
                                if type_is_fat_ptr(bcx.tcx(), ty) {
                                    expr::copy_fat_ptr(bcx, val.val, llval);
                                } else {
                                    Store(bcx, val.val, llval);
                                }

                                bcx
                            }
                        }
                    });
            }

            if let Some(ref inner_pat) = *inner {
                bcx = bind_irrefutable_pat(bcx, &inner_pat, val, cleanup_scope);
            }
        }
        PatKind::TupleStruct(_, ref sub_pats) => {
            let opt_def = bcx.tcx().def_map.borrow().get(&pat.id).map(|d| d.full_def());
            match opt_def {
                Some(Def::Variant(enum_id, var_id)) => {
                    let repr = adt::represent_node(bcx, pat.id);
                    let vinfo = ccx.tcx().lookup_adt_def(enum_id).variant_with_id(var_id);
                    let args = extract_variant_args(bcx,
                                                    &repr,
                                                    Disr::from(vinfo.disr_val),
                                                    val);
                    if let Some(ref sub_pat) = *sub_pats {
                        for (i, &argval) in args.vals.iter().enumerate() {
                            bcx = bind_irrefutable_pat(
                                bcx,
                                &sub_pat[i],
                                MatchInput::from_val(argval),
                                cleanup_scope);
                        }
                    }
                }
                Some(Def::Struct(..)) => {
                    match *sub_pats {
                        None => {
                            // This is a unit-like struct. Nothing to do here.
                        }
                        Some(ref elems) => {
                            // This is the tuple struct case.
                            let repr = adt::represent_node(bcx, pat.id);
                            let val = adt::MaybeSizedValue::sized(val.val);
                            for (i, elem) in elems.iter().enumerate() {
                                let fldptr = adt::trans_field_ptr(bcx, &repr,
                                                                  val, Disr(0), i);
                                bcx = bind_irrefutable_pat(
                                    bcx,
                                    &elem,
                                    MatchInput::from_val(fldptr),
                                    cleanup_scope);
                            }
                        }
                    }
                }
                _ => {
                    // Nothing to do here.
                }
            }
        }
        PatKind::Struct(_, ref fields, _) => {
            let tcx = bcx.tcx();
            let pat_ty = node_id_type(bcx, pat.id);
            let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
            let pat_v = VariantInfo::of_node(tcx, pat_ty, pat.id);

            let val = if type_is_sized(tcx, pat_ty) {
                adt::MaybeSizedValue::sized(val.val)
            } else {
                let data = Load(bcx, expr::get_dataptr(bcx, val.val));
                let meta = Load(bcx, expr::get_meta(bcx, val.val));
                adt::MaybeSizedValue::unsized_(data, meta)
            };

            for f in fields {
                let name = f.node.name;
                let field_idx = pat_v.field_index(name);
                let mut fldptr = adt::trans_field_ptr(
                    bcx,
                    &pat_repr,
                    val,
                    pat_v.discr,
                    field_idx);

                let fty = pat_v.fields[field_idx].1;
                // If the field's type is not sized, construct a fat pointer
                // to the field instead of a regular one.
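                // (E.g. a trailing `str` or `[T]` field of a struct whose
                // last field is dynamically sized.)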
                if !type_is_sized(tcx, fty) {
                    let scratch = alloc_ty(bcx, fty, "__struct_field_fat_ptr");
                    debug!("Creating fat pointer {:?}", Value(scratch));
                    Store(bcx, fldptr, expr::get_dataptr(bcx, scratch));
                    Store(bcx, val.meta, expr::get_meta(bcx, scratch));
                    fldptr = scratch;
                }
                bcx = bind_irrefutable_pat(bcx,
                                           &f.node.pat,
                                           MatchInput::from_val(fldptr),
                                           cleanup_scope);
            }
        }
        PatKind::Tup(ref elems) => {
            let repr = adt::represent_node(bcx, pat.id);
            let val = adt::MaybeSizedValue::sized(val.val);
            for (i, elem) in elems.iter().enumerate() {
                let fldptr = adt::trans_field_ptr(bcx, &repr, val, Disr(0), i);
                bcx = bind_irrefutable_pat(
                    bcx,
                    &elem,
                    MatchInput::from_val(fldptr),
                    cleanup_scope);
            }
        }
        PatKind::Box(ref inner) => {
            let pat_ty = node_id_type(bcx, inner.id);
            // Pass along DSTs as fat pointers.
            let val = if type_is_fat_ptr(tcx, pat_ty) {
                // We need to check for this, as the pattern could be binding
                // a fat pointer by-value.
                if let PatKind::Ident(hir::BindByRef(_), _, _) = inner.node {
                    val.val
                } else {
                    Load(bcx, val.val)
                }
            } else if type_is_sized(tcx, pat_ty) {
                Load(bcx, val.val)
            } else {
                val.val
            };
            bcx = bind_irrefutable_pat(
                bcx, &inner, MatchInput::from_val(val), cleanup_scope);
        }
        PatKind::Ref(ref inner, _) => {
            let pat_ty = node_id_type(bcx, inner.id);
            // Pass along DSTs as fat pointers.
            let val = if type_is_fat_ptr(tcx, pat_ty) {
                // We need to check for this, as the pattern could be binding
                // a fat pointer by-value.
                if let PatKind::Ident(hir::BindByRef(_), _, _) = inner.node {
                    val.val
                } else {
                    Load(bcx, val.val)
                }
            } else if type_is_sized(tcx, pat_ty) {
                Load(bcx, val.val)
            } else {
                val.val
            };
            bcx = bind_irrefutable_pat(
                bcx,
                &inner,
                MatchInput::from_val(val),
                cleanup_scope);
        }
        PatKind::Vec(ref before, ref slice, ref after) => {
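            // A sketch of the shapes involved: for `[a, rest.., b]` (the
            // old slice-pattern syntax), `before` is `[a]`, `slice` is
            // `Some(rest)`, and `after` is `[b]`.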
            let pat_ty = node_id_type(bcx, pat.id);
            let mut extracted = extract_vec_elems(bcx, pat_ty, before.len(), after.len(), val);
            match slice {
                &Some(_) => {
                    extracted.vals.insert(
                        before.len(),
                        bind_subslice_pat(bcx, pat.id, val, before.len(), after.len())
                    );
                }
                &None => ()
            }
            bcx = before
                .iter()
                .chain(slice.iter())
                .chain(after.iter())
                .zip(extracted.vals)
                .fold(bcx, |bcx, (inner, elem)| {
                    bind_irrefutable_pat(
                        bcx,
                        &inner,
                        MatchInput::from_val(elem),
                        cleanup_scope)
                });
        }
        PatKind::Path(..) | PatKind::QPath(..) | PatKind::Wild | PatKind::Lit(_) |
        PatKind::Range(_, _) => ()
    }
    return bcx;
}