// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! # Compilation of match statements
//!
//! I will endeavor to explain the code as best I can. I have only a loose
//! understanding of some parts of it.
//!
//! ## Matching
//!
//! The basic state of the code is maintained in an array `m` of `Match`
//! objects. Each `Match` describes some list of patterns, all of which must
//! match against the current list of values. If those patterns match, then
//! the arm listed in the match is the correct arm. A given arm may have
//! multiple corresponding match entries, one for each alternative that
//! remains. As we proceed, these sets of matches are adjusted by the various
//! `enter_XXX()` functions, each of which adjusts the set of options given
//! some information about the value which has been matched.
//!
//! So, initially, there is one value and N matches, each of which has one
//! constituent pattern. N here is usually the number of arms but may be
//! greater, if some arms have multiple alternatives. For example, here:
//!
//!     enum Foo { A, B(isize), C(usize, usize) }
//!     match foo {
//!         A => ...,
//!         B(x) => ...,
//!         C(1, 2) => ...,
//!         C(_) => ...
//!     }
//!
//! The value would be `foo`. There would be four matches, each of which
//! contains one pattern (and, in one case, a guard). We could collect the
//! various options and then compile the code for the case where `foo` is an
//! `A`, a `B`, and a `C`. When we generate the code for `C`, we would (1)
//! drop the two matches that do not match a `C` and (2) expand the other two
//! into two patterns each. In the first case, the two patterns would be `1`
//! and `2`, and in the second case the `_` pattern would be expanded into
//! `_` and `_`. The two values are of course the arguments to `C`.
//!
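//! To make this concrete, here is a schematic sketch (these are not the
//! compiler's actual data structures) of how the four matches evolve when we
//! specialize on the `C` constructor. Each row is one `Match`, and each
//! column is one pattern still to be checked against the current values:
//!
//!     before (values: foo)      after enter_opt(C) (values: arg0, arg1)
//!     [ A ]                     (dropped: cannot match a `C`)
//!     [ B(x) ]                  (dropped: cannot match a `C`)
//!     [ C(1, 2) ]               [ 1, 2 ]
//!     [ C(_) ]                  [ _, _ ]
//!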
//! Here is a quick guide to the various functions:
//!
//! - `compile_submatch()`: The main workhorse. It takes a list of values and
//!   a list of matches and finds the various possibilities that could occur.
//!
//! - `enter_XXX()`: modifies the list of matches based on some information
//!   about the value that has been matched. For example,
//!   `enter_rec_or_struct()` adjusts the values given that a record or struct
//!   has been matched. This is an infallible pattern, so *all* of the matches
//!   must be either wildcards or record/struct patterns. `enter_opt()`
//!   handles the fallible cases, and it is correspondingly more complex.
//!
//! ## Bindings
//!
//! We store information about the bound variables for each arm as part of the
//! per-arm `ArmData` struct. There is a mapping from identifiers to
//! `BindingInfo` structs. These structs contain the mode/id/type of the
//! binding, but they also contain an LLVM value which points at an alloca
//! called `llmatch`. For by-value bindings that are Copy, we also create
//! an extra alloca that we copy the matched value into, so that any changes
//! we make to our copy are not reflected in the original and vice versa.
//! We don't do this if it's a move, since the original value can't be used
//! afterwards, which lets us cheat by not creating the extra alloca.
//!
//! The `llmatch` binding always stores a pointer into the value being matched
//! which points at the data for the binding. If the value being matched has
//! type `T`, then, `llmatch` will point at an alloca of type `T*` (and hence
//! `llmatch` has type `T**`). So, if you have a pattern like:
//!
//!     let a: A = ...;
//!     let b: B = ...;
//!     match (a, b) { (ref c, d) => { ... } }
//!
//! For `c` and `d`, we would generate allocas of type `A*` and `B*`
//! respectively. These are called the `llmatch`. As we match, when we come
//! up against an identifier, we store the current pointer into the
//! corresponding alloca.
//!
//! Once a pattern is completely matched, and assuming that there is no guard
//! pattern, we will branch to a block that leads to the body itself. For any
//! by-value bindings, this block will first load the ptr from `llmatch` (the
//! one of type `B*`) and then load a second time to get the actual value (the
//! one of type `B`). For by-ref bindings, the value of the local variable is
//! simply the first alloca.
//!
//! So, for the example above, we would generate a setup kind of like this:
//!
//!        +-------+
//!        | Entry |
//!        +-------+
//!            |
//!        +----------------------------------------------+
//!        | llmatch_c = (addr of first half of tuple)    |
//!        | llmatch_d = (addr of second half of tuple)   |
//!        +----------------------------------------------+
//!            |
//!        +----------------------------------------------+
//!        | *llbinding_d = **llmatch_d                   |
//!        +----------------------------------------------+
//!
//! If there is a guard, the situation is slightly different, because we must
//! execute the guard code. Moreover, we need to do so once for each of the
//! alternatives that lead to the arm, because if the guard fails, they may
//! have different points from which to continue the search. Therefore, in
//! that case, we generate code that looks more like:
//!
//!        +-------+
//!        | Entry |
//!        +-------+
//!            |
//!        +----------------------------------------------+
//!        | llmatch_c = (addr of first half of tuple)    |
//!        | llmatch_d = (addr of second half of tuple)   |
//!        +----------------------------------------------+
//!            |
//!        +----------------------------------------------+
//!        | *llbinding_d = **llmatch_d                   |
//!        | check condition                              |
//!        | if false { goto next case }                  |
//!        | if true { goto body }                        |
//!        +----------------------------------------------+
//!
//! The handling for the cleanups is a bit... sensitive. Basically, the body
//! is the one that invokes `add_clean()` for each binding. During the guard
//! evaluation, we add temporary cleanups and revoke them after the guard is
//! evaluated (it could fail, after all). Note that guards and moves are
//! just plain incompatible.
//!
//! Some relevant helper functions that manage bindings:
//! - `create_bindings_map()`
//! - `insert_lllocals()`
//!
//!
//! ## Notes on vector pattern matching.
//!
//! Vector pattern matching is surprisingly tricky. The problem is that
//! the structure of the vector isn't fully known, and slice matches
//! can be done on subparts of it.
//!
//! The way that vector pattern matches are dealt with, then, is as
//! follows. First, we make the actual condition associated with a
//! vector pattern simply a vector length comparison. So the pattern
//! [1, .. x] gets the condition "vec len >= 1", and the pattern
//! [.. x] gets the condition "vec len >= 0". The problem here is that
//! having the condition "vec len >= 1" hold clearly does not mean that
//! only a pattern that has exactly that condition will match. This
//! means that it may well be the case that a condition holds, but none
//! of the patterns matching that condition match; to deal with this,
//! when doing vector length matches, we have match failures proceed to
//! the next condition to check.
//!
//! There are a couple more subtleties to deal with. While the "actual"
//! condition associated with vector length tests is simply a test on
//! the vector length, the actual vec_len Opt entry contains more
//! information used to restrict which matches are associated with it.
//! So that all matches in a submatch are matching against the same
//! values from inside the vector, they are split up by how many
//! elements they match at the front and at the back of the vector. In
//! order to make sure that arms are properly checked in order, even
//! with the overmatching conditions, each vec_len Opt entry is
//! associated with a range of matches.
//! Consider the following:
//!
//!     match &[1, 2, 3] {
//!         [1, 1, .. _] => 0,
//!         [1, 2, 2, .. _] => 1,
//!         [1, 2, 3, .. _] => 2,
//!         [1, 2, .. _] => 3,
//!         _ => 4
//!     }
//!
//! The proper arm to match is arm 2, but arms 0 and 3 both have the
//! condition "len >= 2". If arm 3 was lumped in with arm 0, then the
//! wrong branch would be taken. Instead, vec_len Opts are associated
//! with a contiguous range of matches that have the same "shape".
//! This is sort of ugly and requires a bunch of special handling of
//! vec_len options.
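//!
//! As a concrete reading of the scheme just described: the arms here would
//! be grouped into three vec_len Opts, checked in arm order: arm 0 alone
//! (prefix length 2, condition "len >= 2"), arms 1 and 2 together (prefix
//! length 3, condition "len >= 3"), and arm 3 alone (prefix length 2,
//! condition "len >= 2" again). A length test that succeeds but whose
//! patterns all fail then proceeds to the next condition rather than to the
//! default, as noted above.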

pub use self::BranchKind::*;
pub use self::OptResult::*;
pub use self::TransBindingMode::*;
use self::Opt::*;
use self::FailureHandler::*;

use llvm::{ValueRef, BasicBlockRef};
use rustc_const_eval::check_match::{self, Constructor, StaticInliner};
use rustc_const_eval::{compare_lit_exprs, eval_const_expr, fatal_const_eval_err};
use rustc::hir::def::{Def, DefMap};
use rustc::hir::def_id::DefId;
use middle::expr_use_visitor as euv;
use middle::lang_items::StrEqFnLangItem;
use middle::mem_categorization as mc;
use middle::mem_categorization::Categorization;
use rustc::hir::pat_util::*;
use rustc::ty::subst::Substs;
use adt;
use base::*;
use build::{AddCase, And, Br, CondBr, GEPi, InBoundsGEP, Load, PointerCast};
use build::{Not, Store, Sub, add_comment};
use build;
use callee::{Callee, ArgVals};
use cleanup::{self, CleanupMethods, DropHintMethods};
use common::*;
use consts;
use datum::*;
use debuginfo::{self, DebugLoc, ToDebugLoc};
use expr::{self, Dest};
use monomorphize;
use tvec;
use type_of;
use Disr;
use value::Value;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::traits::Reveal;
use session::config::NoDebugInfo;
use util::common::indenter;
use util::nodemap::FnvHashMap;
use util::ppaux;

use std;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt;
use std::rc::Rc;
use rustc::hir::{self, PatKind};
use syntax::ast::{self, DUMMY_NODE_ID, NodeId};
use syntax_pos::Span;
use rustc::hir::fold::Folder;
use syntax::ptr::P;

#[derive(Copy, Clone, Debug)]
struct ConstantExpr<'a>(&'a hir::Expr);

impl<'a> ConstantExpr<'a> {
    fn eq<'b, 'tcx>(self, other: ConstantExpr<'a>, tcx: TyCtxt<'b, 'tcx, 'tcx>) -> bool {
        match compare_lit_exprs(tcx, self.0.span, self.0, other.0) {
            Ok(result) => result == Ordering::Equal,
            Err(_) => bug!("compare_lit_exprs: type mismatch"),
        }
    }
}

// An option identifying a branch (either a literal, an enum variant or a range)
#[derive(Debug)]
enum Opt<'a, 'tcx> {
    ConstantValue(ConstantExpr<'a>, DebugLoc),
    ConstantRange(ConstantExpr<'a>, ConstantExpr<'a>, DebugLoc),
    Variant(Disr, Rc<adt::Repr<'tcx>>, DefId, DebugLoc),
    SliceLengthEqual(usize, DebugLoc),
    SliceLengthGreaterOrEqual(/* prefix length */ usize,
                              /* suffix length */ usize,
                              DebugLoc),
}
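
// For illustration (these mappings are established in get_branches() below):
// a literal pattern like `5` yields ConstantValue; a range pattern like
// `0 ... 9` yields ConstantRange; an enum pattern like `Some(_)` yields
// Variant; a vector pattern with exactly two elements and no subslice yields
// SliceLengthEqual(2); and one with a subslice plus one element before it
// and one after it yields SliceLengthGreaterOrEqual(1, 1).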

impl<'a, 'b, 'tcx> Opt<'a, 'tcx> {
    fn eq(&self, other: &Opt<'a, 'tcx>, tcx: TyCtxt<'b, 'tcx, 'tcx>) -> bool {
        match (self, other) {
            (&ConstantValue(a, _), &ConstantValue(b, _)) => a.eq(b, tcx),
            (&ConstantRange(a1, a2, _), &ConstantRange(b1, b2, _)) => {
                a1.eq(b1, tcx) && a2.eq(b2, tcx)
            }
            (&Variant(a_disr, ref a_repr, a_def, _),
             &Variant(b_disr, ref b_repr, b_def, _)) => {
                a_disr == b_disr && *a_repr == *b_repr && a_def == b_def
            }
            (&SliceLengthEqual(a, _), &SliceLengthEqual(b, _)) => a == b,
            (&SliceLengthGreaterOrEqual(a1, a2, _),
             &SliceLengthGreaterOrEqual(b1, b2, _)) => {
                a1 == b1 && a2 == b2
            }
            _ => false
        }
    }

    fn trans<'blk>(&self, mut bcx: Block<'blk, 'tcx>) -> OptResult<'blk, 'tcx> {
        use consts::TrueConst::Yes;
        let _icx = push_ctxt("match::trans_opt");
        let ccx = bcx.ccx();
        match *self {
            ConstantValue(ConstantExpr(lit_expr), _) => {
                let lit_ty = bcx.tcx().node_id_to_type(lit_expr.id);
                let expr = consts::const_expr(ccx, &lit_expr, bcx.fcx.param_substs, None, Yes);
                let llval = match expr {
                    Ok((llval, _)) => llval,
                    Err(err) => {
                        fatal_const_eval_err(bcx.tcx(), err.as_inner(), lit_expr.span, "pattern");
                    }
                };
                let lit_datum = immediate_rvalue(llval, lit_ty);
                let lit_datum = unpack_datum!(bcx, lit_datum.to_appropriate_datum(bcx));
                SingleResult(Result::new(bcx, lit_datum.val))
            }
            ConstantRange(ConstantExpr(ref l1), ConstantExpr(ref l2), _) => {
                let l1 = match consts::const_expr(ccx, &l1, bcx.fcx.param_substs, None, Yes) {
                    Ok((l1, _)) => l1,
                    Err(err) => fatal_const_eval_err(bcx.tcx(), err.as_inner(), l1.span, "pattern"),
                };
                let l2 = match consts::const_expr(ccx, &l2, bcx.fcx.param_substs, None, Yes) {
                    Ok((l2, _)) => l2,
                    Err(err) => fatal_const_eval_err(bcx.tcx(), err.as_inner(), l2.span, "pattern"),
                };
                RangeResult(Result::new(bcx, l1), Result::new(bcx, l2))
            }
            Variant(disr_val, ref repr, _, _) => {
                SingleResult(Result::new(bcx, adt::trans_case(bcx, &repr, disr_val)))
            }
            SliceLengthEqual(length, _) => {
                SingleResult(Result::new(bcx, C_uint(ccx, length)))
            }
            SliceLengthGreaterOrEqual(prefix, suffix, _) => {
                LowerBound(Result::new(bcx, C_uint(ccx, prefix + suffix)))
            }
        }
    }

    fn debug_loc(&self) -> DebugLoc {
        match *self {
            ConstantValue(_, debug_loc) |
            ConstantRange(_, _, debug_loc) |
            Variant(_, _, _, debug_loc) |
            SliceLengthEqual(_, debug_loc) |
            SliceLengthGreaterOrEqual(_, _, debug_loc) => debug_loc
        }
    }
}

#[derive(Copy, Clone, PartialEq)]
pub enum BranchKind {
    NoBranch,
    Single,
    Switch,
    Compare,
    CompareSliceLength
}

pub enum OptResult<'blk, 'tcx: 'blk> {
    SingleResult(Result<'blk, 'tcx>),
    RangeResult(Result<'blk, 'tcx>, Result<'blk, 'tcx>),
    LowerBound(Result<'blk, 'tcx>)
}

#[derive(Clone, Copy, PartialEq)]
pub enum TransBindingMode {
    /// By-value binding for a copy type: copies from matched data
    /// into a fresh LLVM alloca.
    TrByCopy(/* llbinding */ ValueRef),

    /// By-value binding for a non-copy type where we copy into a
    /// fresh LLVM alloca; this most accurately reflects the language
    /// semantics (e.g. it properly handles overwrites of the matched
    /// input), but potentially injects an unwanted copy.
    TrByMoveIntoCopy(/* llbinding */ ValueRef),

    /// Binding a non-copy type by reference under the hood; this is
    /// a codegen optimization to avoid unnecessary memory traffic.
    TrByMoveRef,

    /// By-ref binding exposed in the original source input.
    TrByRef,
}
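
// For illustration (a sketch; the exact choice is made in
// create_bindings_map() below): in `match pair { (ref r, v) => ... }`, `r`
// gets TrByRef. If `v`'s type is Copy, `v` gets TrByCopy(..); if it moves
// by default, `v` gets TrByMoveRef, unless the matched value is reassigned
// in the arm body, in which case TrByMoveIntoCopy(..) is used instead.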

impl TransBindingMode {
    /// If binding by making a fresh copy, returns the alloca that it
    /// will copy into; otherwise returns None.
    fn alloca_if_copy(&self) -> Option<ValueRef> {
        match *self {
            TrByCopy(llbinding) | TrByMoveIntoCopy(llbinding) => Some(llbinding),
            TrByMoveRef | TrByRef => None,
        }
    }
}

/// Information about a pattern binding:
/// - `llmatch` is a pointer to a stack slot. The stack slot contains a
///   pointer into the value being matched. Hence, llmatch has type `T**`
///   where `T` is the value being matched.
/// - `trmode` is the trans binding mode
/// - `id` is the node id of the binding
/// - `ty` is the Rust type of the binding
#[derive(Clone, Copy)]
pub struct BindingInfo<'tcx> {
    pub llmatch: ValueRef,
    pub trmode: TransBindingMode,
    pub id: ast::NodeId,
    pub span: Span,
    pub ty: Ty<'tcx>,
}

type BindingsMap<'tcx> = FnvHashMap<ast::Name, BindingInfo<'tcx>>;

struct ArmData<'p, 'blk, 'tcx: 'blk> {
    bodycx: Block<'blk, 'tcx>,
    arm: &'p hir::Arm,
    bindings_map: BindingsMap<'tcx>
}

/// Info about Match.
/// If all `pats` are matched then arm `data` will be executed.
/// As we proceed, `bound_ptrs` is filled with pointers to the values to be
/// bound; these pointers are stored in the `llmatch` variables just before
/// executing the `data` arm.
struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> {
    pats: Vec<&'p hir::Pat>,
    data: &'a ArmData<'p, 'blk, 'tcx>,
    bound_ptrs: Vec<(ast::Name, ValueRef)>,
    // Thread along renamings done by the check_match::StaticInliner, so we can
    // map back to original NodeIds
    pat_renaming_map: Option<&'a FnvHashMap<(NodeId, Span), NodeId>>
}

impl<'a, 'p, 'blk, 'tcx> fmt::Debug for Match<'a, 'p, 'blk, 'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if ppaux::verbose() {
            // for many programs, this just takes too long to serialize
            write!(f, "{:?}", self.pats)
        } else {
            write!(f, "{} pats", self.pats.len())
        }
    }
}

fn has_nested_bindings(m: &[Match], col: usize) -> bool {
    for br in m {
        if let PatKind::Binding(_, _, Some(..)) = br.pats[col].node {
            return true
        }
    }
    false
}

// As noted in `fn match_datum`, we should eventually pass around a
// `Datum<Lvalue>` for the `val`; but until we get to that point, this
// `MatchInput` struct will serve -- it has everything `Datum<Lvalue>`
// does except for the type field.
#[derive(Copy, Clone)]
pub struct MatchInput { val: ValueRef, lval: Lvalue }

impl<'tcx> Datum<'tcx, Lvalue> {
    pub fn match_input(&self) -> MatchInput {
        MatchInput {
            val: self.val,
            lval: self.kind,
        }
    }
}

impl fmt::Debug for MatchInput {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&Value(self.val), f)
    }
}

impl MatchInput {
    fn from_val(val: ValueRef) -> MatchInput {
        MatchInput {
            val: val,
            lval: Lvalue::new("MatchInput::from_val"),
        }
    }

    fn to_datum<'tcx>(self, ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
        Datum::new(self.val, ty, self.lval)
    }
}

fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                              m: &[Match<'a, 'p, 'blk, 'tcx>],
                                              col: usize,
                                              val: MatchInput)
                                              -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("expand_nested_bindings(bcx={}, m={:?}, col={}, val={:?})",
           bcx.to_str(), m, col, val);
    let _indenter = indenter();

    m.iter().map(|br| {
        let mut bound_ptrs = br.bound_ptrs.clone();
        let mut pat = br.pats[col];
        loop {
            pat = match pat.node {
                PatKind::Binding(_, ref path, Some(ref inner)) => {
                    bound_ptrs.push((path.node, val.val));
                    &inner
                },
                _ => break
            }
        }

        let mut pats = br.pats.clone();
        pats[col] = pat;
        Match {
            pats: pats,
            data: &br.data,
            bound_ptrs: bound_ptrs,
            pat_renaming_map: br.pat_renaming_map,
        }
    }).collect()
}
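
// For example (a sketch): if the pattern in `col` is `x @ Some(ref y)`, the
// loop above records (`x`, val.val) in `bound_ptrs` and replaces the
// column's pattern with the inner `Some(ref y)`, so matching continues on
// the nested pattern.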

fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
                                          m: &[Match<'a, 'p, 'blk, 'tcx>],
                                          col: usize,
                                          val: MatchInput,
                                          mut e: F)
                                          -> Vec<Match<'a, 'p, 'blk, 'tcx>> where
    F: FnMut(&[(&'p hir::Pat, Option<Ty<'tcx>>)])
             -> Option<Vec<(&'p hir::Pat, Option<Ty<'tcx>>)>>,
{
    debug!("enter_match(bcx={}, m={:?}, col={}, val={:?})",
           bcx.to_str(), m, col, val);
    let _indenter = indenter();

    m.iter().filter_map(|br| {
        let pats: Vec<_> = br.pats.iter().map(|p| (*p, None)).collect();
        e(&pats).map(|pats| {
            let this = br.pats[col];
            let mut bound_ptrs = br.bound_ptrs.clone();
            match this.node {
                PatKind::Binding(_, ref path, None) => {
                    bound_ptrs.push((path.node, val.val));
                }
                PatKind::Vec(ref before, Some(ref slice), ref after) => {
                    if let PatKind::Binding(_, ref path, None) = slice.node {
                        let subslice_val = bind_subslice_pat(
                            bcx, this.id, val,
                            before.len(), after.len());
                        bound_ptrs.push((path.node, subslice_val));
                    }
                }
                _ => {}
            }
            Match {
                pats: pats.into_iter().map(|p| p.0).collect(),
                data: br.data,
                bound_ptrs: bound_ptrs,
                pat_renaming_map: br.pat_renaming_map,
            }
        })
    }).collect()
}

fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     m: &[Match<'a, 'p, 'blk, 'tcx>],
                                     col: usize,
                                     val: MatchInput)
                                     -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_default(bcx={}, m={:?}, col={}, val={:?})",
           bcx.to_str(), m, col, val);
    let _indenter = indenter();

    // Collect all of the matches that can match against anything.
    enter_match(bcx, m, col, val, |pats| {
        match pats[col].0.node {
            PatKind::Binding(..) | PatKind::Wild => {
                let mut r = pats[..col].to_vec();
                r.extend_from_slice(&pats[col + 1..]);
                Some(r)
            }
            _ => None
        }
    })
}
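
// For example (a sketch): with column 0 of the rows `[_, p0]`,
// `[Some(_), p1]`, `[w, p2]` (where `w` is a binding), only the first and
// third rows can match "anything"; they are kept with column 0 removed
// (leaving `[p0]` and `[p2]`), and `w`'s pointer is recorded by
// enter_match() above.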

// <pcwalton> nmatsakis: what does enter_opt do?
// <pcwalton> in trans/match
// <pcwalton> trans/match.rs is like stumbling around in a dark cave
// <nmatsakis> pcwalton: the enter family of functions adjust the set of
//             patterns as needed
// <nmatsakis> yeah, at some point I kind of achieved some level of
//             understanding
// <nmatsakis> anyhow, they adjust the patterns given that something of that
//             kind has been found
// <nmatsakis> pcwalton: ok, right, so enter_XXX() adjusts the patterns, as I
//             said
// <nmatsakis> enter_match() kind of embodies the generic code
// <nmatsakis> it is provided with a function that tests each pattern to see
//             if it might possibly apply and so forth
// <nmatsakis> so, if you have a pattern like {a: _, b: _, _} and one like _
// <nmatsakis> then _ would be expanded to (_, _)
// <nmatsakis> one spot for each of the sub-patterns
// <nmatsakis> enter_opt() is one of the more complex; it covers the fallible
//             cases
// <nmatsakis> enter_rec_or_struct() or enter_tuple() are simpler, since they
//             are infallible patterns
// <nmatsakis> so all patterns must either be records (resp. tuples) or
//             wildcards

/// The above is now outdated in that enter_match() now takes a function that
/// takes the complete row of patterns rather than just the first one.
/// Also, most of the enter_() family functions have been unified with
/// the check_match specialization step.
fn enter_opt<'a, 'p, 'blk, 'tcx>(
             bcx: Block<'blk, 'tcx>,
             _: ast::NodeId,
             m: &[Match<'a, 'p, 'blk, 'tcx>],
             opt: &Opt,
             col: usize,
             variant_size: usize,
             val: MatchInput)
             -> Vec<Match<'a, 'p, 'blk, 'tcx>> {
    debug!("enter_opt(bcx={}, m={:?}, opt={:?}, col={}, val={:?})",
           bcx.to_str(), m, *opt, col, val);
    let _indenter = indenter();

    let ctor = match opt {
        &ConstantValue(ConstantExpr(expr), _) => Constructor::ConstantValue(
            eval_const_expr(bcx.tcx(), &expr)
        ),
        &ConstantRange(ConstantExpr(lo), ConstantExpr(hi), _) => Constructor::ConstantRange(
            eval_const_expr(bcx.tcx(), &lo),
            eval_const_expr(bcx.tcx(), &hi)
        ),
        &SliceLengthEqual(n, _) =>
            Constructor::Slice(n),
        &SliceLengthGreaterOrEqual(before, after, _) =>
            Constructor::SliceWithSubslice(before, after),
        &Variant(_, _, def_id, _) =>
            Constructor::Variant(def_id)
    };

    let param_env = bcx.tcx().empty_parameter_environment();
    let mcx = check_match::MatchCheckCtxt {
        tcx: bcx.tcx(),
        param_env: param_env,
    };
    enter_match(bcx, m, col, val, |pats|
        check_match::specialize(&mcx, &pats[..], &ctor, col, variant_size)
    )
}

// Returns the options in one column of matches. An option is something that
// needs to be conditionally matched at runtime; for example, the discriminant
// on a set of enum variants or a literal.
fn get_branches<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    m: &[Match<'a, 'p, 'blk, 'tcx>],
                                    col: usize)
                                    -> Vec<Opt<'p, 'tcx>> {
    let tcx = bcx.tcx();

    let mut found: Vec<Opt> = vec![];
    for br in m {
        let cur = br.pats[col];
        let debug_loc = match br.pat_renaming_map {
            Some(pat_renaming_map) => {
                match pat_renaming_map.get(&(cur.id, cur.span)) {
                    Some(&id) => DebugLoc::At(id, cur.span),
                    None => DebugLoc::At(cur.id, cur.span),
                }
            }
            None => DebugLoc::None
        };

        let opt = match cur.node {
            PatKind::Lit(ref l) => {
                ConstantValue(ConstantExpr(&l), debug_loc)
            }
            PatKind::Path(..) | PatKind::TupleStruct(..) | PatKind::Struct(..) => {
                match tcx.expect_def(cur.id) {
                    Def::Variant(enum_id, var_id) => {
                        let variant = tcx.lookup_adt_def(enum_id).variant_with_id(var_id);
                        Variant(Disr::from(variant.disr_val),
                                adt::represent_node(bcx, cur.id),
                                var_id,
                                debug_loc)
                    }
                    _ => continue
                }
            }
            PatKind::Range(ref l1, ref l2) => {
                ConstantRange(ConstantExpr(&l1), ConstantExpr(&l2), debug_loc)
            }
            PatKind::Vec(ref before, None, ref after) => {
                SliceLengthEqual(before.len() + after.len(), debug_loc)
            }
            PatKind::Vec(ref before, Some(_), ref after) => {
                SliceLengthGreaterOrEqual(before.len(), after.len(), debug_loc)
            }
            _ => continue
        };

        if !found.iter().any(|x| x.eq(&opt, tcx)) {
            found.push(opt);
        }
    }
    found
}

struct ExtractedBlock<'blk, 'tcx: 'blk> {
    vals: Vec<ValueRef>,
    bcx: Block<'blk, 'tcx>,
}

fn extract_variant_args<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                    repr: &adt::Repr<'tcx>,
                                    disr_val: Disr,
                                    val: MatchInput)
                                    -> ExtractedBlock<'blk, 'tcx> {
    let _icx = push_ctxt("match::extract_variant_args");
    // Assume enums are always sized for now.
    let val = adt::MaybeSizedValue::sized(val.val);
    let args = (0..adt::num_args(repr, disr_val)).map(|i| {
        adt::trans_field_ptr(bcx, repr, val, disr_val, i)
    }).collect();

    ExtractedBlock { vals: args, bcx: bcx }
}

/// Helper for converting from the ValueRef that we pass around in the match code, which is always
/// an lvalue, into a Datum. Eventually we should just pass around a Datum and be done with it.
fn match_datum<'tcx>(val: MatchInput, left_ty: Ty<'tcx>) -> Datum<'tcx, Lvalue> {
    val.to_datum(left_ty)
}

fn bind_subslice_pat(bcx: Block,
                     pat_id: ast::NodeId,
                     val: MatchInput,
                     offset_left: usize,
                     offset_right: usize) -> ValueRef {
    let _icx = push_ctxt("match::bind_subslice_pat");
    let vec_ty = node_id_type(bcx, pat_id);
    let vec_ty_contents = match vec_ty.sty {
        ty::TyBox(ty) => ty,
        ty::TyRef(_, mt) | ty::TyRawPtr(mt) => mt.ty,
        _ => vec_ty
    };
    let unit_ty = vec_ty_contents.sequence_element_type(bcx.tcx());
    let vec_datum = match_datum(val, vec_ty);
    let (base, len) = vec_datum.get_vec_base_and_len(bcx);

    let slice_begin = InBoundsGEP(bcx, base, &[C_uint(bcx.ccx(), offset_left)]);
    let diff = offset_left + offset_right;
    if let ty::TyArray(ty, n) = vec_ty_contents.sty {
        let array_ty = bcx.tcx().mk_array(ty, n - diff);
        let llty_array = type_of::type_of(bcx.ccx(), array_ty);
        return PointerCast(bcx, slice_begin, llty_array.ptr_to());
    }

    let slice_len_offset = C_uint(bcx.ccx(), diff);
    let slice_len = Sub(bcx, len, slice_len_offset, DebugLoc::None);
    let slice_ty = bcx.tcx().mk_imm_ref(bcx.tcx().mk_region(ty::ReErased),
                                        bcx.tcx().mk_slice(unit_ty));
    let scratch = rvalue_scratch_datum(bcx, slice_ty, "");
    Store(bcx, slice_begin, expr::get_dataptr(bcx, scratch.val));
    Store(bcx, slice_len, expr::get_meta(bcx, scratch.val));
    scratch.val
}
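
// For example (a sketch): for a subslice binding with one element before it
// and one after it, matched against a slice of length `len`, offset_left and
// offset_right are both 1, so the subslice becomes the fat pointer
// (base + 1, len - 2). For a fixed-size array type we instead return a thin
// pointer cast to the shorter array type.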

fn extract_vec_elems<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 left_ty: Ty<'tcx>,
                                 before: usize,
                                 after: usize,
                                 val: MatchInput)
                                 -> ExtractedBlock<'blk, 'tcx> {
    let _icx = push_ctxt("match::extract_vec_elems");
    let vec_datum = match_datum(val, left_ty);
    let (base, len) = vec_datum.get_vec_base_and_len(bcx);
    let mut elems = vec![];
    elems.extend((0..before).map(|i| GEPi(bcx, base, &[i])));
    elems.extend((0..after).rev().map(|i| {
        InBoundsGEP(bcx, base, &[
            Sub(bcx, len, C_uint(bcx.ccx(), i + 1), DebugLoc::None)
        ])
    }));
    ExtractedBlock { vals: elems, bcx: bcx }
}
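
// For example (a sketch): with before = 2 and after = 1 against a vector of
// length `len`, the returned vals point at elements 0 and 1, followed by a
// pointer to element len - 1.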

// Macro for deciding whether any of the remaining matches fit a given kind of
// pattern. Note that, because the macro is well-typed, either ALL of the
// matches should fit that sort of pattern or NONE (however, some of the
// matches may be wildcards like _ or identifiers).
macro_rules! any_pat {
    ($m:expr, $col:expr, $pattern:pat) => (
        ($m).iter().any(|br| {
            match br.pats[$col].node {
                $pattern => true,
                _ => false
            }
        })
    )
}

fn any_uniq_pat(m: &[Match], col: usize) -> bool {
    any_pat!(m, col, PatKind::Box(_))
}

fn any_region_pat(m: &[Match], col: usize) -> bool {
    any_pat!(m, col, PatKind::Ref(..))
}

fn any_irrefutable_adt_pat(tcx: TyCtxt, m: &[Match], col: usize) -> bool {
    m.iter().any(|br| {
        let pat = br.pats[col];
        match pat.node {
            PatKind::Tuple(..) => true,
            PatKind::Struct(..) | PatKind::TupleStruct(..) | PatKind::Path(..) => {
                match tcx.expect_def(pat.id) {
                    Def::Struct(..) | Def::TyAlias(..) | Def::AssociatedTy(..) => true,
                    _ => false,
                }
            }
            _ => false
        }
    })
}

/// What to do when the pattern match fails.
enum FailureHandler {
    Infallible,
    JumpToBasicBlock(BasicBlockRef),
    Unreachable
}

impl FailureHandler {
    fn is_fallible(&self) -> bool {
        match *self {
            Infallible => false,
            _ => true
        }
    }

    fn is_infallible(&self) -> bool {
        !self.is_fallible()
    }

    fn handle_fail(&self, bcx: Block) {
        match *self {
            Infallible =>
                bug!("attempted to panic in a non-panicking panic handler!"),
            JumpToBasicBlock(basic_block) =>
                Br(bcx, basic_block, DebugLoc::None),
            Unreachable =>
                build::Unreachable(bcx)
        }
    }
}

fn pick_column_to_specialize(def_map: &RefCell<DefMap>, m: &[Match]) -> Option<usize> {
    fn pat_score(def_map: &RefCell<DefMap>, pat: &hir::Pat) -> usize {
        match pat.node {
            PatKind::Binding(_, _, Some(ref inner)) => pat_score(def_map, &inner),
            _ if pat_is_refutable(&def_map.borrow(), pat) => 1,
            _ => 0
        }
    }

    let column_score = |m: &[Match], col: usize| -> usize {
        let total_score = m.iter()
            .map(|row| row.pats[col])
            .map(|pat| pat_score(def_map, pat))
            .sum();

        // Irrefutable columns always go first; they'd only be duplicated in
        // the branches.
        if total_score == 0 {
            std::usize::MAX
        } else {
            total_score
        }
    };

    let column_contains_any_nonwild_patterns = |&col: &usize| -> bool {
        m.iter().any(|row| match row.pats[col].node {
            PatKind::Wild => false,
            _ => true
        })
    };

    (0..m[0].pats.len())
        .filter(column_contains_any_nonwild_patterns)
        .map(|col| (col, column_score(m, col)))
        .max_by_key(|&(_, score)| score)
        .map(|(col, _)| col)
}
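
// For example (a sketch): given the two-column rows `[Some(_), x]` and
// `[None, y]`, column 0 scores 2 (both patterns are refutable) while column
// 1 contains only irrefutable bindings and so scores usize::MAX; column 1 is
// therefore specialized first. Columns consisting entirely of wildcards are
// filtered out before scoring.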

// Compiles a comparison between two things.
fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                              lhs: ValueRef,
                              rhs: ValueRef,
                              rhs_t: Ty<'tcx>,
                              debug_loc: DebugLoc)
                              -> Result<'blk, 'tcx> {
    fn compare_str<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               lhs_data: ValueRef,
                               lhs_len: ValueRef,
                               rhs_data: ValueRef,
                               rhs_len: ValueRef,
                               rhs_t: Ty<'tcx>,
                               debug_loc: DebugLoc)
                               -> Result<'blk, 'tcx> {
        let did = langcall(bcx.tcx(),
                           None,
                           &format!("comparison of `{}`", rhs_t),
                           StrEqFnLangItem);
        let args = [lhs_data, lhs_len, rhs_data, rhs_len];
        Callee::def(bcx.ccx(), did, bcx.tcx().mk_substs(Substs::empty()))
            .call(bcx, debug_loc, ArgVals(&args), None)
    }

    let _icx = push_ctxt("compare_values");
    if rhs_t.is_scalar() {
        let cmp = compare_scalar_types(cx, lhs, rhs, rhs_t, hir::BiEq, debug_loc);
        return Result::new(cx, cmp);
    }

    match rhs_t.sty {
        ty::TyRef(_, mt) => match mt.ty.sty {
            ty::TyStr => {
                let lhs_data = Load(cx, expr::get_dataptr(cx, lhs));
                let lhs_len = Load(cx, expr::get_meta(cx, lhs));
                let rhs_data = Load(cx, expr::get_dataptr(cx, rhs));
                let rhs_len = Load(cx, expr::get_meta(cx, rhs));
                compare_str(cx, lhs_data, lhs_len, rhs_data, rhs_len, rhs_t, debug_loc)
            }
            ty::TyArray(ty, _) | ty::TySlice(ty) => match ty.sty {
                ty::TyUint(ast::UintTy::U8) => {
                    // NOTE: cast &[u8] and &[u8; N] to &str and abuse the str_eq lang item,
                    // which calls memcmp().
                    let pat_len = val_ty(rhs).element_type().array_length();
                    let ty_str_slice = cx.tcx().mk_static_str();

                    let rhs_data = GEPi(cx, rhs, &[0, 0]);
                    let rhs_len = C_uint(cx.ccx(), pat_len);

                    let lhs_data;
                    let lhs_len;
                    if val_ty(lhs) == val_ty(rhs) {
                        // Both the discriminant and the pattern are thin pointers
                        lhs_data = GEPi(cx, lhs, &[0, 0]);
                        lhs_len = C_uint(cx.ccx(), pat_len);
                    } else {
                        // The discriminant is a fat pointer
                        let llty_str_slice = type_of::type_of(cx.ccx(), ty_str_slice).ptr_to();
                        let lhs_str = PointerCast(cx, lhs, llty_str_slice);
                        lhs_data = Load(cx, expr::get_dataptr(cx, lhs_str));
                        lhs_len = Load(cx, expr::get_meta(cx, lhs_str));
                    }

                    compare_str(cx, lhs_data, lhs_len, rhs_data, rhs_len, rhs_t, debug_loc)
                },
                _ => bug!("only byte strings supported in compare_values"),
            },
            _ => bug!("only string and byte strings supported in compare_values"),
        },
        _ => bug!("only scalars, byte strings, and strings supported in compare_values"),
    }
}

/// For each binding in `data.bindings_map`, adds an appropriate entry into the `fcx.lllocals` map
fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                               bindings_map: &BindingsMap<'tcx>,
                               cs: Option<cleanup::ScopeId>)
                               -> Block<'blk, 'tcx> {
    for (&name, &binding_info) in bindings_map {
        let (llval, aliases_other_state) = match binding_info.trmode {
            // By value mut binding for a copy type: load from the ptr
            // into the matched value and copy to our alloca
            TrByCopy(llbinding) |
            TrByMoveIntoCopy(llbinding) => {
                let llval = Load(bcx, binding_info.llmatch);
                let lvalue = match binding_info.trmode {
                    TrByCopy(..) =>
                        Lvalue::new("_match::insert_lllocals"),
                    TrByMoveIntoCopy(..) => {
                        // match_input moves from the input into a
                        // separate stack slot.
                        //
                        // E.g. consider moving the value `D(A)` out
                        // of the tuple `(D(A), D(B))` and into the
                        // local variable `x` via the pattern `(x,_)`,
                        // leaving the remainder of the tuple `(_,
                        // D(B))` still to be dropped in the future.
                        //
                        // Thus, here we must zero the place that we
                        // are moving *from*, because we do not yet
                        // track drop flags for a fragmented parent
                        // match input expression.
                        //
                        // Longer term we will be able to map the move
                        // into `(x, _)` up to the parent path that
                        // owns the whole tuple, and mark the
                        // corresponding stack-local drop-flag
                        // tracking the first component of the tuple.
                        let hint_kind = HintKind::ZeroAndMaintain;
                        Lvalue::new_with_hint("_match::insert_lllocals (match_input)",
                                              bcx, binding_info.id, hint_kind)
                    }
                    _ => bug!(),
                };
                let datum = Datum::new(llval, binding_info.ty, lvalue);
                call_lifetime_start(bcx, llbinding);
                bcx = datum.store_to(bcx, llbinding);
                if let Some(cs) = cs {
                    bcx.fcx.schedule_lifetime_end(cs, llbinding);
                }

                (llbinding, false)
            },

            // By value move bindings: load from the ptr into the matched value
            TrByMoveRef => (Load(bcx, binding_info.llmatch), true),

            // By ref binding: use the ptr into the matched value
            TrByRef => (binding_info.llmatch, true),
        };

        // A local that aliases some other state must be zeroed, since
        // the other state (e.g. some parent data that we matched
        // into) will still have its subcomponents (such as this
        // local) destructed at the end of the parent's scope. Longer
        // term, we will properly map such parents to the set of
        // unique drop flags for its fragments.
        let hint_kind = if aliases_other_state {
            HintKind::ZeroAndMaintain
        } else {
            HintKind::DontZeroJustUse
        };
        let lvalue = Lvalue::new_with_hint("_match::insert_lllocals (local)",
                                           bcx,
                                           binding_info.id,
                                           hint_kind);
        let datum = Datum::new(llval, binding_info.ty, lvalue);
        if let Some(cs) = cs {
            let opt_datum = lvalue.dropflag_hint(bcx);
            bcx.fcx.schedule_lifetime_end(cs, binding_info.llmatch);
            bcx.fcx.schedule_drop_and_fill_mem(cs, llval, binding_info.ty, opt_datum);
        }

        debug!("binding {} to {:?}", binding_info.id, Value(llval));
        bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum);
        debuginfo::create_match_binding_metadata(bcx, name, binding_info);
    }
    bcx
}


fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     guard_expr: &hir::Expr,
                                     data: &ArmData<'p, 'blk, 'tcx>,
                                     m: &[Match<'a, 'p, 'blk, 'tcx>],
                                     vals: &[MatchInput],
                                     chk: &FailureHandler,
                                     has_genuine_default: bool)
                                     -> Block<'blk, 'tcx> {
    debug!("compile_guard(bcx={}, guard_expr={:?}, m={:?}, vals={:?})",
           bcx.to_str(), guard_expr, m, vals);
    let _indenter = indenter();

    let mut bcx = insert_lllocals(bcx, &data.bindings_map, None);

    let val = unpack_datum!(bcx, expr::trans(bcx, guard_expr));
    let val = val.to_llbool(bcx);

    for (_, &binding_info) in &data.bindings_map {
        if let Some(llbinding) = binding_info.trmode.alloca_if_copy() {
            call_lifetime_end(bcx, llbinding)
        }
    }

    for (_, &binding_info) in &data.bindings_map {
        bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id);
    }

    with_cond(bcx, Not(bcx, val, guard_expr.debug_loc()), |bcx| {
        for (_, &binding_info) in &data.bindings_map {
            call_lifetime_end(bcx, binding_info.llmatch);
        }
        match chk {
            // If the default arm is the only one left, move on to the next
            // condition explicitly rather than (possibly) falling back to
            // the default arm.
            &JumpToBasicBlock(_) if m.len() == 1 && has_genuine_default => {
                chk.handle_fail(bcx);
            }
            _ => {
                compile_submatch(bcx, m, vals, chk, has_genuine_default);
            }
        };
        bcx
    })
}

fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                        m: &[Match<'a, 'p, 'blk, 'tcx>],
                                        vals: &[MatchInput],
                                        chk: &FailureHandler,
                                        has_genuine_default: bool) {
    debug!("compile_submatch(bcx={}, m={:?}, vals=[{:?}])",
           bcx.to_str(), m, vals);
    let _indenter = indenter();
    let _icx = push_ctxt("match::compile_submatch");
    let mut bcx = bcx;
    if m.is_empty() {
        if chk.is_fallible() {
            chk.handle_fail(bcx);
        }
        return;
    }

    let tcx = bcx.tcx();
    let def_map = &tcx.def_map;
    match pick_column_to_specialize(def_map, m) {
        Some(col) => {
            let val = vals[col];
            if has_nested_bindings(m, col) {
                let expanded = expand_nested_bindings(bcx, m, col, val);
                compile_submatch_continue(bcx,
                                          &expanded[..],
                                          vals,
                                          chk,
                                          col,
                                          val,
                                          has_genuine_default)
            } else {
                compile_submatch_continue(bcx, m, vals, chk, col, val, has_genuine_default)
            }
        }
        None => {
            let data = &m[0].data;
            for &(ref name, ref value_ptr) in &m[0].bound_ptrs {
                let binfo = *data.bindings_map.get(name).unwrap();
                call_lifetime_start(bcx, binfo.llmatch);
                if binfo.trmode == TrByRef && type_is_fat_ptr(bcx.tcx(), binfo.ty) {
                    expr::copy_fat_ptr(bcx, *value_ptr, binfo.llmatch);
                } else {
                    Store(bcx, *value_ptr, binfo.llmatch);
                }
            }
            match data.arm.guard {
                Some(ref guard_expr) => {
                    bcx = compile_guard(bcx,
                                        &guard_expr,
                                        m[0].data,
                                        &m[1..m.len()],
                                        vals,
                                        chk,
                                        has_genuine_default);
                }
                _ => ()
            }
            Br(bcx, data.bodycx.llbb, DebugLoc::None);
        }
    }
}

fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                                 m: &[Match<'a, 'p, 'blk, 'tcx>],
                                                 vals: &[MatchInput],
                                                 chk: &FailureHandler,
                                                 col: usize,
                                                 val: MatchInput,
                                                 has_genuine_default: bool) {
    let fcx = bcx.fcx;
    let tcx = bcx.tcx();

    let mut vals_left = vals[0..col].to_vec();
    vals_left.extend_from_slice(&vals[col + 1..]);
    let ccx = bcx.fcx.ccx;

    // Find a real id (we're adding placeholder wildcard patterns, but
    // each column is guaranteed to have at least one real pattern)
    let pat_id = m.iter().map(|br| br.pats[col].id)
                         .find(|&id| id != DUMMY_NODE_ID)
                         .unwrap_or(DUMMY_NODE_ID);

    let left_ty = if pat_id == DUMMY_NODE_ID {
        tcx.mk_nil()
    } else {
        node_id_type(bcx, pat_id)
    };

    let mcx = check_match::MatchCheckCtxt {
        tcx: bcx.tcx(),
        param_env: bcx.tcx().empty_parameter_environment(),
    };
    let adt_vals = if any_irrefutable_adt_pat(bcx.tcx(), m, col) {
        let repr = adt::represent_type(bcx.ccx(), left_ty);
        let arg_count = adt::num_args(&repr, Disr(0));
        let (arg_count, struct_val) = if type_is_sized(bcx.tcx(), left_ty) {
            (arg_count, val.val)
        } else {
            // For an unsized ADT (i.e. DST struct), we need to treat
            // the last field specially: instead of simply passing a
            // ValueRef pointing to that field, as with all the others,
            // we skip it and instead construct a 'fat ptr' below.
            (arg_count - 1, Load(bcx, expr::get_dataptr(bcx, val.val)))
        };
        let mut field_vals: Vec<ValueRef> = (0..arg_count).map(|ix|
            // By definition, these are all sized
            adt::trans_field_ptr(bcx, &repr, adt::MaybeSizedValue::sized(struct_val), Disr(0), ix)
        ).collect();

        match left_ty.sty {
            ty::TyStruct(def, substs) if !type_is_sized(bcx.tcx(), left_ty) => {
                // The last field is technically unsized but
                // since we can only ever match that field behind
                // a reference we construct a fat ptr here.
                let unsized_ty = def.struct_variant().fields.last().map(|field| {
                    monomorphize::field_ty(bcx.tcx(), substs, field)
                }).unwrap();
                let scratch = alloc_ty(bcx, unsized_ty, "__struct_field_fat_ptr");

                let meta = Load(bcx, expr::get_meta(bcx, val.val));
                let struct_val = adt::MaybeSizedValue::unsized_(struct_val, meta);

                let data = adt::trans_field_ptr(bcx, &repr, struct_val, Disr(0), arg_count);
                Store(bcx, data, expr::get_dataptr(bcx, scratch));
                Store(bcx, meta, expr::get_meta(bcx, scratch));
                field_vals.push(scratch);
            }
            _ => {}
        }
        Some(field_vals)
    } else if any_uniq_pat(m, col) || any_region_pat(m, col) {
        let ptr = if type_is_fat_ptr(bcx.tcx(), left_ty) {
            val.val
        } else {
            Load(bcx, val.val)
        };
        Some(vec![ptr])
    } else {
        match left_ty.sty {
            ty::TyArray(_, n) => {
                let args = extract_vec_elems(bcx, left_ty, n, 0, val);
                Some(args.vals)
            }
            _ => None
        }
    };
    match adt_vals {
        Some(field_vals) => {
            let pats = enter_match(bcx, m, col, val, |pats|
                check_match::specialize(&mcx, pats,
                                        &Constructor::Single, col,
                                        field_vals.len())
            );
            let mut vals: Vec<_> = field_vals.into_iter()
                .map(|v| MatchInput::from_val(v))
                .collect();
            vals.extend_from_slice(&vals_left);
            compile_submatch(bcx, &pats, &vals, chk, has_genuine_default);
            return;
        }
        _ => ()
    }

    // Decide what kind of branch we need
    let opts = get_branches(bcx, m, col);
    debug!("options={:?}", opts);
    let mut kind = NoBranch;
    let mut test_val = val.val;
    debug!("test_val={:?}", Value(test_val));
    if !opts.is_empty() {
        match opts[0] {
            ConstantValue(..) | ConstantRange(..) => {
                test_val = load_if_immediate(bcx, val.val, left_ty);
                kind = if left_ty.is_integral() {
                    Switch
                } else {
                    Compare
                };
            }
            Variant(_, ref repr, _, _) => {
                let (the_kind, val_opt) = adt::trans_switch(bcx, &repr,
                                                            val.val, true);
                kind = the_kind;
                if let Some(tval) = val_opt { test_val = tval; }
            }
            SliceLengthEqual(..) | SliceLengthGreaterOrEqual(..) => {
                let (_, len) = tvec::get_base_and_len(bcx, val.val, left_ty);
                test_val = len;
                kind = Switch;
            }
        }
    }
    for o in &opts {
        match *o {
            ConstantRange(..) => { kind = Compare; break },
            SliceLengthGreaterOrEqual(..) => { kind = CompareSliceLength; break },
            _ => ()
        }
    }
    let else_cx = match kind {
        NoBranch | Single => bcx,
        _ => bcx.fcx.new_temp_block("match_else")
    };
    let sw = if kind == Switch {
        build::Switch(bcx, test_val, else_cx.llbb, opts.len())
    } else {
        C_int(ccx, 0) // Placeholder for when not using a switch
    };

    let defaults = enter_default(else_cx, m, col, val);
    let exhaustive = chk.is_infallible() && defaults.is_empty();
    let len = opts.len();

    if exhaustive && kind == Switch {
        build::Unreachable(else_cx);
    }

    // Compile subtrees for each option
    for (i, opt) in opts.iter().enumerate() {
        // In some cases of range and vector pattern matching, we need to
        // override the failure case so that instead of failing, it proceeds
        // to try more matching. branch_chk, then, is the proper failure case
        // for the current conditional branch.
        let mut branch_chk = None;
        let mut opt_cx = else_cx;
        let debug_loc = opt.debug_loc();

        if kind == Switch || !exhaustive || i + 1 < len {
            opt_cx = bcx.fcx.new_temp_block("match_case");
            match kind {
                Single => Br(bcx, opt_cx.llbb, debug_loc),
                Switch => {
                    match opt.trans(bcx) {
                        SingleResult(r) => {
                            AddCase(sw, r.val, opt_cx.llbb);
                            bcx = r.bcx;
                        }
                        _ => {
                            bug!(
                                "in compile_submatch, expected \
                                 opt.trans() to return a SingleResult")
                        }
                    }
                }
                Compare | CompareSliceLength => {
                    let t = if kind == Compare {
                        left_ty
                    } else {
                        tcx.types.usize // vector length
                    };
                    let Result { bcx: after_cx, val: matches } = {
                        match opt.trans(bcx) {
                            SingleResult(Result { bcx, val }) => {
                                compare_values(bcx, test_val, val, t, debug_loc)
                            }
                            RangeResult(Result { val: vbegin, .. },
                                        Result { bcx, val: vend }) => {
                                let llge = compare_scalar_types(bcx, test_val, vbegin,
                                                                t, hir::BiGe, debug_loc);
                                let llle = compare_scalar_types(bcx, test_val, vend,
                                                                t, hir::BiLe, debug_loc);
                                Result::new(bcx, And(bcx, llge, llle, DebugLoc::None))
                            }
                            LowerBound(Result { bcx, val }) => {
                                Result::new(bcx, compare_scalar_types(bcx, test_val,
                                                                      val, t, hir::BiGe,
                                                                      debug_loc))
                            }
                        }
                    };
                    bcx = fcx.new_temp_block("compare_next");

                    // If none of the sub-cases match, and the current condition
                    // is guarded or has multiple patterns, move on to the next
                    // condition, if there is any, rather than falling back to
                    // the default.
                    let guarded = m[i].data.arm.guard.is_some();
                    let multi_pats = m[i].pats.len() > 1;
                    if i + 1 < len && (guarded || multi_pats || kind == CompareSliceLength) {
                        branch_chk = Some(JumpToBasicBlock(bcx.llbb));
                    }
                    CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb, debug_loc);
                }
                _ => ()
            }
        } else if kind == Compare || kind == CompareSliceLength {
            Br(bcx, else_cx.llbb, debug_loc);
        }

        let mut size = 0;
        let mut unpacked = Vec::new();
        match *opt {
            Variant(disr_val, ref repr, _, _) => {
                let ExtractedBlock { vals: argvals, bcx: new_bcx } =
                    extract_variant_args(opt_cx, &repr, disr_val, val);
                size = argvals.len();
                unpacked = argvals;
                opt_cx = new_bcx;
            }
            SliceLengthEqual(len, _) => {
                let args = extract_vec_elems(opt_cx, left_ty, len, 0, val);
                size = args.vals.len();
                unpacked = args.vals.clone();
                opt_cx = args.bcx;
            }
            SliceLengthGreaterOrEqual(before, after, _) => {
                let args = extract_vec_elems(opt_cx, left_ty, before, after, val);
                size = args.vals.len();
                unpacked = args.vals.clone();
                opt_cx = args.bcx;
            }
            ConstantValue(..) | ConstantRange(..) => ()
        }
        let opt_ms = enter_opt(opt_cx, pat_id, m, opt, col, size, val);
        let mut opt_vals: Vec<_> = unpacked.into_iter()
            .map(|v| MatchInput::from_val(v))
            .collect();
        opt_vals.extend_from_slice(&vals_left[..]);
        compile_submatch(opt_cx,
                         &opt_ms[..],
                         &opt_vals[..],
                         branch_chk.as_ref().unwrap_or(chk),
                         has_genuine_default);
    }

    // Compile the fall-through case, if any
    if !exhaustive && kind != Single {
        if kind == Compare || kind == CompareSliceLength {
            Br(bcx, else_cx.llbb, DebugLoc::None);
        }
        match chk {
            // If there is only one default arm left, move on to the next
            // condition explicitly rather than (eventually) falling back to
            // the last default arm.
            &JumpToBasicBlock(_) if defaults.len() == 1 && has_genuine_default => {
                chk.handle_fail(else_cx);
            }
            _ => {
                compile_submatch(else_cx,
                                 &defaults[..],
                                 &vals_left[..],
                                 chk,
                                 has_genuine_default);
            }
        }
    }
}

pub fn trans_match<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               match_expr: &hir::Expr,
                               discr_expr: &hir::Expr,
                               arms: &[hir::Arm],
                               dest: Dest)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::trans_match");
    trans_match_inner(bcx, match_expr.id, discr_expr, arms, dest)
}

/// Checks whether the binding in `discr` is assigned to anywhere in the expression `body`
fn is_discr_reassigned(bcx: Block, discr: &hir::Expr, body: &hir::Expr) -> bool {
    let (vid, field) = match discr.node {
        hir::ExprPath(..) => match bcx.tcx().expect_def(discr.id) {
            Def::Local(_, vid) | Def::Upvar(_, vid, _, _) => (vid, None),
            _ => return false
        },
        hir::ExprField(ref base, field) => {
            let vid = match bcx.tcx().expect_def_or_none(base.id) {
                Some(Def::Local(_, vid)) | Some(Def::Upvar(_, vid, _, _)) => vid,
                _ => return false
            };
            (vid, Some(mc::NamedField(field.node)))
        },
        hir::ExprTupField(ref base, field) => {
            let vid = match bcx.tcx().expect_def_or_none(base.id) {
                Some(Def::Local(_, vid)) | Some(Def::Upvar(_, vid, _, _)) => vid,
                _ => return false
            };
            (vid, Some(mc::PositionalField(field.node)))
        },
        _ => return false
    };

    let mut rc = ReassignmentChecker {
        node: vid,
        field: field,
        reassigned: false
    };
    bcx.tcx().normalizing_infer_ctxt(Reveal::All).enter(|infcx| {
        let mut visitor = euv::ExprUseVisitor::new(&mut rc, &infcx);
        visitor.walk_expr(body);
    });
    rc.reassigned
}

struct ReassignmentChecker {
    node: ast::NodeId,
    field: Option<mc::FieldName>,
    reassigned: bool
}

// Determine if the expression we're matching on is reassigned to within
// the body of the match's arm.
// We only care for the `mutate` callback since this check only matters
// for cases where the matched value is moved.
impl<'tcx> euv::Delegate<'tcx> for ReassignmentChecker {
    fn consume(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: euv::ConsumeMode) {}
    fn matched_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::MatchMode) {}
    fn consume_pat(&mut self, _: &hir::Pat, _: mc::cmt, _: euv::ConsumeMode) {}
    fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: ty::Region,
              _: ty::BorrowKind, _: euv::LoanCause) {}
    fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}

    fn mutate(&mut self, _: ast::NodeId, _: Span, cmt: mc::cmt, _: euv::MutateMode) {
        let cmt_id = |cmt: &mc::cmt| match cmt.cat {
            Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
            Categorization::Local(vid) => Some(vid),
            Categorization::Interior(ref base_cmt, mc::InteriorField(_)) => Some(base_cmt.id),
            _ => None
        };
        match cmt.cat {
            Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
            Categorization::Local(vid) => self.reassigned |= self.node == vid,
            ref cat => {
                let mut cat = cat;
                while let &Categorization::Interior(ref base_cmt, mc::InteriorField(field)) = cat {
                    if let Some(vid) = cmt_id(base_cmt) {
                        if self.node == vid && (self.field.is_none() || self.field == Some(field)) {
                            self.reassigned = true;
                            return;
                        }
                    }
                    cat = &base_cmt.cat;
                }
            }
        }
    }
}

fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &hir::Pat,
                                   discr: &hir::Expr, body: &hir::Expr)
                                   -> BindingsMap<'tcx> {
    // Create the bindings map, which is a mapping from each binding name
    // to an alloca() that will be the value for that local variable.
    // Note that we use the names because each binding will have many ids
    // from the various alternatives.
    let ccx = bcx.ccx();
    let reassigned = is_discr_reassigned(bcx, discr, body);
    let mut bindings_map = FnvHashMap();
    pat_bindings(&pat, |bm, p_id, span, path1| {
        let name = path1.node;
        let variable_ty = node_id_type(bcx, p_id);
        let llvariable_ty = type_of::type_of(ccx, variable_ty);
        let tcx = bcx.tcx();
        let param_env = tcx.empty_parameter_environment();

        let llmatch;
        let trmode;
        let moves_by_default = variable_ty.moves_by_default(tcx, &param_env, span);
        match bm {
            hir::BindByValue(_) if !moves_by_default || reassigned => {
                llmatch = alloca(bcx, llvariable_ty.ptr_to(), "__llmatch");
                let llcopy = alloca(bcx, llvariable_ty, &bcx.name(name));
                trmode = if moves_by_default {
                    TrByMoveIntoCopy(llcopy)
                } else {
                    TrByCopy(llcopy)
                };
            }
            hir::BindByValue(_) => {
                // in this case, the final type of the variable will be T,
                // but during matching we need to store a *T as explained
                // above
                llmatch = alloca(bcx, llvariable_ty.ptr_to(), &bcx.name(name));
                trmode = TrByMoveRef;
            }
            hir::BindByRef(_) => {
                llmatch = alloca(bcx, llvariable_ty, &bcx.name(name));
                trmode = TrByRef;
            }
        };
        bindings_map.insert(name, BindingInfo {
            llmatch: llmatch,
            trmode: trmode,
            id: p_id,
            span: span,
            ty: variable_ty
        });
    });
    return bindings_map;
}

fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
                                 match_id: ast::NodeId,
                                 discr_expr: &hir::Expr,
                                 arms: &[hir::Arm],
                                 dest: Dest) -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::trans_match_inner");
    let fcx = scope_cx.fcx;
    let mut bcx = scope_cx;
    let tcx = bcx.tcx();

    let discr_datum = unpack_datum!(bcx, expr::trans_to_lvalue(bcx, discr_expr,
                                                               "match"));
    if bcx.unreachable.get() {
        return bcx;
    }

    let t = node_id_type(bcx, discr_expr.id);
    let chk = if t.is_uninhabited(tcx) {
        Unreachable
    } else {
        Infallible
    };

    let arm_datas: Vec<ArmData> = arms.iter().map(|arm| ArmData {
        bodycx: fcx.new_id_block("case_body", arm.body.id),
        arm: arm,
        bindings_map: create_bindings_map(bcx, &arm.pats[0], discr_expr, &arm.body)
    }).collect();

    let mut pat_renaming_map = if scope_cx.sess().opts.debuginfo != NoDebugInfo {
        Some(FnvHashMap())
    } else {
        None
    };

    let arm_pats: Vec<Vec<P<hir::Pat>>> = {
        let mut static_inliner = StaticInliner::new(scope_cx.tcx(),
                                                    pat_renaming_map.as_mut());
        arm_datas.iter().map(|arm_data| {
            arm_data.arm.pats.iter().map(|p| static_inliner.fold_pat((*p).clone())).collect()
        }).collect()
    };

    let mut matches = Vec::new();
    for (arm_data, pats) in arm_datas.iter().zip(&arm_pats) {
        matches.extend(pats.iter().map(|p| Match {
            pats: vec![&p],
            data: arm_data,
            bound_ptrs: Vec::new(),
            pat_renaming_map: pat_renaming_map.as_ref()
        }));
    }

1632 // `compile_submatch` works one column of arm patterns a time and
1633 // then peels that column off. So as we progress, it may become
1634 // impossible to tell whether we have a genuine default arm, i.e.
1635 // `_ => foo` or not. Sometimes it is important to know that in order
1636 // to decide whether moving on to the next condition or falling back
1637 // to the default arm.
1638 let has_default = arms.last().map_or(false, |arm| {
1639 arm.pats.len() == 1
1640 && arm.pats.last().unwrap().node == PatKind::Wild
1641 });
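    // For example (illustrative only), the first match below ends in a
    // genuine default arm, while the second does not, because its last
    // arm carries more than one alternative:
    //
    //     match x { 0 => a(), _ => b() }         // has_default == true
    //     match x { 0 => a(), 1 | _ => b() }     // has_default == false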

    compile_submatch(bcx, &matches[..], &[discr_datum.match_input()], &chk, has_default);

    let mut arm_cxs = Vec::new();
    for arm_data in &arm_datas {
        let mut bcx = arm_data.bodycx;

        // insert bindings into the lllocals map and add cleanups
        let cs = fcx.push_custom_cleanup_scope();
        bcx = insert_lllocals(bcx, &arm_data.bindings_map, Some(cleanup::CustomScope(cs)));
        bcx = expr::trans_into(bcx, &arm_data.arm.body, dest);
        bcx = fcx.pop_and_trans_custom_cleanup_scope(bcx, cs);
        arm_cxs.push(bcx);
    }

    bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[..]);
    return bcx;
}

/// Generates code for a local variable declaration like `let <pat>;` or
/// `let <pat> = <init_expr>`.
pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               local: &hir::Local)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("match::store_local");
    let mut bcx = bcx;
    let tcx = bcx.tcx();
    let pat = &local.pat;

    fn create_dummy_locals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                       pat: &hir::Pat)
                                       -> Block<'blk, 'tcx> {
        let _icx = push_ctxt("create_dummy_locals");
        // create dummy memory for the variables if we have no
        // value to store into them immediately
        let tcx = bcx.tcx();
        pat_bindings(pat, |_, p_id, _, path1| {
            let scope = cleanup::var_scope(tcx, p_id);
            bcx = mk_binding_alloca(
                bcx, p_id, path1.node, scope, (),
                "_match::store_local::create_dummy_locals",
                |(), bcx, Datum { val: llval, ty, kind }| {
                    // Dummy-locals start out uninitialized, so set their
                    // drop-flag hints (if any) to "moved."
                    if let Some(hint) = kind.dropflag_hint(bcx) {
                        let moved_hint = adt::DTOR_MOVED_HINT;
                        debug!("store moved_hint={} for hint={:?}, uninitialized dummy",
                               moved_hint, hint);
                        Store(bcx, C_u8(bcx.fcx.ccx, moved_hint), hint.to_value().value());
                    }

                    if kind.drop_flag_info.must_zero() {
                        // If there is no drop-flag hint, or if the hint
                        // requires us to maintain the embedded drop-flag,
                        // mark the embedded drop-flag(s) as moved
                        // (i.e. "already dropped").
                        drop_done_fill_mem(bcx, llval, ty);
                    }
                    bcx
                });
        });
        bcx
    }

    match local.init {
        Some(ref init_expr) => {
            // Optimize the "let x = expr" case. This just writes
            // the result of evaluating `expr` directly into the alloca
            // for `x`. Often the general path results in similar or the
            // same code post-optimization, but not always. In particular,
            // in unsafe code, you can have expressions like
            //
            //    let x = intrinsics::uninit();
            //
            // In such cases, the more general path is unsafe, because
            // it assumes it is matching against a valid value.
            if let Some(name) = simple_name(pat) {
                let var_scope = cleanup::var_scope(tcx, local.id);
                return mk_binding_alloca(
                    bcx, pat.id, name, var_scope, (),
                    "_match::store_local",
                    |(), bcx, Datum { val: v, .. }| expr::trans_into(bcx, &init_expr,
                                                                     expr::SaveIn(v)));
            }

            // General path.
            let init_datum =
                unpack_datum!(bcx, expr::trans_to_lvalue(bcx, &init_expr, "let"));
            if bcx.sess().asm_comments() {
                add_comment(bcx, "creating zeroable ref llval");
            }
            let var_scope = cleanup::var_scope(tcx, local.id);
            bind_irrefutable_pat(bcx, pat, init_datum.match_input(), var_scope)
        }
        None => {
            create_dummy_locals(bcx, pat)
        }
    }
}
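// The three paths through `store_local`, for example (illustrative only):
//
//     let x = compute();     // simple-name fast path: the initializer is
//                            // translated straight into `x`'s alloca
//     let (a, b) = pair();   // general path: translate to an lvalue, then
//                            // bind via `bind_irrefutable_pat`
//     let y: u32;            // no initializer: `create_dummy_locals` makes
//                            // uninitialized memory and marks it "moved"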

fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
                                       p_id: ast::NodeId,
                                       name: ast::Name,
                                       cleanup_scope: cleanup::ScopeId,
                                       arg: A,
                                       caller_name: &'static str,
                                       populate: F)
                                       -> Block<'blk, 'tcx> where
    F: FnOnce(A, Block<'blk, 'tcx>, Datum<'tcx, Lvalue>) -> Block<'blk, 'tcx>,
{
    let var_ty = node_id_type(bcx, p_id);

    // Allocate memory on the stack for the binding.
    let llval = alloc_ty(bcx, var_ty, &bcx.name(name));
    let lvalue = Lvalue::new_with_hint(caller_name, bcx, p_id, HintKind::DontZeroJustUse);
    let datum = Datum::new(llval, var_ty, lvalue);

    debug!("mk_binding_alloca cleanup_scope={:?} llval={:?} var_ty={:?}",
           cleanup_scope, Value(llval), var_ty);

    // Subtle: be sure that we *populate* the memory *before*
    // we schedule the cleanup.
    call_lifetime_start(bcx, llval);
    let bcx = populate(arg, bcx, datum);
    bcx.fcx.schedule_lifetime_end(cleanup_scope, llval);
    bcx.fcx.schedule_drop_mem(cleanup_scope, llval, var_ty, lvalue.dropflag_hint(bcx));

    // Now that memory is initialized and has cleanup scheduled,
    // insert the datum into the local variable map.
    bcx.fcx.lllocals.borrow_mut().insert(p_id, datum);
    bcx
}
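// The ordering in `mk_binding_alloca` matters: if the drop were scheduled
// before `populate` ran, an unwind out of `populate` could run a destructor
// over memory that was never initialized. A sketch of the required sequence
// (illustrative only):
//
//     call_lifetime_start(bcx, llval);       // 1. mark the slot live
//     let bcx = populate(arg, bcx, datum);   // 2. write a valid value
//     bcx.fcx.schedule_drop_mem(...);        // 3. only now arm the drop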

/// A simple version of the pattern matching code that only handles
/// irrefutable patterns. This is used in let/argument patterns,
/// not in match statements. Unifying this code with the code above
/// sounds nice, but in practice it produces very inefficient code,
/// since the match code is so much more general. In most cases,
/// LLVM is able to optimize the code, but it causes longer compile
/// times and makes the generated code nigh impossible to read.
///
/// # Arguments
/// - bcx: starting basic block context
/// - pat: the irrefutable pattern being matched.
/// - val: the value being matched -- must be an lvalue (by ref, with cleanup)
pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                        pat: &hir::Pat,
                                        val: MatchInput,
                                        cleanup_scope: cleanup::ScopeId)
                                        -> Block<'blk, 'tcx> {
    debug!("bind_irrefutable_pat(bcx={}, pat={:?}, val={:?})",
           bcx.to_str(), pat, val);

    if bcx.sess().asm_comments() {
        add_comment(bcx, &format!("bind_irrefutable_pat(pat={:?})",
                                  pat));
    }

    let _indenter = indenter();

    let _icx = push_ctxt("match::bind_irrefutable_pat");
    let mut bcx = bcx;
    let tcx = bcx.tcx();
    let ccx = bcx.ccx();
    match pat.node {
        PatKind::Binding(pat_binding_mode, ref path1, ref inner) => {
            // Allocate the stack slot where the value of this
            // binding will live and place it into the appropriate
            // map.
            bcx = mk_binding_alloca(bcx, pat.id, path1.node, cleanup_scope, (),
                                    "_match::bind_irrefutable_pat",
                                    |(), bcx, Datum { val: llval, ty, kind: _ }| {
                match pat_binding_mode {
                    hir::BindByValue(_) => {
                        // By value binding: move the value that `val`
                        // points at into the binding's stack slot.
                        let d = val.to_datum(ty);
                        d.store_to(bcx, llval)
                    }

                    hir::BindByRef(_) => {
                        // By ref binding: the value of the variable is
                        // the pointer `val` itself, or the fat pointer
                        // referenced by `val`.
                        if type_is_fat_ptr(bcx.tcx(), ty) {
                            expr::copy_fat_ptr(bcx, val.val, llval);
                        } else {
                            Store(bcx, val.val, llval);
                        }

                        bcx
                    }
                }
            });

            if let Some(ref inner_pat) = *inner {
                bcx = bind_irrefutable_pat(bcx, &inner_pat, val, cleanup_scope);
            }
        }
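        // For example (illustrative only):
        //
        //     let x = v;             // BindByValue: move/copy `v` into `x`
        //     let ref r = v;         // BindByRef: store the address of `v`
        //     let p @ (a, b) = q;    // binding with an inner pattern: bind
        //                            // `p`, then recurse into `(a, b)`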
        PatKind::TupleStruct(_, ref sub_pats, ddpos) => {
            match bcx.tcx().expect_def(pat.id) {
                Def::Variant(enum_id, var_id) => {
                    let repr = adt::represent_node(bcx, pat.id);
                    let vinfo = ccx.tcx().lookup_adt_def(enum_id).variant_with_id(var_id);
                    let args = extract_variant_args(bcx,
                                                    &repr,
                                                    Disr::from(vinfo.disr_val),
                                                    val);
                    for (i, subpat) in sub_pats.iter()
                                               .enumerate_and_adjust(vinfo.fields.len(), ddpos) {
                        bcx = bind_irrefutable_pat(
                            bcx,
                            subpat,
                            MatchInput::from_val(args.vals[i]),
                            cleanup_scope);
                    }
                }
                Def::Struct(..) => {
                    let expected_len = match *ccx.tcx().pat_ty(&pat) {
                        ty::TyS { sty: ty::TyStruct(adt_def, _), .. } => {
                            adt_def.struct_variant().fields.len()
                        }
                        ref ty => {
                            span_bug!(pat.span, "tuple struct pattern unexpected type {:?}", ty);
                        }
                    };

                    let repr = adt::represent_node(bcx, pat.id);
                    let val = adt::MaybeSizedValue::sized(val.val);
                    for (i, elem) in sub_pats.iter().enumerate_and_adjust(expected_len, ddpos) {
                        let fldptr = adt::trans_field_ptr(bcx, &repr, val, Disr(0), i);
                        bcx = bind_irrefutable_pat(
                            bcx,
                            &elem,
                            MatchInput::from_val(fldptr),
                            cleanup_scope);
                    }
                }
                _ => {
                    // Nothing to do here.
                }
            }
        }
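        // For example (illustrative only):
        //
        //     struct Wrapper(u32, u32);
        //     let Wrapper(a, b) = w;    // Def::Struct: bind one field each
        //     let Wrapper(a, ..) = w;   // `ddpos` lets `..` skip the rest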
        PatKind::Struct(_, ref fields, _) => {
            let tcx = bcx.tcx();
            let pat_ty = node_id_type(bcx, pat.id);
            let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
            let pat_v = VariantInfo::of_node(tcx, pat_ty, pat.id);

            let val = if type_is_sized(tcx, pat_ty) {
                adt::MaybeSizedValue::sized(val.val)
            } else {
                let data = Load(bcx, expr::get_dataptr(bcx, val.val));
                let meta = Load(bcx, expr::get_meta(bcx, val.val));
                adt::MaybeSizedValue::unsized_(data, meta)
            };

            for f in fields {
                let name = f.node.name;
                let field_idx = pat_v.field_index(name);
                let mut fldptr = adt::trans_field_ptr(
                    bcx,
                    &pat_repr,
                    val,
                    pat_v.discr,
                    field_idx);

                let fty = pat_v.fields[field_idx].1;
                // If the field is not sized, construct a fat pointer
                // to it instead of a regular one.
                if !type_is_sized(tcx, fty) {
                    let scratch = alloc_ty(bcx, fty, "__struct_field_fat_ptr");
                    debug!("Creating fat pointer {:?}", Value(scratch));
                    Store(bcx, fldptr, expr::get_dataptr(bcx, scratch));
                    Store(bcx, val.meta, expr::get_meta(bcx, scratch));
                    fldptr = scratch;
                }
                bcx = bind_irrefutable_pat(bcx,
                                           &f.node.pat,
                                           MatchInput::from_val(fldptr),
                                           cleanup_scope);
            }
        }
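        // For example (illustrative only):
        //
        //     struct Point { x: f64, y: f64 }
        //     let Point { x, y } = p;    // bind each field the pattern names
        //
        // If a field is unsized (e.g. a trailing `[T]` or trait object),
        // the fat-pointer path above rebuilds a fat pointer to that field.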
        PatKind::Tuple(ref elems, ddpos) => {
            match tcx.node_id_to_type(pat.id).sty {
                ty::TyTuple(ref tys) => {
                    let repr = adt::represent_node(bcx, pat.id);
                    let val = adt::MaybeSizedValue::sized(val.val);
                    for (i, elem) in elems.iter().enumerate_and_adjust(tys.len(), ddpos) {
                        let fldptr = adt::trans_field_ptr(bcx, &repr, val, Disr(0), i);
                        bcx = bind_irrefutable_pat(
                            bcx,
                            &elem,
                            MatchInput::from_val(fldptr),
                            cleanup_scope);
                    }
                }
                ref sty => span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", sty),
            }
        }
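        // For example (illustrative only):
        //
        //     let (a, b, c) = triple;      // bind every element in order
        //     let (first, .., last) = t;   // `ddpos` records where `..`
        //                                  // sits, so `last` still maps
        //                                  // to the final field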
        PatKind::Box(ref inner) => {
            let pat_ty = node_id_type(bcx, inner.id);
            // Pass along DSTs as fat pointers.
            let val = if type_is_fat_ptr(tcx, pat_ty) {
                // We need to check for this, as the pattern could be binding
                // a fat pointer by-value.
                if let PatKind::Binding(hir::BindByRef(..), _, _) = inner.node {
                    val.val
                } else {
                    Load(bcx, val.val)
                }
            } else if type_is_sized(tcx, pat_ty) {
                Load(bcx, val.val)
            } else {
                val.val
            };
            bcx = bind_irrefutable_pat(
                bcx, &inner, MatchInput::from_val(val), cleanup_scope);
        }
        PatKind::Ref(ref inner, _) => {
            let pat_ty = node_id_type(bcx, inner.id);
            // Pass along DSTs as fat pointers.
            let val = if type_is_fat_ptr(tcx, pat_ty) {
                // We need to check for this, as the pattern could be binding
                // a fat pointer by-value.
                if let PatKind::Binding(hir::BindByRef(..), _, _) = inner.node {
                    val.val
                } else {
                    Load(bcx, val.val)
                }
            } else if type_is_sized(tcx, pat_ty) {
                Load(bcx, val.val)
            } else {
                val.val
            };
            bcx = bind_irrefutable_pat(
                bcx,
                &inner,
                MatchInput::from_val(val),
                cleanup_scope);
        }
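        // For example (illustrative only; `box` patterns are feature-gated):
        //
        //     let box x = Box::new(5);   // PatKind::Box: load the pointer,
        //                                // then bind `x` to the pointee
        //     let &y = &5;               // PatKind::Ref: load through the
        //                                // reference and bind `y` to 5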
        PatKind::Vec(ref before, ref slice, ref after) => {
            let pat_ty = node_id_type(bcx, pat.id);
            let mut extracted = extract_vec_elems(bcx, pat_ty, before.len(), after.len(), val);
            if slice.is_some() {
                extracted.vals.insert(
                    before.len(),
                    bind_subslice_pat(bcx, pat.id, val, before.len(), after.len())
                );
            }
            bcx = before
                .iter()
                .chain(slice.iter())
                .chain(after.iter())
                .zip(extracted.vals)
                .fold(bcx, |bcx, (inner, elem)| {
                    bind_irrefutable_pat(
                        bcx,
                        &inner,
                        MatchInput::from_val(elem),
                        cleanup_scope)
                });
        }
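        // For example (illustrative only; slice patterns are feature-gated
        // as of this version):
        //
        //     let [a, b, c] = arr;            // bind each extracted element
        //     let [first, ref rest..] = arr;  // the middle binding takes the
        //                                     // subslice that
        //                                     // bind_subslice_pat produces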
        PatKind::Path(..) | PatKind::Wild |
        PatKind::Lit(..) | PatKind::Range(..) => ()
    }
    return bcx;
}