]> git.proxmox.com Git - rustc.git/blob - src/librustc/middle/expr_use_visitor.rs
Imported Upstream version 1.7.0+dfsg1
[rustc.git] / src / librustc / middle / expr_use_visitor.rs
1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! A different sort of visitor for walking fn bodies. Unlike the
12 //! normal visitor, which just walks the entire body in one shot, the
13 //! `ExprUseVisitor` determines how expressions are being used.
14
15 pub use self::LoanCause::*;
16 pub use self::ConsumeMode::*;
17 pub use self::MoveReason::*;
18 pub use self::MatchMode::*;
19 use self::TrackMatchMode::*;
20 use self::OverloadedCallType::*;
21
22 use middle::{def, pat_util};
23 use middle::def_id::{DefId};
24 use middle::infer;
25 use middle::mem_categorization as mc;
26 use middle::ty;
27 use middle::ty::adjustment;
28
29 use rustc_front::hir;
30
31 use syntax::ast;
32 use syntax::ptr::P;
33 use syntax::codemap::Span;
34
35 ///////////////////////////////////////////////////////////////////////////
36 // The Delegate trait
37
38 /// This trait defines the callbacks you can expect to receive when
39 /// employing the ExprUseVisitor.
40 pub trait Delegate<'tcx> {
41 // The value found at `cmt` is either copied or moved, depending
42 // on mode.
43 fn consume(&mut self,
44 consume_id: ast::NodeId,
45 consume_span: Span,
46 cmt: mc::cmt<'tcx>,
47 mode: ConsumeMode);
48
49 // The value found at `cmt` has been determined to match the
50 // pattern binding `matched_pat`, and its subparts are being
51 // copied or moved depending on `mode`. Note that `matched_pat`
52 // is called on all variant/structs in the pattern (i.e., the
53 // interior nodes of the pattern's tree structure) while
54 // consume_pat is called on the binding identifiers in the pattern
55 // (which are leaves of the pattern's tree structure).
56 //
57 // Note that variants/structs and identifiers are disjoint; thus
58 // `matched_pat` and `consume_pat` are never both called on the
59 // same input pattern structure (though of `consume_pat` can be
60 // called on a subpart of an input passed to `matched_pat).
61 fn matched_pat(&mut self,
62 matched_pat: &hir::Pat,
63 cmt: mc::cmt<'tcx>,
64 mode: MatchMode);
65
66 // The value found at `cmt` is either copied or moved via the
67 // pattern binding `consume_pat`, depending on mode.
68 fn consume_pat(&mut self,
69 consume_pat: &hir::Pat,
70 cmt: mc::cmt<'tcx>,
71 mode: ConsumeMode);
72
73 // The value found at `borrow` is being borrowed at the point
74 // `borrow_id` for the region `loan_region` with kind `bk`.
75 fn borrow(&mut self,
76 borrow_id: ast::NodeId,
77 borrow_span: Span,
78 cmt: mc::cmt<'tcx>,
79 loan_region: ty::Region,
80 bk: ty::BorrowKind,
81 loan_cause: LoanCause);
82
83 // The local variable `id` is declared but not initialized.
84 fn decl_without_init(&mut self,
85 id: ast::NodeId,
86 span: Span);
87
88 // The path at `cmt` is being assigned to.
89 fn mutate(&mut self,
90 assignment_id: ast::NodeId,
91 assignment_span: Span,
92 assignee_cmt: mc::cmt<'tcx>,
93 mode: MutateMode);
94 }
95
/// The syntactic construct that gave rise to a borrow, reported back
/// through `Delegate::borrow` so diagnostics can cite the origin.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum LoanCause {
    /// By-ref capture into a closure at the given span
    /// (NOTE(review): capture walking is outside this chunk — confirm).
    ClosureCapture(Span),
    /// An explicit `&expr` / `&mut expr`.
    AddrOf,
    /// An implicit autoref inserted by the compiler (e.g. overloaded deref).
    AutoRef,
    /// Coercion from a reference to a raw pointer (`&T` -> `*T`).
    AutoUnsafe,
    /// A `ref` binding (or slice sub-pattern borrow) in a pattern.
    RefBinding,
    /// Implicit by-ref passing of arguments to an overloaded operator.
    OverloadedOperator,
    /// Borrow of a callable in order to invoke it via `Fn`/`FnMut`.
    ClosureInvocation,
    /// Borrow made on behalf of a `for` loop
    /// (NOTE(review): not exercised in this chunk — confirm against caller).
    ForLoop,
    /// Borrow of a `match` discriminant while walking the arms.
    MatchDiscriminant
}
108
/// How a consumed value is taken: copied (for `Copy` types) or moved.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ConsumeMode {
    Copy, // reference to x where x has a type that copies
    Move(MoveReason), // reference to x where x has a type that moves
}
114
/// Why a value was moved rather than copied.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveReason {
    /// Moved by a direct use of the value (e.g. consuming an expression).
    DirectRefMove,
    /// Moved by a by-value pattern binding.
    PatBindingMove,
    /// Moved by capture into a closure
    /// (NOTE(review): capture walking is outside this chunk — confirm).
    CaptureMove,
}
121
/// The overall effect of a pattern match on the matched value,
/// aggregated over all bindings in the pattern.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MatchMode {
    /// No bindings at all (e.g. `_` or literal patterns only).
    NonBindingMatch,
    /// At least one `ref` binding, no moves.
    BorrowingMatch,
    /// By-value bindings of `Copy` data only.
    CopyingMatch,
    /// At least one by-value binding that moves.
    MovingMatch,
}
129
/// Accumulator used while determining a pattern's `MatchMode`; see the
/// lattice diagram on `TrackMatchMode::lub`.
#[derive(Copy, Clone, PartialEq, Debug)]
enum TrackMatchMode {
    /// No bindings observed yet.
    Unknown,
    /// All bindings observed so far agree on this mode.
    Definite(MatchMode),
    /// Incompatible modes observed (e.g. borrow + move in one pattern).
    Conflicting,
}
136
impl TrackMatchMode {
    // Builds up the whole match mode for a pattern from its constituent
    // parts. The lattice looks like this:
    //
    //          Conflicting
    //            /     \
    //           /       \
    //      Borrowing   Moving
    //           \       /
    //            \     /
    //            Copying
    //               |
    //          NonBinding
    //               |
    //            Unknown
    //
    // examples:
    //
    // * `(_, some_int)` pattern is Copying, since
    //   NonBinding + Copying => Copying
    //
    // * `(some_int, some_box)` pattern is Moving, since
    //   Copying + Moving => Moving
    //
    // * `(ref x, some_box)` pattern is Conflicting, since
    //   Borrowing + Moving => Conflicting
    //
    // Note that the `Unknown` and `Conflicting` states are
    // represented separately from the other more interesting
    // `Definite` states, which simplifies logic here somewhat.
    fn lub(&mut self, mode: MatchMode) {
        *self = match (*self, mode) {
            // Note that clause order below is very significant: the
            // earlier, more specific arms must be tried before the
            // catch-all `(Definite(_), _) => Conflicting` arm.
            (Unknown, new) => Definite(new),
            (Definite(old), new) if old == new => Definite(old),

            // NonBindingMatch is the bottom of the Definite lattice:
            // it never changes an established mode, and is replaced
            // by anything stronger.
            (Definite(old), NonBindingMatch) => Definite(old),
            (Definite(NonBindingMatch), new) => Definite(new),

            // CopyingMatch is absorbed by Borrowing/Moving likewise.
            (Definite(old), CopyingMatch) => Definite(old),
            (Definite(CopyingMatch), new) => Definite(new),

            // Remaining Definite combinations are Borrowing vs Moving.
            (Definite(_), _) => Conflicting,
            (Conflicting, _) => *self,
        };
    }

    /// Collapses the accumulator into a final `MatchMode`.
    fn match_mode(&self) -> MatchMode {
        match *self {
            Unknown => NonBindingMatch,
            Definite(mode) => mode,
            Conflicting => {
                // Conservatively return MovingMatch to let the
                // compiler continue to make progress.
                MovingMatch
            }
        }
    }
}
196
/// The kind of write performed on an assignee path.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MutateMode {
    /// First initialization (e.g. a pattern binding).
    Init,
    JustWrite, // x = y
    WriteAndRead, // x += y
}
203
/// Which `Fn*` trait an overloaded call expression dispatches through;
/// determines whether the callee is borrowed or consumed.
#[derive(Copy, Clone)]
enum OverloadedCallType {
    /// Call via `Fn` — callee borrowed immutably.
    FnOverloadedCall,
    /// Call via `FnMut` — callee borrowed mutably.
    FnMutOverloadedCall,
    /// Call via `FnOnce` — callee consumed.
    FnOnceOverloadedCall,
}
210
impl OverloadedCallType {
    /// Maps one of the `Fn`/`FnMut`/`FnOnce` lang-item trait ids to the
    /// corresponding call type. ICEs if `trait_id` is none of the three.
    fn from_trait_id(tcx: &ty::ctxt, trait_id: DefId)
                     -> OverloadedCallType {
        // lang items may be absent (Some/None), hence the Option match.
        for &(maybe_function_trait, overloaded_call_type) in &[
            (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
            (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
            (tcx.lang_items.fn_trait(), FnOverloadedCall)
        ] {
            match maybe_function_trait {
                Some(function_trait) if function_trait == trait_id => {
                    return overloaded_call_type
                }
                _ => continue,
            }
        }

        tcx.sess.bug("overloaded call didn't map to known function trait")
    }

    /// Looks up the trait that contains `method_id` and classifies the
    /// call through `from_trait_id`.
    fn from_method_id(tcx: &ty::ctxt, method_id: DefId)
                      -> OverloadedCallType {
        let method = tcx.impl_or_trait_item(method_id);
        OverloadedCallType::from_trait_id(tcx, method.container().id())
    }
}
236
237 ///////////////////////////////////////////////////////////////////////////
238 // The ExprUseVisitor type
239 //
240 // This is the code that actually walks the tree. Like
241 // mem_categorization, it requires a TYPER, which is a type that
242 // supplies types from the tree. After type checking is complete, you
243 // can just use the tcx as the typer.
/// Walks expressions and reports every consume/borrow/mutate event to
/// the `delegate`. Types are obtained from `typer`; `mc` performs the
/// memory categorization of each expression.
pub struct ExprUseVisitor<'d, 't, 'a: 't, 'tcx:'a+'d> {
    // Source of type information for expressions/patterns.
    typer: &'t infer::InferCtxt<'a, 'tcx>,
    // Categorizes expressions into places (lvalues/rvalues).
    mc: mc::MemCategorizationContext<'t, 'a, 'tcx>,
    // Receiver of the use events discovered during the walk.
    delegate: &'d mut Delegate<'tcx>,
}
249
// If the TYPER results in an error, it's because the type check
// failed (or will fail, when the error is uncovered and reported
// during writeback). In this case, we just ignore this part of the
// code.
//
// Note that this macro appears similar to try!(), but, unlike try!(),
// it does not propagate the error: it simply returns `()` from the
// enclosing function, abandoning the rest of the walk.
macro_rules! return_if_err {
    ($inp: expr) => (
        match $inp {
            Ok(v) => v,
            Err(()) => {
                debug!("mc reported err");
                return
            }
        }
    )
}
268
/// Whether the elements of an overloaded operation are passed by value or by reference
enum PassArgs {
    /// Receiver and arguments are consumed.
    ByValue,
    /// Receiver is walked; arguments get an implicit immutable borrow.
    ByRef,
}
274
275 impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {
    /// Constructs a visitor that reports use events to `delegate`,
    /// drawing type information from `typer`.
    pub fn new(delegate: &'d mut (Delegate<'tcx>+'d),
               typer: &'t infer::InferCtxt<'a, 'tcx>)
               -> ExprUseVisitor<'d,'t,'a,'tcx> where 'tcx:'a+'d
    {
        let mc: mc::MemCategorizationContext<'t, 'a, 'tcx> =
            mc::MemCategorizationContext::new(typer);
        ExprUseVisitor { typer: typer, mc: mc, delegate: delegate }
    }
284
    /// Entry point: walks an entire fn, first binding the argument
    /// patterns, then walking the body block.
    pub fn walk_fn(&mut self,
                   decl: &hir::FnDecl,
                   body: &hir::Block) {
        self.walk_arg_patterns(decl, body);
        self.walk_block(body);
    }
291
    /// Treats each fn argument as an rvalue scoped to the fn body and
    /// walks its (irrefutable) pattern, so bindings register as inits.
    fn walk_arg_patterns(&mut self,
                         decl: &hir::FnDecl,
                         body: &hir::Block) {
        for arg in &decl.inputs {
            let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id));

            let fn_body_scope = self.tcx().region_maps.node_extent(body.id);
            let arg_cmt = self.mc.cat_rvalue(
                arg.id,
                arg.pat.span,
                ty::ReScope(fn_body_scope), // Args live only as long as the fn body.
                arg_ty);

            self.walk_irrefutable_pat(arg_cmt, &*arg.pat);
        }
    }
308
    /// Convenience accessor for the type context behind the typer.
    fn tcx(&self) -> &'t ty::ctxt<'tcx> {
        self.typer.tcx
    }
312
    /// Reports a consume of `cmt`, deciding copy-vs-move from its type.
    fn delegate_consume(&mut self,
                        consume_id: ast::NodeId,
                        consume_span: Span,
                        cmt: mc::cmt<'tcx>) {
        debug!("delegate_consume(consume_id={}, cmt={:?})",
               consume_id, cmt);

        let mode = copy_or_move(self.typer, &cmt, DirectRefMove);
        self.delegate.consume(consume_id, consume_span, cmt, mode);
    }
323
324 fn consume_exprs(&mut self, exprs: &[P<hir::Expr>]) {
325 for expr in exprs {
326 self.consume_expr(&**expr);
327 }
328 }
329
    /// Reports `expr` as consumed (copied or moved), then walks its
    /// subexpressions.
    pub fn consume_expr(&mut self, expr: &hir::Expr) {
        debug!("consume_expr(expr={:?})", expr);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate_consume(expr.id, expr.span, cmt);
        self.walk_expr(expr);
    }
337
    /// Reports `expr` as mutated by `assignment_expr` (the enclosing
    /// assignment), then walks `expr`'s subexpressions.
    fn mutate_expr(&mut self,
                   assignment_expr: &hir::Expr,
                   expr: &hir::Expr,
                   mode: MutateMode) {
        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
        self.walk_expr(expr);
    }
346
    /// Reports `expr` as borrowed for region `r` with kind `bk`, then
    /// walks its subexpressions.
    fn borrow_expr(&mut self,
                   expr: &hir::Expr,
                   r: ty::Region,
                   bk: ty::BorrowKind,
                   cause: LoanCause) {
        debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
               expr, r, bk);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);

        self.walk_expr(expr)
    }
360
    /// A "select" (e.g. field access, indexing) neither consumes nor
    /// borrows the base by itself; just walk it.
    fn select_from_expr(&mut self, expr: &hir::Expr) {
        self.walk_expr(expr)
    }
364
    /// Core dispatch: walks `expr`'s adjustments, then recurses into
    /// its children with the appropriate use category (consume,
    /// select, borrow, mutate) per expression kind.
    pub fn walk_expr(&mut self, expr: &hir::Expr) {
        debug!("walk_expr(expr={:?})", expr);

        // Implicit coercions/autoderefs may themselves consume/borrow.
        self.walk_adjustment(expr);

        match expr.node {
            // A bare path is a use of the place; handled via adjustments
            // and by the enclosing consume/borrow calls.
            hir::ExprPath(..) => { }

            hir::ExprType(ref subexpr, _) => {
                self.walk_expr(&**subexpr)
            }

            hir::ExprUnary(hir::UnDeref, ref base) => { // *base
                // Overloaded deref takes the receiver by reference; if
                // not overloaded, `*base` is a plain select.
                if !self.walk_overloaded_operator(expr, &**base, Vec::new(), PassArgs::ByRef) {
                    self.select_from_expr(&**base);
                }
            }

            hir::ExprField(ref base, _) => { // base.f
                self.select_from_expr(&**base);
            }

            hir::ExprTupField(ref base, _) => { // base.<n>
                self.select_from_expr(&**base);
            }

            hir::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
                // Overloaded indexing consumes the index operand.
                if !self.walk_overloaded_operator(expr,
                                                  &**lhs,
                                                  vec![&**rhs],
                                                  PassArgs::ByValue) {
                    self.select_from_expr(&**lhs);
                    self.consume_expr(&**rhs);
                }
            }

            hir::ExprRange(ref start, ref end) => {
                // Both endpoints are optional; consume whichever exist.
                start.as_ref().map(|e| self.consume_expr(&**e));
                end.as_ref().map(|e| self.consume_expr(&**e));
            }

            hir::ExprCall(ref callee, ref args) => { // callee(args)
                self.walk_callee(expr, &**callee);
                self.consume_exprs(args);
            }

            hir::ExprMethodCall(_, _, ref args) => { // callee.m(args)
                // Receiver is args[0] in this representation.
                self.consume_exprs(args);
            }

            hir::ExprStruct(_, ref fields, ref opt_with) => {
                self.walk_struct_expr(expr, fields, opt_with);
            }

            hir::ExprTup(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
                self.consume_expr(&**cond_expr);
                self.walk_block(&**then_blk);
                if let Some(ref else_expr) = *opt_else_expr {
                    self.consume_expr(&**else_expr);
                }
            }

            hir::ExprMatch(ref discr, ref arms, _) => {
                let discr_cmt = return_if_err!(self.mc.cat_expr(&**discr));
                self.borrow_expr(&**discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);

                // treatment of the discriminant is handled while walking the arms.
                for arm in arms {
                    let mode = self.arm_move_mode(discr_cmt.clone(), arm);
                    let mode = mode.match_mode();
                    self.walk_arm(discr_cmt.clone(), arm, mode);
                }
            }

            hir::ExprVec(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprAddrOf(m, ref base) => { // &base
                // make sure that the thing we are pointing out stays valid
                // for the lifetime `scope_r` of the resulting ptr:
                let expr_ty = return_if_err!(self.typer.node_ty(expr.id));
                if let ty::TyRef(&r, _) = expr_ty.sty {
                    let bk = ty::BorrowKind::from_mutbl(m);
                    self.borrow_expr(&**base, r, bk, AddrOf);
                }
            }

            hir::ExprInlineAsm(ref ia) => {
                for &(_, ref input) in &ia.inputs {
                    self.consume_expr(&**input);
                }

                for output in &ia.outputs {
                    if output.is_indirect {
                        // Indirect outputs read the pointer value itself.
                        self.consume_expr(&*output.expr);
                    } else {
                        self.mutate_expr(expr, &*output.expr,
                                         if output.is_rw {
                                             MutateMode::WriteAndRead
                                         } else {
                                             MutateMode::JustWrite
                                         });
                    }
                }
            }

            hir::ExprBreak(..) |
            hir::ExprAgain(..) |
            hir::ExprLit(..) => {}

            hir::ExprLoop(ref blk, _) => {
                self.walk_block(&**blk);
            }

            hir::ExprWhile(ref cond_expr, ref blk, _) => {
                self.consume_expr(&**cond_expr);
                self.walk_block(&**blk);
            }

            hir::ExprUnary(op, ref lhs) => {
                // By-value unops (e.g. negation) consume the operand;
                // others pass it by reference to the operator method.
                let pass_args = if ::rustc_front::util::is_by_value_unop(op) {
                    PassArgs::ByValue
                } else {
                    PassArgs::ByRef
                };

                if !self.walk_overloaded_operator(expr, &**lhs, Vec::new(), pass_args) {
                    self.consume_expr(&**lhs);
                }
            }

            hir::ExprBinary(op, ref lhs, ref rhs) => {
                let pass_args = if ::rustc_front::util::is_by_value_binop(op.node) {
                    PassArgs::ByValue
                } else {
                    PassArgs::ByRef
                };

                if !self.walk_overloaded_operator(expr, &**lhs, vec![&**rhs], pass_args) {
                    self.consume_expr(&**lhs);
                    self.consume_expr(&**rhs);
                }
            }

            hir::ExprBlock(ref blk) => {
                self.walk_block(&**blk);
            }

            hir::ExprRet(ref opt_expr) => {
                if let Some(ref expr) = *opt_expr {
                    self.consume_expr(&**expr);
                }
            }

            hir::ExprAssign(ref lhs, ref rhs) => {
                self.mutate_expr(expr, &**lhs, MutateMode::JustWrite);
                self.consume_expr(&**rhs);
            }

            hir::ExprCast(ref base, _) => {
                self.consume_expr(&**base);
            }

            hir::ExprAssignOp(op, ref lhs, ref rhs) => {
                // NB All our assignment operations take the RHS by value
                assert!(::rustc_front::util::is_by_value_binop(op.node));

                if !self.walk_overloaded_operator(expr, lhs, vec![rhs], PassArgs::ByValue) {
                    self.mutate_expr(expr, &**lhs, MutateMode::WriteAndRead);
                    self.consume_expr(&**rhs);
                }
            }

            hir::ExprRepeat(ref base, ref count) => {
                self.consume_expr(&**base);
                self.consume_expr(&**count);
            }

            hir::ExprClosure(..) => {
                self.walk_captures(expr)
            }

            hir::ExprBox(ref base) => {
                self.consume_expr(&**base);
            }
        }
    }
557
    /// Walks the callee of a call expression: plain fn pointers are
    /// consumed; overloaded calls borrow (`Fn`/`FnMut`) or consume
    /// (`FnOnce`) the callee depending on the dispatching trait.
    fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
        let callee_ty = return_if_err!(self.typer.expr_ty_adjusted(callee));
        debug!("walk_callee: callee={:?} callee_ty={:?}",
               callee, callee_ty);
        let call_scope = self.tcx().region_maps.node_extent(call.id);
        match callee_ty.sty {
            ty::TyBareFn(..) => {
                self.consume_expr(callee);
            }
            // Already-reported type error: nothing useful to record.
            ty::TyError => { }
            _ => {
                let overloaded_call_type =
                    match self.typer.node_method_id(ty::MethodCall::expr(call.id)) {
                        Some(method_id) => {
                            OverloadedCallType::from_method_id(self.tcx(), method_id)
                        }
                        None => {
                            self.tcx().sess.span_bug(
                                callee.span,
                                &format!("unexpected callee type {}", callee_ty))
                        }
                    };
                match overloaded_call_type {
                    FnMutOverloadedCall => {
                        self.borrow_expr(callee,
                                         ty::ReScope(call_scope),
                                         ty::MutBorrow,
                                         ClosureInvocation);
                    }
                    FnOverloadedCall => {
                        self.borrow_expr(callee,
                                         ty::ReScope(call_scope),
                                         ty::ImmBorrow,
                                         ClosureInvocation);
                    }
                    FnOnceOverloadedCall => self.consume_expr(callee),
                }
            }
        }
    }
598
    /// Walks one statement: `let` declarations go through `walk_local`,
    /// expression statements are consumed, nested items are skipped.
    fn walk_stmt(&mut self, stmt: &hir::Stmt) {
        match stmt.node {
            hir::StmtDecl(ref decl, _) => {
                match decl.node {
                    hir::DeclLocal(ref local) => {
                        self.walk_local(&**local);
                    }

                    hir::DeclItem(_) => {
                        // we don't visit nested items in this visitor,
                        // only the fn body we were given.
                    }
                }
            }

            hir::StmtExpr(ref expr, _) |
            hir::StmtSemi(ref expr, _) => {
                self.consume_expr(&**expr);
            }
        }
    }
620
    /// Walks a `let`: uninitialized bindings report `decl_without_init`
    /// for each identifier; initialized ones walk the initializer and
    /// then bind its categorization through the pattern.
    fn walk_local(&mut self, local: &hir::Local) {
        match local.init {
            None => {
                // Reborrow delegate separately so the closure below can
                // call it while `self.typer` is also borrowed.
                let delegate = &mut self.delegate;
                pat_util::pat_bindings(&self.typer.tcx.def_map, &*local.pat,
                                       |_, id, span, _| {
                    delegate.decl_without_init(id, span);
                })
            }

            Some(ref expr) => {
                // Variable declarations with
                // initializers are considered
                // "assigns", which is handled by
                // `walk_pat`:
                self.walk_expr(&**expr);
                let init_cmt = return_if_err!(self.mc.cat_expr(&**expr));
                self.walk_irrefutable_pat(init_cmt, &*local.pat);
            }
        }
    }
642
    /// Walks a block: each statement is walked in order, and the
    /// optional tail expression (the block's value) is consumed,
    /// meaning either copied or moved depending on its type.
    fn walk_block(&mut self, blk: &hir::Block) {
        debug!("walk_block(blk.id={})", blk.id);

        for stmt in &blk.stmts {
            self.walk_stmt(stmt);
        }

        if let Some(ref tail_expr) = blk.expr {
            self.consume_expr(&**tail_expr);
        }
    }
656
    /// Walks a struct literal: consumes each explicit field value and,
    /// for functional-record-update (`..base`), consumes only those
    /// fields of `base` not overridden by explicit fields.
    fn walk_struct_expr(&mut self,
                        _expr: &hir::Expr,
                        fields: &[hir::Field],
                        opt_with: &Option<P<hir::Expr>>) {
        // Consume the expressions supplying values for each field.
        for field in fields {
            self.consume_expr(&*field.expr);
        }

        let with_expr = match *opt_with {
            Some(ref w) => &**w,
            None => { return; }
        };

        let with_cmt = return_if_err!(self.mc.cat_expr(&*with_expr));

        // Select just those fields of the `with`
        // expression that will actually be used
        if let ty::TyStruct(def, substs) = with_cmt.ty.sty {
            // Consume those fields of the with expression that are needed.
            for with_field in &def.struct_variant().fields {
                if !contains_field_named(with_field, fields) {
                    let cmt_field = self.mc.cat_field(
                        &*with_expr,
                        with_cmt.clone(),
                        with_field.name,
                        with_field.ty(self.tcx(), substs)
                    );
                    self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
                }
            }
        } else {
            // the base expression should always evaluate to a
            // struct; however, when EUV is run during typeck, it
            // may not. This will generate an error earlier in typeck,
            // so we can just ignore it.
            if !self.tcx().sess.has_errors() {
                self.tcx().sess.span_bug(
                    with_expr.span,
                    "with expression doesn't evaluate to a struct");
            }
        };

        // walk the with expression so that complex expressions
        // are properly handled.
        self.walk_expr(with_expr);

        // True if an explicit field in the literal shadows `field`
        // of the base struct.
        fn contains_field_named(field: ty::FieldDef,
                                fields: &[hir::Field])
                                -> bool
        {
            fields.iter().any(
                |f| f.name.node == field.name)
        }
    }
712
    // Invoke the appropriate delegate calls for anything that gets
    // consumed or borrowed as part of the automatic adjustment
    // process (coercions, autoderef/autoref, unsizing).
    fn walk_adjustment(&mut self, expr: &hir::Expr) {
        let typer = self.typer;
        //NOTE(@jroesch): mixed RefCell borrow causes crash
        // (hence the clone-out instead of holding the map borrow)
        let adj = typer.adjustments().get(&expr.id).map(|x| x.clone());
        if let Some(adjustment) = adj {
            match adjustment {
                adjustment::AdjustReifyFnPointer |
                adjustment::AdjustUnsafeFnPointer => {
                    // Creating a closure/fn-pointer or unsizing consumes
                    // the input and stores it into the resulting rvalue.
                    debug!("walk_adjustment(AdjustReifyFnPointer|AdjustUnsafeFnPointer)");
                    let cmt_unadjusted =
                        return_if_err!(self.mc.cat_expr_unadjusted(expr));
                    self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
                }
                adjustment::AdjustDerefRef(ref adj) => {
                    self.walk_autoderefref(expr, adj);
                }
            }
        }
    }
737
    /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
    /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
    /// `deref()` is declared with `&self`, this is an autoref of `x`. Reports one
    /// borrow per overloaded deref step; built-in derefs report nothing.
    fn walk_autoderefs(&mut self,
                       expr: &hir::Expr,
                       autoderefs: usize) {
        debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);

        for i in 0..autoderefs {
            let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
            match self.typer.node_method_ty(deref_id) {
                // None => built-in deref, no method call recorded.
                None => {}
                Some(method_ty) => {
                    let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));

                    // the method call infrastructure should have
                    // replaced all late-bound regions with variables:
                    let self_ty = method_ty.fn_sig().input(0);
                    let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();

                    let (m, r) = match self_ty.sty {
                        ty::TyRef(r, ref m) => (m.mutbl, r),
                        _ => self.tcx().sess.span_bug(expr.span,
                                &format!("bad overloaded deref type {:?}",
                                         method_ty))
                    };
                    let bk = ty::BorrowKind::from_mutbl(m);
                    self.delegate.borrow(expr.id, expr.span, cmt,
                                         *r, bk, AutoRef);
                }
            }
        }
    }
771
    /// Walks a deref/ref adjustment: applies the autoderefs, then the
    /// optional autoref, and finally consumes the result if unsizing
    /// (thin-to-fat pointer conversion) takes place.
    fn walk_autoderefref(&mut self,
                         expr: &hir::Expr,
                         adj: &adjustment::AutoDerefRef<'tcx>) {
        debug!("walk_autoderefref expr={:?} adj={:?}",
               expr,
               adj);

        self.walk_autoderefs(expr, adj.autoderefs);

        let cmt_derefd =
            return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));

        let cmt_refd =
            self.walk_autoref(expr, cmt_derefd, adj.autoref);

        if adj.unsize.is_some() {
            // Unsizing consumes the thin pointer and produces a fat one.
            self.delegate_consume(expr.id, expr.span, cmt_refd);
        }
    }
792
793
    /// Walks the autoref `opt_autoref` applied to the autoderef'd
    /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
    /// after all relevant autoderefs have occurred. Because AutoRefs
    /// can be recursive, this function is recursive: it first walks
    /// deeply all the way down the autoref chain, and then processes
    /// the autorefs on the way out. At each point, it returns the
    /// `cmt` for the rvalue that will be produced by introducing an
    /// autoref.
    fn walk_autoref(&mut self,
                    expr: &hir::Expr,
                    cmt_base: mc::cmt<'tcx>,
                    opt_autoref: Option<adjustment::AutoRef<'tcx>>)
                    -> mc::cmt<'tcx>
    {
        debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
               expr.id,
               cmt_base,
               opt_autoref);

        // Saved before `cmt_base` is moved into the delegate calls below.
        let cmt_base_ty = cmt_base.ty;

        let autoref = match opt_autoref {
            Some(ref autoref) => autoref,
            None => {
                // No AutoRef.
                return cmt_base;
            }
        };

        match *autoref {
            adjustment::AutoPtr(r, m) => {
                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     *r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoRef);
            }

            adjustment::AutoUnsafe(m) => {
                debug!("walk_autoref: expr.id={} cmt_base={:?}",
                       expr.id,
                       cmt_base);

                // Converting from a &T to *T (or &mut T to *mut T) is
                // treated as borrowing it for the enclosing temporary
                // scope.
                let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));

                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoUnsafe);
            }
        }

        // Construct the categorization for the result of the autoref.
        // This is always an rvalue, since we are producing a new
        // (temporary) indirection.

        let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);

        self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
    }
860
861
    // When this returns true, it means that the expression *is* a
    // method-call (i.e. via the operator-overload). This true result
    // also implies that walk_overloaded_operator already took care of
    // recursively processing the input arguments, and thus the caller
    // should not do so.
    fn walk_overloaded_operator(&mut self,
                                expr: &hir::Expr,
                                receiver: &hir::Expr,
                                rhs: Vec<&hir::Expr>,
                                pass_args: PassArgs)
                                -> bool
    {
        // Not an overloaded operator: tell the caller to handle it.
        if !self.typer.is_method_call(expr.id) {
            return false;
        }

        match pass_args {
            PassArgs::ByValue => {
                // By-value operators consume both receiver and args.
                self.consume_expr(receiver);
                for &arg in &rhs {
                    self.consume_expr(arg);
                }

                return true;
            },
            PassArgs::ByRef => {},
        }

        // ByRef: the receiver borrow is reported via adjustments, so
        // only walk it here.
        self.walk_expr(receiver);

        // Arguments (but not receivers) to overloaded operator
        // methods are implicitly autoref'd which sadly does not use
        // adjustments, so we must hardcode the borrow here.

        let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
        let bk = ty::ImmBorrow;

        for &arg in &rhs {
            self.borrow_expr(arg, r, bk, OverloadedOperator);
        }
        return true;
    }
904
905 fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
906 let mut mode = Unknown;
907 for pat in &arm.pats {
908 self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode);
909 }
910 mode
911 }
912
913 fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
914 for pat in &arm.pats {
915 self.walk_pat(discr_cmt.clone(), &**pat, mode);
916 }
917
918 if let Some(ref guard) = arm.guard {
919 self.consume_expr(&**guard);
920 }
921
922 self.consume_expr(&*arm.body);
923 }
924
    /// Walks a pat that occurs in isolation (i.e. top-level of fn
    /// arg or let binding. *Not* a match arm or nested pat.) The mode
    /// is determined first, then the pattern is walked with it.
    fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
        let mut mode = Unknown;
        self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
        let mode = mode.match_mode();
        self.walk_pat(cmt_discr, pat, mode);
    }
933
    /// Identifies any bindings within `pat` and accumulates within
    /// `mode` whether the overall pattern/match structure is a move,
    /// copy, or borrow (via `TrackMatchMode::lub`).
    fn determine_pat_move_mode(&mut self,
                               cmt_discr: mc::cmt<'tcx>,
                               pat: &hir::Pat,
                               mode: &mut TrackMatchMode) {
        debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
               pat);
        return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
            let tcx = self.tcx();
            let def_map = &self.tcx().def_map;
            // Only identifier bindings contribute to the mode.
            if pat_util::pat_is_binding(&def_map.borrow(), pat) {
                match pat.node {
                    hir::PatIdent(hir::BindByRef(_), _, _) =>
                        mode.lub(BorrowingMatch),
                    hir::PatIdent(hir::BindByValue(_), _, _) => {
                        // By-value binding: copy vs move depends on the
                        // bound value's type.
                        match copy_or_move(self.typer, &cmt_pat, PatBindingMove) {
                            Copy => mode.lub(CopyingMatch),
                            Move(_) => mode.lub(MovingMatch),
                        }
                    }
                    _ => {
                        tcx.sess.span_bug(
                            pat.span,
                            "binding pattern not an identifier");
                    }
                }
            }
        }));
    }
965
966 /// The core driver for walking a pattern; `match_mode` must be
967 /// established up front, e.g. via `determine_pat_move_mode` (see
968 /// also `walk_irrefutable_pat` for patterns that stand alone).
969 fn walk_pat(&mut self,
970 cmt_discr: mc::cmt<'tcx>,
971 pat: &hir::Pat,
972 match_mode: MatchMode) {
973 debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr,
974 pat);
975
976 let mc = &self.mc;
977 let typer = self.typer;
978 let def_map = &self.tcx().def_map;
979 let delegate = &mut self.delegate;
980 return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
981 if pat_util::pat_is_binding(&def_map.borrow(), pat) {
982 let tcx = typer.tcx;
983
984 debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}",
985 cmt_pat,
986 pat,
987 match_mode);
988
989 // pat_ty: the type of the binding being produced.
990 let pat_ty = return_if_err!(typer.node_ty(pat.id));
991
992 // Each match binding is effectively an assignment to the
993 // binding being produced.
994 let def = def_map.borrow().get(&pat.id).unwrap().full_def();
995 match mc.cat_def(pat.id, pat.span, pat_ty, def) {
996 Ok(binding_cmt) => {
997 delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
998 }
999 Err(_) => { }
1000 }
1001
1002 // It is also a borrow or copy/move of the value being matched.
1003 match pat.node {
1004 hir::PatIdent(hir::BindByRef(m), _, _) => {
1005 if let ty::TyRef(&r, _) = pat_ty.sty {
1006 let bk = ty::BorrowKind::from_mutbl(m);
1007 delegate.borrow(pat.id, pat.span, cmt_pat,
1008 r, bk, RefBinding);
1009 }
1010 }
1011 hir::PatIdent(hir::BindByValue(_), _, _) => {
1012 let mode = copy_or_move(typer, &cmt_pat, PatBindingMove);
1013 debug!("walk_pat binding consuming pat");
1014 delegate.consume_pat(pat, cmt_pat, mode);
1015 }
1016 _ => {
1017 tcx.sess.span_bug(
1018 pat.span,
1019 "binding pattern not an identifier");
1020 }
1021 }
1022 } else {
1023 match pat.node {
1024 hir::PatVec(_, Some(ref slice_pat), _) => {
1025 // The `slice_pat` here creates a slice into
1026 // the original vector. This is effectively a
1027 // borrow of the elements of the vector being
1028 // matched.
1029
1030 let (slice_cmt, slice_mutbl, slice_r) =
1031 return_if_err!(mc.cat_slice_pattern(cmt_pat, &**slice_pat));
1032
1033 // Note: We declare here that the borrow
1034 // occurs upon entering the `[...]`
1035 // pattern. This implies that something like
1036 // `[a; b]` where `a` is a move is illegal,
1037 // because the borrow is already in effect.
1038 // In fact such a move would be safe-ish, but
1039 // it effectively *requires* that we use the
1040 // nulling out semantics to indicate when a
1041 // value has been moved, which we are trying
1042 // to move away from. Otherwise, how can we
1043 // indicate that the first element in the
1044 // vector has been moved? Eventually, we
1045 // could perhaps modify this rule to permit
1046 // `[..a, b]` where `b` is a move, because in
1047 // that case we can adjust the length of the
1048 // original vec accordingly, but we'd have to
1049 // make trans do the right thing, and it would
1050 // only work for `Box<[T]>`s. It seems simpler
1051 // to just require that people call
1052 // `vec.pop()` or `vec.unshift()`.
1053 let slice_bk = ty::BorrowKind::from_mutbl(slice_mutbl);
1054 delegate.borrow(pat.id, pat.span,
1055 slice_cmt, slice_r,
1056 slice_bk, RefBinding);
1057 }
1058 _ => { }
1059 }
1060 }
1061 }));
1062
1063 // Do a second pass over the pattern, calling `matched_pat` on
1064 // the interior nodes (enum variants and structs), as opposed
1065 // to the above loop's visit of than the bindings that form
1066 // the leaves of the pattern tree structure.
1067 return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
1068 let def_map = def_map.borrow();
1069 let tcx = typer.tcx;
1070
1071 match pat.node {
1072 hir::PatEnum(_, _) | hir::PatQPath(..) |
1073 hir::PatIdent(_, _, None) | hir::PatStruct(..) => {
1074 match def_map.get(&pat.id).map(|d| d.full_def()) {
1075 None => {
1076 // no definition found: pat is not a
1077 // struct or enum pattern.
1078 }
1079
1080 Some(def::DefVariant(enum_did, variant_did, _is_struct)) => {
1081 let downcast_cmt =
1082 if tcx.lookup_adt_def(enum_did).is_univariant() {
1083 cmt_pat
1084 } else {
1085 let cmt_pat_ty = cmt_pat.ty;
1086 mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
1087 };
1088
1089 debug!("variant downcast_cmt={:?} pat={:?}",
1090 downcast_cmt,
1091 pat);
1092
1093 delegate.matched_pat(pat, downcast_cmt, match_mode);
1094 }
1095
1096 Some(def::DefStruct(..)) | Some(def::DefTy(_, false)) => {
1097 // A struct (in either the value or type
1098 // namespace; we encounter the former on
1099 // e.g. patterns for unit structs).
1100
1101 debug!("struct cmt_pat={:?} pat={:?}",
1102 cmt_pat,
1103 pat);
1104
1105 delegate.matched_pat(pat, cmt_pat, match_mode);
1106 }
1107
1108 Some(def::DefConst(..)) |
1109 Some(def::DefAssociatedConst(..)) |
1110 Some(def::DefLocal(..)) => {
1111 // This is a leaf (i.e. identifier binding
1112 // or constant value to match); thus no
1113 // `matched_pat` call.
1114 }
1115
1116 Some(def @ def::DefTy(_, true)) => {
1117 // An enum's type -- should never be in a
1118 // pattern.
1119
1120 if !tcx.sess.has_errors() {
1121 let msg = format!("Pattern has unexpected type: {:?} and type {:?}",
1122 def,
1123 cmt_pat.ty);
1124 tcx.sess.span_bug(pat.span, &msg)
1125 }
1126 }
1127
1128 Some(def) => {
1129 // Remaining cases are e.g. DefFn, to
1130 // which identifiers within patterns
1131 // should not resolve. However, we do
1132 // encouter this when using the
1133 // expr-use-visitor during typeck. So just
1134 // ignore it, an error should have been
1135 // reported.
1136
1137 if !tcx.sess.has_errors() {
1138 let msg = format!("Pattern has unexpected def: {:?} and type {:?}",
1139 def,
1140 cmt_pat.ty);
1141 tcx.sess.span_bug(pat.span, &msg[..])
1142 }
1143 }
1144 }
1145 }
1146
1147 hir::PatIdent(_, _, Some(_)) => {
1148 // Do nothing; this is a binding (not an enum
1149 // variant or struct), and the cat_pattern call
1150 // will visit the substructure recursively.
1151 }
1152
1153 hir::PatWild | hir::PatTup(..) | hir::PatBox(..) |
1154 hir::PatRegion(..) | hir::PatLit(..) | hir::PatRange(..) |
1155 hir::PatVec(..) => {
1156 // Similarly, each of these cases does not
1157 // correspond to an enum variant or struct, so we
1158 // do not do any `matched_pat` calls for these
1159 // cases either.
1160 }
1161 }
1162 }));
1163 }
1164
1165 fn walk_captures(&mut self, closure_expr: &hir::Expr) {
1166 debug!("walk_captures({:?})", closure_expr);
1167
1168 self.tcx().with_freevars(closure_expr.id, |freevars| {
1169 for freevar in freevars {
1170 let id_var = freevar.def.var_id();
1171 let upvar_id = ty::UpvarId { var_id: id_var,
1172 closure_expr_id: closure_expr.id };
1173 let upvar_capture = self.typer.upvar_capture(upvar_id).unwrap();
1174 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
1175 closure_expr.span,
1176 freevar.def));
1177 match upvar_capture {
1178 ty::UpvarCapture::ByValue => {
1179 let mode = copy_or_move(self.typer, &cmt_var, CaptureMove);
1180 self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
1181 }
1182 ty::UpvarCapture::ByRef(upvar_borrow) => {
1183 self.delegate.borrow(closure_expr.id,
1184 closure_expr.span,
1185 cmt_var,
1186 upvar_borrow.region,
1187 upvar_borrow.kind,
1188 ClosureCapture(freevar.span));
1189 }
1190 }
1191 }
1192 });
1193 }
1194
1195 fn cat_captured_var(&mut self,
1196 closure_id: ast::NodeId,
1197 closure_span: Span,
1198 upvar_def: def::Def)
1199 -> mc::McResult<mc::cmt<'tcx>> {
1200 // Create the cmt for the variable being borrowed, from the
1201 // caller's perspective
1202 let var_id = upvar_def.var_id();
1203 let var_ty = try!(self.typer.node_ty(var_id));
1204 self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
1205 }
1206 }
1207
1208 fn copy_or_move<'a, 'tcx>(typer: &infer::InferCtxt<'a, 'tcx>,
1209 cmt: &mc::cmt<'tcx>,
1210 move_reason: MoveReason)
1211 -> ConsumeMode
1212 {
1213 if typer.type_moves_by_default(cmt.ty, cmt.span) {
1214 Move(move_reason)
1215 } else {
1216 Copy
1217 }
1218 }