// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A different sort of visitor for walking fn bodies. Unlike the
//! normal visitor, which just walks the entire body in one shot, the
//! `ExprUseVisitor` determines how expressions are being used.

pub use self::LoanCause::*;
pub use self::ConsumeMode::*;
pub use self::MoveReason::*;
pub use self::MatchMode::*;
use self::TrackMatchMode::*;
use self::OverloadedCallType::*;

use hir::pat_util;
use hir::def::Def;
use hir::def_id::{DefId};
use infer::InferCtxt;
use middle::mem_categorization as mc;
use ty::{self, TyCtxt, adjustment};

use hir::{self, PatKind};

use syntax::ast;
use syntax::ptr::P;
use syntax_pos::Span;

///////////////////////////////////////////////////////////////////////////
// The Delegate trait

/// This trait defines the callbacks you can expect to receive when
/// employing the ExprUseVisitor.
pub trait Delegate<'tcx> {
    // The value found at `cmt` is either copied or moved, depending
    // on mode.
    fn consume(&mut self,
               consume_id: ast::NodeId,
               consume_span: Span,
               cmt: mc::cmt<'tcx>,
               mode: ConsumeMode);

    // The value found at `cmt` has been determined to match the
    // pattern binding `matched_pat`, and its subparts are being
    // copied or moved depending on `mode`. Note that `matched_pat`
    // is called on all variants/structs in the pattern (i.e., the
    // interior nodes of the pattern's tree structure) while
    // `consume_pat` is called on the binding identifiers in the pattern
    // (which are leaves of the pattern's tree structure).
    //
    // Note that variants/structs and identifiers are disjoint; thus
    // `matched_pat` and `consume_pat` are never both called on the
    // same input pattern structure (though `consume_pat` can, of
    // course, be called on a subpart of an input passed to `matched_pat`).
    fn matched_pat(&mut self,
                   matched_pat: &hir::Pat,
                   cmt: mc::cmt<'tcx>,
                   mode: MatchMode);

    // The value found at `cmt` is either copied or moved via the
    // pattern binding `consume_pat`, depending on mode.
    fn consume_pat(&mut self,
                   consume_pat: &hir::Pat,
                   cmt: mc::cmt<'tcx>,
                   mode: ConsumeMode);

    // The value found at `borrow` is being borrowed at the point
    // `borrow_id` for the region `loan_region` with kind `bk`.
    fn borrow(&mut self,
              borrow_id: ast::NodeId,
              borrow_span: Span,
              cmt: mc::cmt<'tcx>,
              loan_region: &'tcx ty::Region,
              bk: ty::BorrowKind,
              loan_cause: LoanCause);

    // The local variable `id` is declared but not initialized.
    fn decl_without_init(&mut self,
                         id: ast::NodeId,
                         span: Span);

    // The path at `cmt` is being assigned to.
    fn mutate(&mut self,
              assignment_id: ast::NodeId,
              assignment_span: Span,
              assignee_cmt: mc::cmt<'tcx>,
              mode: MutateMode);
}
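
// A minimal, hypothetical sketch of a delegate: a no-op implementation that
// satisfies the trait (`NoopDelegate` is an illustrative name only, and the
// signatures simply mirror the trait above).
//
//     struct NoopDelegate;
//
//     impl<'tcx> Delegate<'tcx> for NoopDelegate {
//         fn consume(&mut self, _: ast::NodeId, _: Span,
//                    _: mc::cmt<'tcx>, _: ConsumeMode) {}
//         fn matched_pat(&mut self, _: &hir::Pat,
//                        _: mc::cmt<'tcx>, _: MatchMode) {}
//         fn consume_pat(&mut self, _: &hir::Pat,
//                        _: mc::cmt<'tcx>, _: ConsumeMode) {}
//         fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt<'tcx>,
//                   _: &'tcx ty::Region, _: ty::BorrowKind, _: LoanCause) {}
//         fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
//         fn mutate(&mut self, _: ast::NodeId, _: Span,
//                   _: mc::cmt<'tcx>, _: MutateMode) {}
//     }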

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum LoanCause {
    ClosureCapture(Span),
    AddrOf,
    AutoRef,
    AutoUnsafe,
    RefBinding,
    OverloadedOperator,
    ClosureInvocation,
    ForLoop,
    MatchDiscriminant
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ConsumeMode {
    Copy,             // reference to x where x has a type that copies
    Move(MoveReason), // reference to x where x has a type that moves
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveReason {
    DirectRefMove,
    PatBindingMove,
    CaptureMove,
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MatchMode {
    NonBindingMatch,
    BorrowingMatch,
    CopyingMatch,
    MovingMatch,
}

#[derive(Copy, Clone, PartialEq, Debug)]
enum TrackMatchMode {
    Unknown,
    Definite(MatchMode),
    Conflicting,
}

impl TrackMatchMode {
    // Builds up the whole match mode for a pattern from its constituent
    // parts. The lattice looks like this:
    //
    //          Conflicting
    //            /     \
    //           /       \
    //    Borrowing     Moving
    //           \       /
    //            \     /
    //            Copying
    //               |
    //          NonBinding
    //               |
    //            Unknown
    //
    // examples:
    //
    // * `(_, some_int)` pattern is Copying, since
    //   NonBinding + Copying => Copying
    //
    // * `(some_int, some_box)` pattern is Moving, since
    //   Copying + Moving => Moving
    //
    // * `(ref x, some_box)` pattern is Conflicting, since
    //   Borrowing + Moving => Conflicting
    //
    // Note that the `Unknown` and `Conflicting` states are
    // represented separately from the other more interesting
    // `Definite` states, which simplifies logic here somewhat.
    fn lub(&mut self, mode: MatchMode) {
        *self = match (*self, mode) {
            // Note that clause order below is very significant.
            (Unknown, new) => Definite(new),
            (Definite(old), new) if old == new => Definite(old),

            (Definite(old), NonBindingMatch) => Definite(old),
            (Definite(NonBindingMatch), new) => Definite(new),

            (Definite(old), CopyingMatch) => Definite(old),
            (Definite(CopyingMatch), new) => Definite(new),

            (Definite(_), _) => Conflicting,
            (Conflicting, _) => *self,
        };
    }

    fn match_mode(&self) -> MatchMode {
        match *self {
            Unknown => NonBindingMatch,
            Definite(mode) => mode,
            Conflicting => {
                // Conservatively return MovingMatch to let the
                // compiler continue to make progress.
                MovingMatch
            }
        }
    }
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MutateMode {
    Init,
    JustWrite,    // x = y
    WriteAndRead, // x += y
}

#[derive(Copy, Clone)]
enum OverloadedCallType {
    FnOverloadedCall,
    FnMutOverloadedCall,
    FnOnceOverloadedCall,
}

impl OverloadedCallType {
    fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType {
        for &(maybe_function_trait, overloaded_call_type) in &[
            (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall),
            (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall),
            (tcx.lang_items.fn_trait(), FnOverloadedCall)
        ] {
            match maybe_function_trait {
                Some(function_trait) if function_trait == trait_id => {
                    return overloaded_call_type
                }
                _ => continue,
            }
        }

        bug!("overloaded call didn't map to known function trait")
    }

    fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType {
        let method = tcx.impl_or_trait_item(method_id);
        OverloadedCallType::from_trait_id(tcx, method.container().id())
    }
}

///////////////////////////////////////////////////////////////////////////
// The ExprUseVisitor type
//
// This is the code that actually walks the tree. Like
// mem_categorization, it requires a TYPER, which is a type that
// supplies types from the tree. After type checking is complete, you
// can just use the tcx as the typer.

pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
    delegate: &'a mut Delegate<'tcx>,
}

// If the TYPER results in an error, it's because the type check
// failed (or will fail, when the error is uncovered and reported
// during writeback). In this case, we just ignore this part of the
// code.
//
// Note that this macro appears similar to try!(), but, unlike try!(),
// it does not propagate the error.
macro_rules! return_if_err {
    ($inp: expr) => (
        match $inp {
            Ok(v) => v,
            Err(()) => {
                debug!("mc reported err");
                return
            }
        }
    )
}
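
// Usage note: a call such as `return_if_err!(self.mc.cat_expr(expr))`
// evaluates to the categorized expression on success, and on `Err(())`
// logs a debug message and returns early from the enclosing function.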

/// Whether the elements of an overloaded operation are passed by value or by reference
enum PassArgs {
    ByValue,
    ByRef,
}

impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
    pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
               infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
               -> Self
    {
        ExprUseVisitor::with_options(delegate, infcx, mc::MemCategorizationOptions::default())
    }

    pub fn with_options(delegate: &'a mut (Delegate<'tcx>+'a),
                        infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
                        options: mc::MemCategorizationOptions)
                        -> Self
    {
        ExprUseVisitor {
            mc: mc::MemCategorizationContext::with_options(infcx, options),
            delegate: delegate
        }
    }

    pub fn walk_fn(&mut self,
                   decl: &hir::FnDecl,
                   body: &hir::Block) {
        self.walk_arg_patterns(decl, body);
        self.walk_block(body);
    }

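    // An illustrative sketch of how a client drives the visitor over a fn
    // body (hypothetical names: `delegate` is some `Delegate<'tcx>` impl,
    // `infcx` the relevant `InferCtxt`, `decl`/`body` the fn's HIR):
    //
    //     let mut euv = ExprUseVisitor::new(&mut delegate, &infcx);
    //     euv.walk_fn(decl, body);
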
    fn walk_arg_patterns(&mut self,
                         decl: &hir::FnDecl,
                         body: &hir::Block) {
        for arg in &decl.inputs {
            let arg_ty = return_if_err!(self.mc.infcx.node_ty(arg.pat.id));

            let fn_body_scope_r = self.tcx().node_scope_region(body.id);
            let arg_cmt = self.mc.cat_rvalue(
                arg.id,
                arg.pat.span,
                fn_body_scope_r, // Args live only as long as the fn body.
                arg_ty);

            self.walk_irrefutable_pat(arg_cmt, &arg.pat);
        }
    }

    fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
        self.mc.infcx.tcx
    }

    fn delegate_consume(&mut self,
                        consume_id: ast::NodeId,
                        consume_span: Span,
                        cmt: mc::cmt<'tcx>) {
        debug!("delegate_consume(consume_id={}, cmt={:?})",
               consume_id, cmt);

        let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove);
        self.delegate.consume(consume_id, consume_span, cmt, mode);
    }

    fn consume_exprs(&mut self, exprs: &[P<hir::Expr>]) {
        for expr in exprs {
            self.consume_expr(&expr);
        }
    }

    pub fn consume_expr(&mut self, expr: &hir::Expr) {
        debug!("consume_expr(expr={:?})", expr);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate_consume(expr.id, expr.span, cmt);
        self.walk_expr(expr);
    }

    fn mutate_expr(&mut self,
                   assignment_expr: &hir::Expr,
                   expr: &hir::Expr,
                   mode: MutateMode) {
        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
        self.walk_expr(expr);
    }

    fn borrow_expr(&mut self,
                   expr: &hir::Expr,
                   r: &'tcx ty::Region,
                   bk: ty::BorrowKind,
                   cause: LoanCause) {
        debug!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
               expr, r, bk);

        let cmt = return_if_err!(self.mc.cat_expr(expr));
        self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);

        self.walk_expr(expr)
    }

    fn select_from_expr(&mut self, expr: &hir::Expr) {
        self.walk_expr(expr)
    }

    pub fn walk_expr(&mut self, expr: &hir::Expr) {
        debug!("walk_expr(expr={:?})", expr);

        self.walk_adjustment(expr);

        match expr.node {
            hir::ExprPath(..) => { }

            hir::ExprType(ref subexpr, _) => {
                self.walk_expr(&subexpr)
            }

            hir::ExprUnary(hir::UnDeref, ref base) => { // *base
                if !self.walk_overloaded_operator(expr, &base, Vec::new(), PassArgs::ByRef) {
                    self.select_from_expr(&base);
                }
            }

            hir::ExprField(ref base, _) => { // base.f
                self.select_from_expr(&base);
            }

            hir::ExprTupField(ref base, _) => { // base.<n>
                self.select_from_expr(&base);
            }

            hir::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
                if !self.walk_overloaded_operator(expr,
                                                  &lhs,
                                                  vec![&rhs],
                                                  PassArgs::ByValue) {
                    self.select_from_expr(&lhs);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprCall(ref callee, ref args) => { // callee(args)
                self.walk_callee(expr, &callee);
                self.consume_exprs(args);
            }

            hir::ExprMethodCall(.., ref args) => { // callee.m(args)
                self.consume_exprs(args);
            }

            hir::ExprStruct(_, ref fields, ref opt_with) => {
                self.walk_struct_expr(fields, opt_with);
            }

            hir::ExprTup(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
                self.consume_expr(&cond_expr);
                self.walk_block(&then_blk);
                if let Some(ref else_expr) = *opt_else_expr {
                    self.consume_expr(&else_expr);
                }
            }

            hir::ExprMatch(ref discr, ref arms, _) => {
                let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
                let r = self.tcx().mk_region(ty::ReEmpty);
                self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant);

                // treatment of the discriminant is handled while walking the arms.
                for arm in arms {
                    let mode = self.arm_move_mode(discr_cmt.clone(), arm);
                    let mode = mode.match_mode();
                    self.walk_arm(discr_cmt.clone(), arm, mode);
                }
            }

            hir::ExprVec(ref exprs) => {
                self.consume_exprs(exprs);
            }

            hir::ExprAddrOf(m, ref base) => { // &base
                // make sure that the thing we are pointing out stays valid
                // for the lifetime `scope_r` of the resulting ptr:
                let expr_ty = return_if_err!(self.mc.infcx.node_ty(expr.id));
                if let ty::TyRef(r, _) = expr_ty.sty {
                    let bk = ty::BorrowKind::from_mutbl(m);
                    self.borrow_expr(&base, r, bk, AddrOf);
                }
            }

            hir::ExprInlineAsm(ref ia, ref outputs, ref inputs) => {
                for (o, output) in ia.outputs.iter().zip(outputs) {
                    if o.is_indirect {
                        self.consume_expr(output);
                    } else {
                        self.mutate_expr(expr, output,
                                         if o.is_rw {
                                             MutateMode::WriteAndRead
                                         } else {
                                             MutateMode::JustWrite
                                         });
                    }
                }
                self.consume_exprs(inputs);
            }

            hir::ExprBreak(..) |
            hir::ExprAgain(..) |
            hir::ExprLit(..) => {}

            hir::ExprLoop(ref blk, _) => {
                self.walk_block(&blk);
            }

            hir::ExprWhile(ref cond_expr, ref blk, _) => {
                self.consume_expr(&cond_expr);
                self.walk_block(&blk);
            }

            hir::ExprUnary(op, ref lhs) => {
                let pass_args = if op.is_by_value() {
                    PassArgs::ByValue
                } else {
                    PassArgs::ByRef
                };

                if !self.walk_overloaded_operator(expr, &lhs, Vec::new(), pass_args) {
                    self.consume_expr(&lhs);
                }
            }

            hir::ExprBinary(op, ref lhs, ref rhs) => {
                let pass_args = if op.node.is_by_value() {
                    PassArgs::ByValue
                } else {
                    PassArgs::ByRef
                };

                if !self.walk_overloaded_operator(expr, &lhs, vec![&rhs], pass_args) {
                    self.consume_expr(&lhs);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprBlock(ref blk) => {
                self.walk_block(&blk);
            }

            hir::ExprRet(ref opt_expr) => {
                if let Some(ref expr) = *opt_expr {
                    self.consume_expr(&expr);
                }
            }

            hir::ExprAssign(ref lhs, ref rhs) => {
                self.mutate_expr(expr, &lhs, MutateMode::JustWrite);
                self.consume_expr(&rhs);
            }

            hir::ExprCast(ref base, _) => {
                self.consume_expr(&base);
            }

            hir::ExprAssignOp(op, ref lhs, ref rhs) => {
                // NB All our assignment operations take the RHS by value
                assert!(op.node.is_by_value());

                if !self.walk_overloaded_operator(expr, lhs, vec![rhs], PassArgs::ByValue) {
                    self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
                    self.consume_expr(&rhs);
                }
            }

            hir::ExprRepeat(ref base, ref count) => {
                self.consume_expr(&base);
                self.consume_expr(&count);
            }

            hir::ExprClosure(.., fn_decl_span) => {
                self.walk_captures(expr, fn_decl_span)
            }

            hir::ExprBox(ref base) => {
                self.consume_expr(&base);
            }
        }
    }

    fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) {
        let callee_ty = return_if_err!(self.mc.infcx.expr_ty_adjusted(callee));
        debug!("walk_callee: callee={:?} callee_ty={:?}",
               callee, callee_ty);
        match callee_ty.sty {
            ty::TyFnDef(..) | ty::TyFnPtr(_) => {
                self.consume_expr(callee);
            }
            ty::TyError => { }
            _ => {
                let overloaded_call_type =
                    match self.mc.infcx.node_method_id(ty::MethodCall::expr(call.id)) {
                        Some(method_id) => {
                            OverloadedCallType::from_method_id(self.tcx(), method_id)
                        }
                        None => {
                            span_bug!(
                                callee.span,
                                "unexpected callee type {}",
                                callee_ty)
                        }
                    };
                match overloaded_call_type {
                    FnMutOverloadedCall => {
                        let call_scope_r = self.tcx().node_scope_region(call.id);
                        self.borrow_expr(callee,
                                         call_scope_r,
                                         ty::MutBorrow,
                                         ClosureInvocation);
                    }
                    FnOverloadedCall => {
                        let call_scope_r = self.tcx().node_scope_region(call.id);
                        self.borrow_expr(callee,
                                         call_scope_r,
                                         ty::ImmBorrow,
                                         ClosureInvocation);
                    }
                    FnOnceOverloadedCall => self.consume_expr(callee),
                }
            }
        }
    }

    fn walk_stmt(&mut self, stmt: &hir::Stmt) {
        match stmt.node {
            hir::StmtDecl(ref decl, _) => {
                match decl.node {
                    hir::DeclLocal(ref local) => {
                        self.walk_local(&local);
                    }

                    hir::DeclItem(_) => {
                        // we don't visit nested items in this visitor,
                        // only the fn body we were given.
                    }
                }
            }

            hir::StmtExpr(ref expr, _) |
            hir::StmtSemi(ref expr, _) => {
                self.consume_expr(&expr);
            }
        }
    }

    fn walk_local(&mut self, local: &hir::Local) {
        match local.init {
            None => {
                let delegate = &mut self.delegate;
                pat_util::pat_bindings(&local.pat, |_, id, span, _| {
                    delegate.decl_without_init(id, span);
                })
            }

            Some(ref expr) => {
                // Variable declarations with
                // initializers are considered
                // "assigns", which is handled by
                // `walk_pat`:
                self.walk_expr(&expr);
                let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
                self.walk_irrefutable_pat(init_cmt, &local.pat);
            }
        }
    }

    /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
    /// depending on its type.
    fn walk_block(&mut self, blk: &hir::Block) {
        debug!("walk_block(blk.id={})", blk.id);

        for stmt in &blk.stmts {
            self.walk_stmt(stmt);
        }

        if let Some(ref tail_expr) = blk.expr {
            self.consume_expr(&tail_expr);
        }
    }

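    // Walks a struct literal expression: each explicitly supplied field
    // expression is consumed, and when a `..base` expression is present,
    // only those fields of the base that were not listed explicitly are
    // consumed out of it (the base expression itself is then walked).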
    fn walk_struct_expr(&mut self,
                        fields: &[hir::Field],
                        opt_with: &Option<P<hir::Expr>>) {
        // Consume the expressions supplying values for each field.
        for field in fields {
            self.consume_expr(&field.expr);
        }

        let with_expr = match *opt_with {
            Some(ref w) => &**w,
            None => { return; }
        };

        let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));

        // Select just those fields of the `with`
        // expression that will actually be used
        match with_cmt.ty.sty {
            ty::TyAdt(adt, substs) if adt.is_struct() => {
                // Consume those fields of the with expression that are needed.
                for with_field in &adt.struct_variant().fields {
                    if !contains_field_named(with_field, fields) {
                        let cmt_field = self.mc.cat_field(
                            &*with_expr,
                            with_cmt.clone(),
                            with_field.name,
                            with_field.ty(self.tcx(), substs)
                        );
                        self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
                    }
                }
            }
            _ => {
                // the base expression should always evaluate to a
                // struct; however, when EUV is run during typeck, it
                // may not. This will generate an error earlier in typeck,
                // so we can just ignore it.
                if !self.tcx().sess.has_errors() {
                    span_bug!(
                        with_expr.span,
                        "with expression doesn't evaluate to a struct");
                }
            }
        }

        // walk the with expression so that complex expressions
        // are properly handled.
        self.walk_expr(with_expr);

        fn contains_field_named(field: ty::FieldDef,
                                fields: &[hir::Field])
                                -> bool
        {
            fields.iter().any(
                |f| f.name.node == field.name)
        }
    }

    // Invoke the appropriate delegate calls for anything that gets
    // consumed or borrowed as part of the automatic adjustment
    // process.
    fn walk_adjustment(&mut self, expr: &hir::Expr) {
        let infcx = self.mc.infcx;
        //NOTE(@jroesch): mixed RefCell borrow causes crash
        let adj = infcx.adjustments().get(&expr.id).map(|x| x.clone());
        if let Some(adjustment) = adj {
            match adjustment {
                adjustment::AdjustNeverToAny(..) |
                adjustment::AdjustReifyFnPointer |
                adjustment::AdjustUnsafeFnPointer |
                adjustment::AdjustMutToConstPointer => {
                    // Creating a closure/fn-pointer or unsizing consumes
                    // the input and stores it into the resulting rvalue.
                    debug!("walk_adjustment: trivial adjustment");
                    let cmt_unadjusted =
                        return_if_err!(self.mc.cat_expr_unadjusted(expr));
                    self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
                }
                adjustment::AdjustDerefRef(ref adj) => {
                    self.walk_autoderefref(expr, adj);
                }
            }
        }
    }

    /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
    /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
    /// `deref()` is declared with `&self`, this is an autoref of `x`.
    fn walk_autoderefs(&mut self,
                       expr: &hir::Expr,
                       autoderefs: usize) {
        debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);

        for i in 0..autoderefs {
            let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
            if let Some(method_ty) = self.mc.infcx.node_method_ty(deref_id) {
                let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));

                // the method call infrastructure should have
                // replaced all late-bound regions with variables:
                let self_ty = method_ty.fn_sig().input(0);
                let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();

                let (m, r) = match self_ty.sty {
                    ty::TyRef(r, ref m) => (m.mutbl, r),
                    _ => span_bug!(expr.span,
                                   "bad overloaded deref type {:?}",
                                   method_ty)
                };
                let bk = ty::BorrowKind::from_mutbl(m);
                self.delegate.borrow(expr.id, expr.span, cmt,
                                     r, bk, AutoRef);
            }
        }
    }

    fn walk_autoderefref(&mut self,
                         expr: &hir::Expr,
                         adj: &adjustment::AutoDerefRef<'tcx>) {
        debug!("walk_autoderefref expr={:?} adj={:?}",
               expr,
               adj);

        self.walk_autoderefs(expr, adj.autoderefs);

        let cmt_derefd =
            return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));

        let cmt_refd =
            self.walk_autoref(expr, cmt_derefd, adj.autoref);

        if adj.unsize.is_some() {
            // Unsizing consumes the thin pointer and produces a fat one.
            self.delegate_consume(expr.id, expr.span, cmt_refd);
        }
    }


    /// Walks the autoref `opt_autoref` applied to the autoderef'd
    /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
    /// after all relevant autoderefs have occurred. Because AutoRefs
    /// can be recursive, this function is recursive: it first walks
    /// deeply all the way down the autoref chain, and then processes
    /// the autorefs on the way out. At each point, it returns the
    /// `cmt` for the rvalue that will be produced by introducing an
    /// autoref.
    fn walk_autoref(&mut self,
                    expr: &hir::Expr,
                    cmt_base: mc::cmt<'tcx>,
                    opt_autoref: Option<adjustment::AutoRef<'tcx>>)
                    -> mc::cmt<'tcx>
    {
        debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
               expr.id,
               cmt_base,
               opt_autoref);

        let cmt_base_ty = cmt_base.ty;

        let autoref = match opt_autoref {
            Some(ref autoref) => autoref,
            None => {
                // No AutoRef.
                return cmt_base;
            }
        };

        match *autoref {
            adjustment::AutoPtr(r, m) => {
                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoRef);
            }

            adjustment::AutoUnsafe(m) => {
                debug!("walk_autoref: expr.id={} cmt_base={:?}",
                       expr.id,
                       cmt_base);

                // Converting from a &T to *T (or &mut T to *mut T) is
                // treated as borrowing it for the enclosing temporary
                // scope.
                let r = self.tcx().node_scope_region(expr.id);

                self.delegate.borrow(expr.id,
                                     expr.span,
                                     cmt_base,
                                     r,
                                     ty::BorrowKind::from_mutbl(m),
                                     AutoUnsafe);
            }
        }

        // Construct the categorization for the result of the autoref.
        // This is always an rvalue, since we are producing a new
        // (temporary) indirection.

        let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);

        self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
    }


    // When this returns true, it means that the expression *is* a
    // method-call (i.e. via the operator-overload). This true result
    // also implies that walk_overloaded_operator already took care of
    // recursively processing the input arguments, and thus the caller
    // should not do so.
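    //
    // For instance, with `PassArgs::ByValue` the receiver and all arguments
    // are consumed, while with `PassArgs::ByRef` the receiver is walked and
    // each argument is borrowed immutably for the scope of the expression.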
    fn walk_overloaded_operator(&mut self,
                                expr: &hir::Expr,
                                receiver: &hir::Expr,
                                rhs: Vec<&hir::Expr>,
                                pass_args: PassArgs)
                                -> bool
    {
        if !self.mc.infcx.is_method_call(expr.id) {
            return false;
        }

        match pass_args {
            PassArgs::ByValue => {
                self.consume_expr(receiver);
                for &arg in &rhs {
                    self.consume_expr(arg);
                }

                return true;
            },
            PassArgs::ByRef => {},
        }

        self.walk_expr(receiver);

        // Arguments (but not receivers) to overloaded operator
        // methods are implicitly autoref'd which sadly does not use
        // adjustments, so we must hardcode the borrow here.

        let r = self.tcx().node_scope_region(expr.id);
        let bk = ty::ImmBorrow;

        for &arg in &rhs {
            self.borrow_expr(arg, r, bk, OverloadedOperator);
        }
        return true;
    }

    fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
        let mut mode = Unknown;
        for pat in &arm.pats {
            self.determine_pat_move_mode(discr_cmt.clone(), &pat, &mut mode);
        }
        mode
    }

    fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm, mode: MatchMode) {
        for pat in &arm.pats {
            self.walk_pat(discr_cmt.clone(), &pat, mode);
        }

        if let Some(ref guard) = arm.guard {
            self.consume_expr(&guard);
        }

        self.consume_expr(&arm.body);
    }

    /// Walks a pat that occurs in isolation (i.e. the top level of a fn
    /// arg or let binding; *not* a match arm or nested pat).
    fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
        let mut mode = Unknown;
        self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
        let mode = mode.match_mode();
        self.walk_pat(cmt_discr, pat, mode);
    }

    /// Identifies any bindings within `pat` and accumulates within
    /// `mode` whether the overall pattern/match structure is a move,
    /// copy, or borrow.
    fn determine_pat_move_mode(&mut self,
                               cmt_discr: mc::cmt<'tcx>,
                               pat: &hir::Pat,
                               mode: &mut TrackMatchMode) {
        debug!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr,
               pat);
        return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
            match pat.node {
                PatKind::Binding(hir::BindByRef(..), ..) =>
                    mode.lub(BorrowingMatch),
                PatKind::Binding(hir::BindByValue(..), ..) => {
                    match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) {
                        Copy => mode.lub(CopyingMatch),
                        Move(..) => mode.lub(MovingMatch),
                    }
                }
                _ => {}
            }
        }));
    }

    /// The core driver for walking a pattern; `match_mode` must be
    /// established up front, e.g. via `determine_pat_move_mode` (see
    /// also `walk_irrefutable_pat` for patterns that stand alone).
    fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
        debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);

        let tcx = &self.tcx();
        let mc = &self.mc;
        let infcx = self.mc.infcx;
        let delegate = &mut self.delegate;
        return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
            if let PatKind::Binding(bmode, ..) = pat.node {
                debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}", cmt_pat, pat, match_mode);

                // pat_ty: the type of the binding being produced.
                let pat_ty = return_if_err!(infcx.node_ty(pat.id));

                // Each match binding is effectively an assignment to the
                // binding being produced.
                if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty,
                                                    tcx.expect_def(pat.id)) {
                    delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
                }

                // It is also a borrow or copy/move of the value being matched.
                match bmode {
                    hir::BindByRef(m) => {
                        if let ty::TyRef(r, _) = pat_ty.sty {
                            let bk = ty::BorrowKind::from_mutbl(m);
                            delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
                        }
                    }
                    hir::BindByValue(..) => {
                        let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
                        debug!("walk_pat binding consuming pat");
                        delegate.consume_pat(pat, cmt_pat, mode);
                    }
                }
            }
        }));

        // Do a second pass over the pattern, calling `matched_pat` on
        // the interior nodes (enum variants and structs), as opposed
        // to the bindings that form the leaves of the pattern's tree
        // structure, which the loop above visited.
        return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
            match tcx.expect_def_or_none(pat.id) {
                Some(Def::Variant(variant_did)) => {
                    let enum_did = tcx.parent_def_id(variant_did).unwrap();
                    let downcast_cmt = if tcx.lookup_adt_def(enum_did).is_univariant() {
                        cmt_pat
                    } else {
                        let cmt_pat_ty = cmt_pat.ty;
                        mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
                    };

                    debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
                    delegate.matched_pat(pat, downcast_cmt, match_mode);
                }
                Some(Def::Struct(..)) | Some(Def::Union(..)) |
                Some(Def::TyAlias(..)) | Some(Def::AssociatedTy(..)) => {
                    debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
                    delegate.matched_pat(pat, cmt_pat, match_mode);
                }
                _ => {}
            }
        }));
    }

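    // Reports the delegate events for a closure's captured variables: a
    // by-value capture is a consume (copy or move) of the captured variable,
    // while a by-ref capture is a borrow with the recorded region and kind.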
    fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
        debug!("walk_captures({:?})", closure_expr);

        self.tcx().with_freevars(closure_expr.id, |freevars| {
            for freevar in freevars {
                let def_id = freevar.def.def_id();
                let id_var = self.tcx().map.as_local_node_id(def_id).unwrap();
                let upvar_id = ty::UpvarId { var_id: id_var,
                                             closure_expr_id: closure_expr.id };
                let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap();
                let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
                                                                   fn_decl_span,
                                                                   freevar.def));
                match upvar_capture {
                    ty::UpvarCapture::ByValue => {
                        let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove);
                        self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
                    }
                    ty::UpvarCapture::ByRef(upvar_borrow) => {
                        self.delegate.borrow(closure_expr.id,
                                             fn_decl_span,
                                             cmt_var,
                                             upvar_borrow.region,
                                             upvar_borrow.kind,
                                             ClosureCapture(freevar.span));
                    }
                }
            }
        });
    }

    fn cat_captured_var(&mut self,
                        closure_id: ast::NodeId,
                        closure_span: Span,
                        upvar_def: Def)
                        -> mc::McResult<mc::cmt<'tcx>> {
        // Create the cmt for the variable being borrowed, from the
        // caller's perspective
        let var_id = self.tcx().map.as_local_node_id(upvar_def.def_id()).unwrap();
        let var_ty = self.mc.infcx.node_ty(var_id)?;
        self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
    }
}

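// Determines whether consuming the categorized value copies or moves it:
// types that move by default yield `Move(move_reason)`, while `Copy` types
// yield `Copy`.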
fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                                cmt: &mc::cmt<'tcx>,
                                move_reason: MoveReason)
                                -> ConsumeMode
{
    if infcx.type_moves_by_default(cmt.ty, cmt.span) {
        Move(move_reason)
    } else {
        Copy
    }
}