// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A different sort of visitor for walking fn bodies.  Unlike the
//! normal visitor, which just walks the entire body in one shot, the
//! `ExprUseVisitor` determines how expressions are being used.
pub use self::LoanCause::*;
pub use self::ConsumeMode::*;
pub use self::MoveReason::*;
pub use self::MatchMode::*;
use self::TrackMatchMode::*;
use self::OverloadedCallType::*;

use middle::{def, pat_util};
use middle::def_id::{DefId};
use middle::infer;
use middle::mem_categorization as mc;
use middle::ty;
use middle::ty::adjustment;

use rustc_front::hir;

use syntax::ast;
use syntax::codemap::Span;
use syntax::ptr::P;
35 ///////////////////////////////////////////////////////////////////////////
38 /// This trait defines the callbacks you can expect to receive when
39 /// employing the ExprUseVisitor.
40 pub trait Delegate
<'tcx
> {
41 // The value found at `cmt` is either copied or moved, depending
44 consume_id
: ast
::NodeId
,
49 // The value found at `cmt` has been determined to match the
50 // pattern binding `matched_pat`, and its subparts are being
51 // copied or moved depending on `mode`. Note that `matched_pat`
52 // is called on all variant/structs in the pattern (i.e., the
53 // interior nodes of the pattern's tree structure) while
54 // consume_pat is called on the binding identifiers in the pattern
55 // (which are leaves of the pattern's tree structure).
57 // Note that variants/structs and identifiers are disjoint; thus
58 // `matched_pat` and `consume_pat` are never both called on the
59 // same input pattern structure (though of `consume_pat` can be
60 // called on a subpart of an input passed to `matched_pat).
61 fn matched_pat(&mut self,
62 matched_pat
: &hir
::Pat
,
66 // The value found at `cmt` is either copied or moved via the
67 // pattern binding `consume_pat`, depending on mode.
68 fn consume_pat(&mut self,
69 consume_pat
: &hir
::Pat
,
73 // The value found at `borrow` is being borrowed at the point
74 // `borrow_id` for the region `loan_region` with kind `bk`.
76 borrow_id
: ast
::NodeId
,
79 loan_region
: ty
::Region
,
81 loan_cause
: LoanCause
);
83 // The local variable `id` is declared but not initialized.
84 fn decl_without_init(&mut self,
88 // The path at `cmt` is being assigned to.
90 assignment_id
: ast
::NodeId
,
91 assignment_span
: Span
,
92 assignee_cmt
: mc
::cmt
<'tcx
>,
96 #[derive(Copy, Clone, PartialEq, Debug)]
109 #[derive(Copy, Clone, PartialEq, Debug)]
110 pub enum ConsumeMode
{
111 Copy
, // reference to x where x has a type that copies
112 Move(MoveReason
), // reference to x where x has a type that moves
/// Why a value is being moved; the variants are grounded by the
/// `copy_or_move(..)` call sites in this file (direct reference,
/// pattern binding, closure capture).
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveReason {
    DirectRefMove,
    PatBindingMove,
    CaptureMove,
}
/// Overall effect of a pattern match on the matched value, computed
/// by accumulating per-binding modes through `TrackMatchMode::lub`.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MatchMode {
    NonBindingMatch,
    BorrowingMatch,
    CopyingMatch,
    MovingMatch,
}
130 #[derive(Copy, Clone, PartialEq, Debug)]
131 enum TrackMatchMode
{
137 impl TrackMatchMode
{
138 // Builds up the whole match mode for a pattern from its constituent
139 // parts. The lattice looks like this:
155 // * `(_, some_int)` pattern is Copying, since
156 // NonBinding + Copying => Copying
158 // * `(some_int, some_box)` pattern is Moving, since
159 // Copying + Moving => Moving
161 // * `(ref x, some_box)` pattern is Conflicting, since
162 // Borrowing + Moving => Conflicting
164 // Note that the `Unknown` and `Conflicting` states are
165 // represented separately from the other more interesting
166 // `Definite` states, which simplifies logic here somewhat.
167 fn lub(&mut self, mode
: MatchMode
) {
168 *self = match (*self, mode
) {
169 // Note that clause order below is very significant.
170 (Unknown
, new
) => Definite(new
),
171 (Definite(old
), new
) if old
== new
=> Definite(old
),
173 (Definite(old
), NonBindingMatch
) => Definite(old
),
174 (Definite(NonBindingMatch
), new
) => Definite(new
),
176 (Definite(old
), CopyingMatch
) => Definite(old
),
177 (Definite(CopyingMatch
), new
) => Definite(new
),
179 (Definite(_
), _
) => Conflicting
,
180 (Conflicting
, _
) => *self,
184 fn match_mode(&self) -> MatchMode
{
186 Unknown
=> NonBindingMatch
,
187 Definite(mode
) => mode
,
189 // Conservatively return MovingMatch to let the
190 // compiler continue to make progress.
/// Kind of write occurring at an assignment, reported to
/// `Delegate::mutate`.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MutateMode {
    Init,
    JustWrite,    // x = y
    WriteAndRead, // x += y
}
/// Which of the three function-call traits an overloaded call goes
/// through; this determines how the callee expression is used
/// (borrowed mutably, borrowed immutably, or consumed).
#[derive(Copy, Clone)]
enum OverloadedCallType {
    FnOverloadedCall,
    FnMutOverloadedCall,
    FnOnceOverloadedCall,
}
211 impl OverloadedCallType
{
212 fn from_trait_id(tcx
: &ty
::ctxt
, trait_id
: DefId
)
213 -> OverloadedCallType
{
214 for &(maybe_function_trait
, overloaded_call_type
) in &[
215 (tcx
.lang_items
.fn_once_trait(), FnOnceOverloadedCall
),
216 (tcx
.lang_items
.fn_mut_trait(), FnMutOverloadedCall
),
217 (tcx
.lang_items
.fn_trait(), FnOverloadedCall
)
219 match maybe_function_trait
{
220 Some(function_trait
) if function_trait
== trait_id
=> {
221 return overloaded_call_type
227 tcx
.sess
.bug("overloaded call didn't map to known function trait")
230 fn from_method_id(tcx
: &ty
::ctxt
, method_id
: DefId
)
231 -> OverloadedCallType
{
232 let method
= tcx
.impl_or_trait_item(method_id
);
233 OverloadedCallType
::from_trait_id(tcx
, method
.container().id())
237 ///////////////////////////////////////////////////////////////////////////
238 // The ExprUseVisitor type
240 // This is the code that actually walks the tree. Like
241 // mem_categorization, it requires a TYPER, which is a type that
242 // supplies types from the tree. After type checking is complete, you
243 // can just use the tcx as the typer.
244 pub struct ExprUseVisitor
<'d
, 't
, 'a
: 't
, 'tcx
:'a
+'d
> {
245 typer
: &'t infer
::InferCtxt
<'a
, 'tcx
>,
246 mc
: mc
::MemCategorizationContext
<'t
, 'a
, 'tcx
>,
247 delegate
: &'d
mut Delegate
<'tcx
>,
250 // If the TYPER results in an error, it's because the type check
251 // failed (or will fail, when the error is uncovered and reported
252 // during writeback). In this case, we just ignore this part of the
255 // Note that this macro appears similar to try!(), but, unlike try!(),
256 // it does not propagate the error.
// NOTE(review): expansion reconstructed — the use sites
// (`let x = return_if_err!(self.mc.cat_expr(..))`) require it to
// yield the Ok value and early-return from the enclosing fn on Err;
// confirm the exact error pattern against the upstream source.
macro_rules! return_if_err {
    ($inp: expr) => (
        match $inp {
            Ok(v) => v,
            Err(()) => {
                debug!("mc reported err");
                return
            }
        }
    )
}
/// Whether the elements of an overloaded operation are passed by value or by reference
#[derive(Copy, Clone, PartialEq, Debug)]
enum PassArgs {
    ByValue,
    ByRef,
}
275 impl<'d
,'t
,'a
,'tcx
> ExprUseVisitor
<'d
,'t
,'a
,'tcx
> {
276 pub fn new(delegate
: &'d
mut (Delegate
<'tcx
>+'d
),
277 typer
: &'t infer
::InferCtxt
<'a
, 'tcx
>)
278 -> ExprUseVisitor
<'d
,'t
,'a
,'tcx
> where 'tcx
:'a
+'d
280 let mc
: mc
::MemCategorizationContext
<'t
, 'a
, 'tcx
> =
281 mc
::MemCategorizationContext
::new(typer
);
282 ExprUseVisitor { typer: typer, mc: mc, delegate: delegate }
285 pub fn walk_fn(&mut self,
288 self.walk_arg_patterns(decl
, body
);
289 self.walk_block(body
);
292 fn walk_arg_patterns(&mut self,
295 for arg
in &decl
.inputs
{
296 let arg_ty
= return_if_err
!(self.typer
.node_ty(arg
.pat
.id
));
298 let fn_body_scope
= self.tcx().region_maps
.node_extent(body
.id
);
299 let arg_cmt
= self.mc
.cat_rvalue(
302 ty
::ReScope(fn_body_scope
), // Args live only as long as the fn body.
305 self.walk_irrefutable_pat(arg_cmt
, &*arg
.pat
);
309 fn tcx(&self) -> &'t ty
::ctxt
<'tcx
> {
313 fn delegate_consume(&mut self,
314 consume_id
: ast
::NodeId
,
316 cmt
: mc
::cmt
<'tcx
>) {
317 debug
!("delegate_consume(consume_id={}, cmt={:?})",
320 let mode
= copy_or_move(self.typer
, &cmt
, DirectRefMove
);
321 self.delegate
.consume(consume_id
, consume_span
, cmt
, mode
);
324 fn consume_exprs(&mut self, exprs
: &[P
<hir
::Expr
>]) {
326 self.consume_expr(&**expr
);
330 pub fn consume_expr(&mut self, expr
: &hir
::Expr
) {
331 debug
!("consume_expr(expr={:?})", expr
);
333 let cmt
= return_if_err
!(self.mc
.cat_expr(expr
));
334 self.delegate_consume(expr
.id
, expr
.span
, cmt
);
335 self.walk_expr(expr
);
338 fn mutate_expr(&mut self,
339 assignment_expr
: &hir
::Expr
,
342 let cmt
= return_if_err
!(self.mc
.cat_expr(expr
));
343 self.delegate
.mutate(assignment_expr
.id
, assignment_expr
.span
, cmt
, mode
);
344 self.walk_expr(expr
);
347 fn borrow_expr(&mut self,
352 debug
!("borrow_expr(expr={:?}, r={:?}, bk={:?})",
355 let cmt
= return_if_err
!(self.mc
.cat_expr(expr
));
356 self.delegate
.borrow(expr
.id
, expr
.span
, cmt
, r
, bk
, cause
);
361 fn select_from_expr(&mut self, expr
: &hir
::Expr
) {
365 pub fn walk_expr(&mut self, expr
: &hir
::Expr
) {
366 debug
!("walk_expr(expr={:?})", expr
);
368 self.walk_adjustment(expr
);
371 hir
::ExprPath(..) => { }
373 hir
::ExprType(ref subexpr
, _
) => {
374 self.walk_expr(&**subexpr
)
377 hir
::ExprUnary(hir
::UnDeref
, ref base
) => { // *base
378 if !self.walk_overloaded_operator(expr
, &**base
, Vec
::new(), PassArgs
::ByRef
) {
379 self.select_from_expr(&**base
);
383 hir
::ExprField(ref base
, _
) => { // base.f
384 self.select_from_expr(&**base
);
387 hir
::ExprTupField(ref base
, _
) => { // base.<n>
388 self.select_from_expr(&**base
);
391 hir
::ExprIndex(ref lhs
, ref rhs
) => { // lhs[rhs]
392 if !self.walk_overloaded_operator(expr
,
396 self.select_from_expr(&**lhs
);
397 self.consume_expr(&**rhs
);
401 hir
::ExprRange(ref start
, ref end
) => {
402 start
.as_ref().map(|e
| self.consume_expr(&**e
));
403 end
.as_ref().map(|e
| self.consume_expr(&**e
));
406 hir
::ExprCall(ref callee
, ref args
) => { // callee(args)
407 self.walk_callee(expr
, &**callee
);
408 self.consume_exprs(args
);
411 hir
::ExprMethodCall(_
, _
, ref args
) => { // callee.m(args)
412 self.consume_exprs(args
);
415 hir
::ExprStruct(_
, ref fields
, ref opt_with
) => {
416 self.walk_struct_expr(expr
, fields
, opt_with
);
419 hir
::ExprTup(ref exprs
) => {
420 self.consume_exprs(exprs
);
423 hir
::ExprIf(ref cond_expr
, ref then_blk
, ref opt_else_expr
) => {
424 self.consume_expr(&**cond_expr
);
425 self.walk_block(&**then_blk
);
426 if let Some(ref else_expr
) = *opt_else_expr
{
427 self.consume_expr(&**else_expr
);
431 hir
::ExprMatch(ref discr
, ref arms
, _
) => {
432 let discr_cmt
= return_if_err
!(self.mc
.cat_expr(&**discr
));
433 self.borrow_expr(&**discr
, ty
::ReEmpty
, ty
::ImmBorrow
, MatchDiscriminant
);
435 // treatment of the discriminant is handled while walking the arms.
437 let mode
= self.arm_move_mode(discr_cmt
.clone(), arm
);
438 let mode
= mode
.match_mode();
439 self.walk_arm(discr_cmt
.clone(), arm
, mode
);
443 hir
::ExprVec(ref exprs
) => {
444 self.consume_exprs(exprs
);
447 hir
::ExprAddrOf(m
, ref base
) => { // &base
448 // make sure that the thing we are pointing out stays valid
449 // for the lifetime `scope_r` of the resulting ptr:
450 let expr_ty
= return_if_err
!(self.typer
.node_ty(expr
.id
));
451 if let ty
::TyRef(&r
, _
) = expr_ty
.sty
{
452 let bk
= ty
::BorrowKind
::from_mutbl(m
);
453 self.borrow_expr(&**base
, r
, bk
, AddrOf
);
457 hir
::ExprInlineAsm(ref ia
) => {
458 for &(_
, ref input
) in &ia
.inputs
{
459 self.consume_expr(&**input
);
462 for output
in &ia
.outputs
{
463 if output
.is_indirect
{
464 self.consume_expr(&*output
.expr
);
466 self.mutate_expr(expr
, &*output
.expr
,
468 MutateMode
::WriteAndRead
470 MutateMode
::JustWrite
478 hir
::ExprLit(..) => {}
480 hir
::ExprLoop(ref blk
, _
) => {
481 self.walk_block(&**blk
);
484 hir
::ExprWhile(ref cond_expr
, ref blk
, _
) => {
485 self.consume_expr(&**cond_expr
);
486 self.walk_block(&**blk
);
489 hir
::ExprUnary(op
, ref lhs
) => {
490 let pass_args
= if ::rustc_front
::util
::is_by_value_unop(op
) {
496 if !self.walk_overloaded_operator(expr
, &**lhs
, Vec
::new(), pass_args
) {
497 self.consume_expr(&**lhs
);
501 hir
::ExprBinary(op
, ref lhs
, ref rhs
) => {
502 let pass_args
= if ::rustc_front
::util
::is_by_value_binop(op
.node
) {
508 if !self.walk_overloaded_operator(expr
, &**lhs
, vec
![&**rhs
], pass_args
) {
509 self.consume_expr(&**lhs
);
510 self.consume_expr(&**rhs
);
514 hir
::ExprBlock(ref blk
) => {
515 self.walk_block(&**blk
);
518 hir
::ExprRet(ref opt_expr
) => {
519 if let Some(ref expr
) = *opt_expr
{
520 self.consume_expr(&**expr
);
524 hir
::ExprAssign(ref lhs
, ref rhs
) => {
525 self.mutate_expr(expr
, &**lhs
, MutateMode
::JustWrite
);
526 self.consume_expr(&**rhs
);
529 hir
::ExprCast(ref base
, _
) => {
530 self.consume_expr(&**base
);
533 hir
::ExprAssignOp(op
, ref lhs
, ref rhs
) => {
534 // NB All our assignment operations take the RHS by value
535 assert
!(::rustc_front
::util
::is_by_value_binop(op
.node
));
537 if !self.walk_overloaded_operator(expr
, lhs
, vec
![rhs
], PassArgs
::ByValue
) {
538 self.mutate_expr(expr
, &**lhs
, MutateMode
::WriteAndRead
);
539 self.consume_expr(&**rhs
);
543 hir
::ExprRepeat(ref base
, ref count
) => {
544 self.consume_expr(&**base
);
545 self.consume_expr(&**count
);
548 hir
::ExprClosure(..) => {
549 self.walk_captures(expr
)
552 hir
::ExprBox(ref base
) => {
553 self.consume_expr(&**base
);
558 fn walk_callee(&mut self, call
: &hir
::Expr
, callee
: &hir
::Expr
) {
559 let callee_ty
= return_if_err
!(self.typer
.expr_ty_adjusted(callee
));
560 debug
!("walk_callee: callee={:?} callee_ty={:?}",
562 let call_scope
= self.tcx().region_maps
.node_extent(call
.id
);
563 match callee_ty
.sty
{
564 ty
::TyBareFn(..) => {
565 self.consume_expr(callee
);
569 let overloaded_call_type
=
570 match self.typer
.node_method_id(ty
::MethodCall
::expr(call
.id
)) {
572 OverloadedCallType
::from_method_id(self.tcx(), method_id
)
575 self.tcx().sess
.span_bug(
577 &format
!("unexpected callee type {}", callee_ty
))
580 match overloaded_call_type
{
581 FnMutOverloadedCall
=> {
582 self.borrow_expr(callee
,
583 ty
::ReScope(call_scope
),
587 FnOverloadedCall
=> {
588 self.borrow_expr(callee
,
589 ty
::ReScope(call_scope
),
593 FnOnceOverloadedCall
=> self.consume_expr(callee
),
599 fn walk_stmt(&mut self, stmt
: &hir
::Stmt
) {
601 hir
::StmtDecl(ref decl
, _
) => {
603 hir
::DeclLocal(ref local
) => {
604 self.walk_local(&**local
);
607 hir
::DeclItem(_
) => {
608 // we don't visit nested items in this visitor,
609 // only the fn body we were given.
614 hir
::StmtExpr(ref expr
, _
) |
615 hir
::StmtSemi(ref expr
, _
) => {
616 self.consume_expr(&**expr
);
621 fn walk_local(&mut self, local
: &hir
::Local
) {
624 let delegate
= &mut self.delegate
;
625 pat_util
::pat_bindings(&self.typer
.tcx
.def_map
, &*local
.pat
,
627 delegate
.decl_without_init(id
, span
);
632 // Variable declarations with
633 // initializers are considered
634 // "assigns", which is handled by
636 self.walk_expr(&**expr
);
637 let init_cmt
= return_if_err
!(self.mc
.cat_expr(&**expr
));
638 self.walk_irrefutable_pat(init_cmt
, &*local
.pat
);
643 /// Indicates that the value of `blk` will be consumed, meaning either copied or moved
644 /// depending on its type.
645 fn walk_block(&mut self, blk
: &hir
::Block
) {
646 debug
!("walk_block(blk.id={})", blk
.id
);
648 for stmt
in &blk
.stmts
{
649 self.walk_stmt(stmt
);
652 if let Some(ref tail_expr
) = blk
.expr
{
653 self.consume_expr(&**tail_expr
);
657 fn walk_struct_expr(&mut self,
659 fields
: &[hir
::Field
],
660 opt_with
: &Option
<P
<hir
::Expr
>>) {
661 // Consume the expressions supplying values for each field.
662 for field
in fields
{
663 self.consume_expr(&*field
.expr
);
666 let with_expr
= match *opt_with
{
671 let with_cmt
= return_if_err
!(self.mc
.cat_expr(&*with_expr
));
673 // Select just those fields of the `with`
674 // expression that will actually be used
675 if let ty
::TyStruct(def
, substs
) = with_cmt
.ty
.sty
{
676 // Consume those fields of the with expression that are needed.
677 for with_field
in &def
.struct_variant().fields
{
678 if !contains_field_named(with_field
, fields
) {
679 let cmt_field
= self.mc
.cat_field(
683 with_field
.ty(self.tcx(), substs
)
685 self.delegate_consume(with_expr
.id
, with_expr
.span
, cmt_field
);
689 // the base expression should always evaluate to a
690 // struct; however, when EUV is run during typeck, it
691 // may not. This will generate an error earlier in typeck,
692 // so we can just ignore it.
693 if !self.tcx().sess
.has_errors() {
694 self.tcx().sess
.span_bug(
696 "with expression doesn't evaluate to a struct");
700 // walk the with expression so that complex expressions
701 // are properly handled.
702 self.walk_expr(with_expr
);
704 fn contains_field_named(field
: ty
::FieldDef
,
705 fields
: &[hir
::Field
])
709 |f
| f
.name
.node
== field
.name
)
713 // Invoke the appropriate delegate calls for anything that gets
714 // consumed or borrowed as part of the automatic adjustment
716 fn walk_adjustment(&mut self, expr
: &hir
::Expr
) {
717 let typer
= self.typer
;
718 //NOTE(@jroesch): mixed RefCell borrow causes crash
719 let adj
= typer
.adjustments().get(&expr
.id
).map(|x
| x
.clone());
720 if let Some(adjustment
) = adj
{
722 adjustment
::AdjustReifyFnPointer
|
723 adjustment
::AdjustUnsafeFnPointer
=> {
724 // Creating a closure/fn-pointer or unsizing consumes
725 // the input and stores it into the resulting rvalue.
726 debug
!("walk_adjustment(AdjustReifyFnPointer|AdjustUnsafeFnPointer)");
728 return_if_err
!(self.mc
.cat_expr_unadjusted(expr
));
729 self.delegate_consume(expr
.id
, expr
.span
, cmt_unadjusted
);
731 adjustment
::AdjustDerefRef(ref adj
) => {
732 self.walk_autoderefref(expr
, adj
);
738 /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
739 /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
740 /// `deref()` is declared with `&self`, this is an autoref of `x`.
741 fn walk_autoderefs(&mut self,
744 debug
!("walk_autoderefs expr={:?} autoderefs={}", expr
, autoderefs
);
746 for i
in 0..autoderefs
{
747 let deref_id
= ty
::MethodCall
::autoderef(expr
.id
, i
as u32);
748 match self.typer
.node_method_ty(deref_id
) {
751 let cmt
= return_if_err
!(self.mc
.cat_expr_autoderefd(expr
, i
));
753 // the method call infrastructure should have
754 // replaced all late-bound regions with variables:
755 let self_ty
= method_ty
.fn_sig().input(0);
756 let self_ty
= self.tcx().no_late_bound_regions(&self_ty
).unwrap();
758 let (m
, r
) = match self_ty
.sty
{
759 ty
::TyRef(r
, ref m
) => (m
.mutbl
, r
),
760 _
=> self.tcx().sess
.span_bug(expr
.span
,
761 &format
!("bad overloaded deref type {:?}",
764 let bk
= ty
::BorrowKind
::from_mutbl(m
);
765 self.delegate
.borrow(expr
.id
, expr
.span
, cmt
,
772 fn walk_autoderefref(&mut self,
774 adj
: &adjustment
::AutoDerefRef
<'tcx
>) {
775 debug
!("walk_autoderefref expr={:?} adj={:?}",
779 self.walk_autoderefs(expr
, adj
.autoderefs
);
782 return_if_err
!(self.mc
.cat_expr_autoderefd(expr
, adj
.autoderefs
));
785 self.walk_autoref(expr
, cmt_derefd
, adj
.autoref
);
787 if adj
.unsize
.is_some() {
788 // Unsizing consumes the thin pointer and produces a fat one.
789 self.delegate_consume(expr
.id
, expr
.span
, cmt_refd
);
794 /// Walks the autoref `opt_autoref` applied to the autoderef'd
795 /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
796 /// after all relevant autoderefs have occurred. Because AutoRefs
797 /// can be recursive, this function is recursive: it first walks
798 /// deeply all the way down the autoref chain, and then processes
799 /// the autorefs on the way out. At each point, it returns the
800 /// `cmt` for the rvalue that will be produced by introduced an
802 fn walk_autoref(&mut self,
804 cmt_base
: mc
::cmt
<'tcx
>,
805 opt_autoref
: Option
<adjustment
::AutoRef
<'tcx
>>)
808 debug
!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
813 let cmt_base_ty
= cmt_base
.ty
;
815 let autoref
= match opt_autoref
{
816 Some(ref autoref
) => autoref
,
824 adjustment
::AutoPtr(r
, m
) => {
825 self.delegate
.borrow(expr
.id
,
829 ty
::BorrowKind
::from_mutbl(m
),
833 adjustment
::AutoUnsafe(m
) => {
834 debug
!("walk_autoref: expr.id={} cmt_base={:?}",
838 // Converting from a &T to *T (or &mut T to *mut T) is
839 // treated as borrowing it for the enclosing temporary
841 let r
= ty
::ReScope(self.tcx().region_maps
.node_extent(expr
.id
));
843 self.delegate
.borrow(expr
.id
,
847 ty
::BorrowKind
::from_mutbl(m
),
852 // Construct the categorization for the result of the autoref.
853 // This is always an rvalue, since we are producing a new
854 // (temporary) indirection.
856 let adj_ty
= cmt_base_ty
.adjust_for_autoref(self.tcx(), opt_autoref
);
858 self.mc
.cat_rvalue_node(expr
.id
, expr
.span
, adj_ty
)
862 // When this returns true, it means that the expression *is* a
863 // method-call (i.e. via the operator-overload). This true result
864 // also implies that walk_overloaded_operator already took care of
865 // recursively processing the input arguments, and thus the caller
867 fn walk_overloaded_operator(&mut self,
869 receiver
: &hir
::Expr
,
870 rhs
: Vec
<&hir
::Expr
>,
874 if !self.typer
.is_method_call(expr
.id
) {
879 PassArgs
::ByValue
=> {
880 self.consume_expr(receiver
);
882 self.consume_expr(arg
);
887 PassArgs
::ByRef
=> {}
,
890 self.walk_expr(receiver
);
892 // Arguments (but not receivers) to overloaded operator
893 // methods are implicitly autoref'd which sadly does not use
894 // adjustments, so we must hardcode the borrow here.
896 let r
= ty
::ReScope(self.tcx().region_maps
.node_extent(expr
.id
));
897 let bk
= ty
::ImmBorrow
;
900 self.borrow_expr(arg
, r
, bk
, OverloadedOperator
);
905 fn arm_move_mode(&mut self, discr_cmt
: mc
::cmt
<'tcx
>, arm
: &hir
::Arm
) -> TrackMatchMode
{
906 let mut mode
= Unknown
;
907 for pat
in &arm
.pats
{
908 self.determine_pat_move_mode(discr_cmt
.clone(), &**pat
, &mut mode
);
913 fn walk_arm(&mut self, discr_cmt
: mc
::cmt
<'tcx
>, arm
: &hir
::Arm
, mode
: MatchMode
) {
914 for pat
in &arm
.pats
{
915 self.walk_pat(discr_cmt
.clone(), &**pat
, mode
);
918 if let Some(ref guard
) = arm
.guard
{
919 self.consume_expr(&**guard
);
922 self.consume_expr(&*arm
.body
);
925 /// Walks a pat that occurs in isolation (i.e. top-level of fn
926 /// arg or let binding. *Not* a match arm or nested pat.)
927 fn walk_irrefutable_pat(&mut self, cmt_discr
: mc
::cmt
<'tcx
>, pat
: &hir
::Pat
) {
928 let mut mode
= Unknown
;
929 self.determine_pat_move_mode(cmt_discr
.clone(), pat
, &mut mode
);
930 let mode
= mode
.match_mode();
931 self.walk_pat(cmt_discr
, pat
, mode
);
934 /// Identifies any bindings within `pat` and accumulates within
935 /// `mode` whether the overall pattern/match structure is a move,
937 fn determine_pat_move_mode(&mut self,
938 cmt_discr
: mc
::cmt
<'tcx
>,
940 mode
: &mut TrackMatchMode
) {
941 debug
!("determine_pat_move_mode cmt_discr={:?} pat={:?}", cmt_discr
,
943 return_if_err
!(self.mc
.cat_pattern(cmt_discr
, pat
, |_mc
, cmt_pat
, pat
| {
944 let tcx
= self.tcx();
945 let def_map
= &self.tcx().def_map
;
946 if pat_util
::pat_is_binding(&def_map
.borrow(), pat
) {
948 hir
::PatIdent(hir
::BindByRef(_
), _
, _
) =>
949 mode
.lub(BorrowingMatch
),
950 hir
::PatIdent(hir
::BindByValue(_
), _
, _
) => {
951 match copy_or_move(self.typer
, &cmt_pat
, PatBindingMove
) {
952 Copy
=> mode
.lub(CopyingMatch
),
953 Move(_
) => mode
.lub(MovingMatch
),
959 "binding pattern not an identifier");
966 /// The core driver for walking a pattern; `match_mode` must be
967 /// established up front, e.g. via `determine_pat_move_mode` (see
968 /// also `walk_irrefutable_pat` for patterns that stand alone).
969 fn walk_pat(&mut self,
970 cmt_discr
: mc
::cmt
<'tcx
>,
972 match_mode
: MatchMode
) {
973 debug
!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr
,
977 let typer
= self.typer
;
978 let def_map
= &self.tcx().def_map
;
979 let delegate
= &mut self.delegate
;
980 return_if_err
!(mc
.cat_pattern(cmt_discr
.clone(), pat
, |mc
, cmt_pat
, pat
| {
981 if pat_util
::pat_is_binding(&def_map
.borrow(), pat
) {
984 debug
!("binding cmt_pat={:?} pat={:?} match_mode={:?}",
989 // pat_ty: the type of the binding being produced.
990 let pat_ty
= return_if_err
!(typer
.node_ty(pat
.id
));
992 // Each match binding is effectively an assignment to the
993 // binding being produced.
994 let def
= def_map
.borrow().get(&pat
.id
).unwrap().full_def();
995 match mc
.cat_def(pat
.id
, pat
.span
, pat_ty
, def
) {
997 delegate
.mutate(pat
.id
, pat
.span
, binding_cmt
, MutateMode
::Init
);
1002 // It is also a borrow or copy/move of the value being matched.
1004 hir
::PatIdent(hir
::BindByRef(m
), _
, _
) => {
1005 if let ty
::TyRef(&r
, _
) = pat_ty
.sty
{
1006 let bk
= ty
::BorrowKind
::from_mutbl(m
);
1007 delegate
.borrow(pat
.id
, pat
.span
, cmt_pat
,
1011 hir
::PatIdent(hir
::BindByValue(_
), _
, _
) => {
1012 let mode
= copy_or_move(typer
, &cmt_pat
, PatBindingMove
);
1013 debug
!("walk_pat binding consuming pat");
1014 delegate
.consume_pat(pat
, cmt_pat
, mode
);
1019 "binding pattern not an identifier");
1024 hir
::PatVec(_
, Some(ref slice_pat
), _
) => {
1025 // The `slice_pat` here creates a slice into
1026 // the original vector. This is effectively a
1027 // borrow of the elements of the vector being
1030 let (slice_cmt
, slice_mutbl
, slice_r
) =
1031 return_if_err
!(mc
.cat_slice_pattern(cmt_pat
, &**slice_pat
));
1033 // Note: We declare here that the borrow
1034 // occurs upon entering the `[...]`
1035 // pattern. This implies that something like
1036 // `[a; b]` where `a` is a move is illegal,
1037 // because the borrow is already in effect.
1038 // In fact such a move would be safe-ish, but
1039 // it effectively *requires* that we use the
1040 // nulling out semantics to indicate when a
1041 // value has been moved, which we are trying
1042 // to move away from. Otherwise, how can we
1043 // indicate that the first element in the
1044 // vector has been moved? Eventually, we
1045 // could perhaps modify this rule to permit
1046 // `[..a, b]` where `b` is a move, because in
1047 // that case we can adjust the length of the
1048 // original vec accordingly, but we'd have to
1049 // make trans do the right thing, and it would
1050 // only work for `Box<[T]>`s. It seems simpler
1051 // to just require that people call
1052 // `vec.pop()` or `vec.unshift()`.
1053 let slice_bk
= ty
::BorrowKind
::from_mutbl(slice_mutbl
);
1054 delegate
.borrow(pat
.id
, pat
.span
,
1056 slice_bk
, RefBinding
);
1063 // Do a second pass over the pattern, calling `matched_pat` on
1064 // the interior nodes (enum variants and structs), as opposed
1065 // to the above loop's visit of than the bindings that form
1066 // the leaves of the pattern tree structure.
1067 return_if_err
!(mc
.cat_pattern(cmt_discr
, pat
, |mc
, cmt_pat
, pat
| {
1068 let def_map
= def_map
.borrow();
1069 let tcx
= typer
.tcx
;
1072 hir
::PatEnum(_
, _
) | hir
::PatQPath(..) |
1073 hir
::PatIdent(_
, _
, None
) | hir
::PatStruct(..) => {
1074 match def_map
.get(&pat
.id
).map(|d
| d
.full_def()) {
1076 // no definition found: pat is not a
1077 // struct or enum pattern.
1080 Some(def
::DefVariant(enum_did
, variant_did
, _is_struct
)) => {
1082 if tcx
.lookup_adt_def(enum_did
).is_univariant() {
1085 let cmt_pat_ty
= cmt_pat
.ty
;
1086 mc
.cat_downcast(pat
, cmt_pat
, cmt_pat_ty
, variant_did
)
1089 debug
!("variant downcast_cmt={:?} pat={:?}",
1093 delegate
.matched_pat(pat
, downcast_cmt
, match_mode
);
1096 Some(def
::DefStruct(..)) | Some(def
::DefTy(_
, false)) => {
1097 // A struct (in either the value or type
1098 // namespace; we encounter the former on
1099 // e.g. patterns for unit structs).
1101 debug
!("struct cmt_pat={:?} pat={:?}",
1105 delegate
.matched_pat(pat
, cmt_pat
, match_mode
);
1108 Some(def
::DefConst(..)) |
1109 Some(def
::DefAssociatedConst(..)) |
1110 Some(def
::DefLocal(..)) => {
1111 // This is a leaf (i.e. identifier binding
1112 // or constant value to match); thus no
1113 // `matched_pat` call.
1116 Some(def @ def
::DefTy(_
, true)) => {
1117 // An enum's type -- should never be in a
1120 if !tcx
.sess
.has_errors() {
1121 let msg
= format
!("Pattern has unexpected type: {:?} and type {:?}",
1124 tcx
.sess
.span_bug(pat
.span
, &msg
)
1129 // Remaining cases are e.g. DefFn, to
1130 // which identifiers within patterns
1131 // should not resolve. However, we do
1132 // encouter this when using the
1133 // expr-use-visitor during typeck. So just
1134 // ignore it, an error should have been
1137 if !tcx
.sess
.has_errors() {
1138 let msg
= format
!("Pattern has unexpected def: {:?} and type {:?}",
1141 tcx
.sess
.span_bug(pat
.span
, &msg
[..])
1147 hir
::PatIdent(_
, _
, Some(_
)) => {
1148 // Do nothing; this is a binding (not an enum
1149 // variant or struct), and the cat_pattern call
1150 // will visit the substructure recursively.
1153 hir
::PatWild
| hir
::PatTup(..) | hir
::PatBox(..) |
1154 hir
::PatRegion(..) | hir
::PatLit(..) | hir
::PatRange(..) |
1155 hir
::PatVec(..) => {
1156 // Similarly, each of these cases does not
1157 // correspond to an enum variant or struct, so we
1158 // do not do any `matched_pat` calls for these
1165 fn walk_captures(&mut self, closure_expr
: &hir
::Expr
) {
1166 debug
!("walk_captures({:?})", closure_expr
);
1168 self.tcx().with_freevars(closure_expr
.id
, |freevars
| {
1169 for freevar
in freevars
{
1170 let id_var
= freevar
.def
.var_id();
1171 let upvar_id
= ty
::UpvarId
{ var_id
: id_var
,
1172 closure_expr_id
: closure_expr
.id
};
1173 let upvar_capture
= self.typer
.upvar_capture(upvar_id
).unwrap();
1174 let cmt_var
= return_if_err
!(self.cat_captured_var(closure_expr
.id
,
1177 match upvar_capture
{
1178 ty
::UpvarCapture
::ByValue
=> {
1179 let mode
= copy_or_move(self.typer
, &cmt_var
, CaptureMove
);
1180 self.delegate
.consume(closure_expr
.id
, freevar
.span
, cmt_var
, mode
);
1182 ty
::UpvarCapture
::ByRef(upvar_borrow
) => {
1183 self.delegate
.borrow(closure_expr
.id
,
1186 upvar_borrow
.region
,
1188 ClosureCapture(freevar
.span
));
1195 fn cat_captured_var(&mut self,
1196 closure_id
: ast
::NodeId
,
1198 upvar_def
: def
::Def
)
1199 -> mc
::McResult
<mc
::cmt
<'tcx
>> {
1200 // Create the cmt for the variable being borrowed, from the
1201 // caller's perspective
1202 let var_id
= upvar_def
.var_id();
1203 let var_ty
= try
!(self.typer
.node_ty(var_id
));
1204 self.mc
.cat_def(closure_id
, closure_span
, var_ty
, upvar_def
)
1208 fn copy_or_move
<'a
, 'tcx
>(typer
: &infer
::InferCtxt
<'a
, 'tcx
>,
1209 cmt
: &mc
::cmt
<'tcx
>,
1210 move_reason
: MoveReason
)
1213 if typer
.type_moves_by_default(cmt
.ty
, cmt
.span
) {