1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A classic liveness analysis based on dataflow over the AST. Computes,
12 //! for each local variable in a function, whether that variable is live
13 //! at a given point. Program execution points are identified by their
18 //! The basic model is that each local variable is assigned an index. We
19 //! represent sets of local variables using a vector indexed by this
20 //! index. The value in the vector is either 0, indicating the variable
21 //! is dead, or the id of an expression that uses the variable.
23 //! We conceptually walk over the AST in reverse execution order. If we
24 //! find a use of a variable, we add it to the set of live variables. If
25 //! we find an assignment to a variable, we remove it from the set of live
26 //! variables. When we have to merge two flows, we take the union of
27 //! those two flows---if the variable is live on both paths, we simply
28 //! pick one id. In the event of loops, we continue doing this until a
29 //! fixed point is reached.
31 //! ## Checking initialization
33 //! At the function entry point, all variables must be dead. If this is
34 //! not the case, we can report an error using the id found in the set of
35 //! live variables, which identifies a use of the variable which is not
36 //! dominated by an assignment.
40 //! After each explicit move, the variable must be dead.
42 //! ## Computing last uses
44 //! Any use of the variable where the variable is dead afterwards is a
47 //! # Implementation details
49 //! The actual implementation contains two (nested) walks over the AST.
50 //! The outer walk has the job of building up the ir_maps instance for the
51 //! enclosing function. On the way down the tree, it identifies those AST
52 //! nodes and variable IDs that will be needed for the liveness analysis
53 //! and assigns them contiguous IDs. The liveness id for an AST node is
54 //! called a `live_node` (it's a newtype'd u32) and the id for a variable
55 //! is called a `variable` (another newtype'd u32).
57 //! On the way back up the tree, as we are about to exit from a function
58 //! declaration we allocate a `liveness` instance. Now that we know
59 //! precisely how many nodes and variables we need, we can allocate all
60 //! the various arrays that we will need to precisely the right size. We then
61 //! perform the actual propagation on the `liveness` instance.
63 //! This propagation is encoded in the various `propagate_through_*()`
64 //! methods. It effectively does a reverse walk of the AST; whenever we
65 //! reach a loop node, we iterate until a fixed point is reached.
67 //! ## The `RWU` struct
69 //! At each live node `N`, we track three pieces of information for each
70 //! variable `V` (these are encapsulated in the `RWU` struct):
72 //! - `reader`: the `LiveNode` ID of some node which will read the value
73 //! that `V` holds on entry to `N`. Formally: a node `M` such
74 //! that there exists a path `P` from `N` to `M` where `P` does not
75 //! write `V`. If the `reader` is `invalid_node()`, then the current
76 //! value will never be read (the variable is dead, essentially).
78 //! - `writer`: the `LiveNode` ID of some node which will write the
79 //! variable `V` and which is reachable from `N`. Formally: a node `M`
80 //! such that there exists a path `P` from `N` to `M` and `M` writes
81 //! `V`. If the `writer` is `invalid_node()`, then there is no writer
82 //! of `V` that follows `N`.
84 //! - `used`: a boolean value indicating whether `V` is *used*. We
85 //! distinguish a *read* from a *use* in that a *use* is some read that
86 //! is not just used to generate a new value. For example, `x += 1` is
87 //! a read but not a use. This is used to generate better warnings.
89 //! ## Special Variables
91 //! We generate various special variables for various, well, special purposes.
92 //! These are described in the `specials` struct:
94 //! - `exit_ln`: a live node that is generated to represent every 'exit' from
95 //! the function, whether it be by explicit return, panic, or other means.
97 //! - `fallthrough_ln`: a live node that represents a fallthrough
99 //! - `clean_exit_var`: a synthetic variable that is only 'read' from the
100 //! fallthrough node. It is only live if the function could converge
101 //! via means other than an explicit `return` expression. That is, it is
102 //! only dead if the end of the function's block can never be reached.
103 //! It is the responsibility of typeck to ensure that there are no
104 //! `return` expressions in a function declared as diverging.
106 use self::LoopKind
::*;
107 use self::LiveNodeKind
::*;
108 use self::VarKind
::*;
112 use ty
::{self, TyCtxt}
;
114 use errors
::Applicability
;
115 use util
::nodemap
::{NodeMap, HirIdMap, HirIdSet}
;
117 use std
::collections
::VecDeque
;
119 use std
::io
::prelude
::*;
122 use syntax
::ast
::{self, NodeId}
;
124 use syntax
::symbol
::keywords
;
125 use syntax_pos
::Span
;
127 use hir
::{Expr, HirId}
;
129 use hir
::intravisit
::{self, Visitor, FnKind, NestedVisitorMap}
;
131 /// For use with `propagate_through_loop`.
133 /// An endless `loop` loop.
135 /// A `while` loop, with the given expression as condition.
/// Index of a local variable within the enclosing function body.
/// Assigned contiguously by the IR-building pass (see module docs).
#[derive(Copy, Clone, PartialEq)]
struct Variable(u32);
/// Index of a liveness node: an AST node relevant to the analysis,
/// numbered contiguously by the IR-building pass (see module docs).
#[derive(Copy, Clone, PartialEq)]
struct LiveNode(u32);
146 fn get(&self) -> usize { self.0 as usize }
150 fn get(&self) -> usize { self.0 as usize }
153 #[derive(Copy, Clone, PartialEq, Debug)]
161 fn live_node_kind_to_string(lnk
: LiveNodeKind
, tcx
: TyCtxt
<'_
, '_
, '_
>) -> String
{
162 let cm
= tcx
.sess
.source_map();
165 format
!("Free var node [{}]", cm
.span_to_string(s
))
168 format
!("Expr node [{}]", cm
.span_to_string(s
))
171 format
!("Var def node [{}]", cm
.span_to_string(s
))
173 ExitNode
=> "Exit node".to_owned(),
177 impl<'a
, 'tcx
> Visitor
<'tcx
> for IrMaps
<'a
, 'tcx
> {
178 fn nested_visit_map
<'this
>(&'this
mut self) -> NestedVisitorMap
<'this
, 'tcx
> {
179 NestedVisitorMap
::OnlyBodies(&self.tcx
.hir
)
182 fn visit_fn(&mut self, fk
: FnKind
<'tcx
>, fd
: &'tcx hir
::FnDecl
,
183 b
: hir
::BodyId
, s
: Span
, id
: NodeId
) {
184 visit_fn(self, fk
, fd
, b
, s
, id
);
187 fn visit_local(&mut self, l
: &'tcx hir
::Local
) { visit_local(self, l); }
188 fn visit_expr(&mut self, ex
: &'tcx Expr
) { visit_expr(self, ex); }
189 fn visit_arm(&mut self, a
: &'tcx hir
::Arm
) { visit_arm(self, a); }
192 pub fn check_crate
<'a
, 'tcx
>(tcx
: TyCtxt
<'a
, 'tcx
, 'tcx
>) {
193 tcx
.hir
.krate().visit_all_item_likes(&mut IrMaps
::new(tcx
).as_deep_visitor());
194 tcx
.sess
.abort_if_errors();
197 impl fmt
::Debug
for LiveNode
{
198 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
199 write
!(f
, "ln({})", self.get())
203 impl fmt
::Debug
for Variable
{
204 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
205 write
!(f
, "v({})", self.get())
209 // ______________________________________________________________________
212 // This is the first pass and the one that drives the main
213 // computation. It walks up and down the IR once. On the way down,
214 // we count for each function the number of variables as well as
215 // liveness nodes. A liveness node is basically an expression or
216 // capture clause that does something of interest: either it has
217 // interesting control flow or it uses/defines a local variable.
219 // On the way back up, at each function node we create liveness sets
220 // (we now know precisely how big to make our various vectors and so
221 // forth) and then do the data-flow propagation to compute the set
222 // of live variables at each program point.
224 // Finally, we run back over the IR one last time and, using the
225 // computed liveness, check various safety conditions. For example,
226 // there must be no live nodes at the definition site for a variable
227 // unless it has an initializer. Similarly, each non-mutable local
228 // variable must not be assigned if there is some successor
229 // assignment. And so forth.
232 fn is_valid(&self) -> bool
{
237 fn invalid_node() -> LiveNode { LiveNode(u32::MAX) }
244 #[derive(Copy, Clone, Debug)]
251 #[derive(Copy, Clone, Debug)]
253 Arg(HirId
, ast
::Name
),
258 struct IrMaps
<'a
, 'tcx
: 'a
> {
259 tcx
: TyCtxt
<'a
, 'tcx
, 'tcx
>,
260 num_live_nodes
: usize,
262 live_node_map
: HirIdMap
<LiveNode
>,
263 variable_map
: HirIdMap
<Variable
>,
264 capture_info_map
: NodeMap
<Rc
<Vec
<CaptureInfo
>>>,
265 var_kinds
: Vec
<VarKind
>,
266 lnks
: Vec
<LiveNodeKind
>,
269 impl<'a
, 'tcx
> IrMaps
<'a
, 'tcx
> {
270 fn new(tcx
: TyCtxt
<'a
, 'tcx
, 'tcx
>) -> IrMaps
<'a
, 'tcx
> {
275 live_node_map
: HirIdMap(),
276 variable_map
: HirIdMap(),
277 capture_info_map
: NodeMap(),
278 var_kinds
: Vec
::new(),
283 fn add_live_node(&mut self, lnk
: LiveNodeKind
) -> LiveNode
{
284 let ln
= LiveNode(self.num_live_nodes
as u32);
286 self.num_live_nodes
+= 1;
288 debug
!("{:?} is of kind {}", ln
,
289 live_node_kind_to_string(lnk
, self.tcx
));
294 fn add_live_node_for_node(&mut self, hir_id
: HirId
, lnk
: LiveNodeKind
) {
295 let ln
= self.add_live_node(lnk
);
296 self.live_node_map
.insert(hir_id
, ln
);
298 debug
!("{:?} is node {:?}", ln
, hir_id
);
301 fn add_variable(&mut self, vk
: VarKind
) -> Variable
{
302 let v
= Variable(self.num_vars
as u32);
303 self.var_kinds
.push(vk
);
307 Local(LocalInfo { id: node_id, .. }
) | Arg(node_id
, _
) => {
308 self.variable_map
.insert(node_id
, v
);
313 debug
!("{:?} is {:?}", v
, vk
);
318 fn variable(&self, hir_id
: HirId
, span
: Span
) -> Variable
{
319 match self.variable_map
.get(&hir_id
) {
322 span_bug
!(span
, "no variable registered for id {:?}", hir_id
);
327 fn variable_name(&self, var
: Variable
) -> String
{
328 match self.var_kinds
[var
.get()] {
329 Local(LocalInfo { name, .. }
) | Arg(_
, name
) => {
332 CleanExit
=> "<clean-exit>".to_owned()
336 fn variable_is_shorthand(&self, var
: Variable
) -> bool
{
337 match self.var_kinds
[var
.get()] {
338 Local(LocalInfo { is_shorthand, .. }
) => is_shorthand
,
339 Arg(..) | CleanExit
=> false
343 fn set_captures(&mut self, node_id
: NodeId
, cs
: Vec
<CaptureInfo
>) {
344 self.capture_info_map
.insert(node_id
, Rc
::new(cs
));
347 fn lnk(&self, ln
: LiveNode
) -> LiveNodeKind
{
352 fn visit_fn
<'a
, 'tcx
: 'a
>(ir
: &mut IrMaps
<'a
, 'tcx
>,
354 decl
: &'tcx hir
::FnDecl
,
355 body_id
: hir
::BodyId
,
360 // swap in a new set of IR maps for this function body:
361 let mut fn_maps
= IrMaps
::new(ir
.tcx
);
363 // Don't run unused pass for #[derive()]
364 if let FnKind
::Method(..) = fk
{
365 let parent
= ir
.tcx
.hir
.get_parent(id
);
366 if let Some(Node
::Item(i
)) = ir
.tcx
.hir
.find(parent
) {
367 if i
.attrs
.iter().any(|a
| a
.check_name("automatically_derived")) {
373 debug
!("creating fn_maps: {:?}", &fn_maps
as *const IrMaps
<'_
, '_
>);
375 let body
= ir
.tcx
.hir
.body(body_id
);
377 for arg
in &body
.arguments
{
378 arg
.pat
.each_binding(|_bm
, hir_id
, _x
, ident
| {
379 debug
!("adding argument {:?}", hir_id
);
380 fn_maps
.add_variable(Arg(hir_id
, ident
.name
));
384 // gather up the various local variables, significant expressions,
386 intravisit
::walk_fn(&mut fn_maps
, fk
, decl
, body_id
, sp
, id
);
389 let mut lsets
= Liveness
::new(&mut fn_maps
, body_id
);
390 let entry_ln
= lsets
.compute(&body
.value
);
392 // check for various error conditions
393 lsets
.visit_body(body
);
394 lsets
.warn_about_unused_args(body
, entry_ln
);
397 fn add_from_pat
<'a
, 'tcx
>(ir
: &mut IrMaps
<'a
, 'tcx
>, pat
: &P
<hir
::Pat
>) {
398 // For struct patterns, take note of which fields used shorthand
399 // (`x` rather than `x: x`).
400 let mut shorthand_field_ids
= HirIdSet();
401 let mut pats
= VecDeque
::new();
403 while let Some(pat
) = pats
.pop_front() {
406 Binding(_
, _
, _
, ref inner_pat
) => {
407 pats
.extend(inner_pat
.iter());
409 Struct(_
, ref fields
, _
) => {
410 for field
in fields
{
411 if field
.node
.is_shorthand
{
412 shorthand_field_ids
.insert(field
.node
.pat
.hir_id
);
416 Ref(ref inner_pat
, _
) |
417 Box(ref inner_pat
) => {
418 pats
.push_back(inner_pat
);
420 TupleStruct(_
, ref inner_pats
, _
) |
421 Tuple(ref inner_pats
, _
) => {
422 pats
.extend(inner_pats
.iter());
424 Slice(ref pre_pats
, ref inner_pat
, ref post_pats
) => {
425 pats
.extend(pre_pats
.iter());
426 pats
.extend(inner_pat
.iter());
427 pats
.extend(post_pats
.iter());
433 pat
.each_binding(|_bm
, hir_id
, _sp
, ident
| {
434 ir
.add_live_node_for_node(hir_id
, VarDefNode(ident
.span
));
435 ir
.add_variable(Local(LocalInfo
{
438 is_shorthand
: shorthand_field_ids
.contains(&hir_id
)
443 fn visit_local
<'a
, 'tcx
>(ir
: &mut IrMaps
<'a
, 'tcx
>, local
: &'tcx hir
::Local
) {
444 add_from_pat(ir
, &local
.pat
);
445 intravisit
::walk_local(ir
, local
);
448 fn visit_arm
<'a
, 'tcx
>(ir
: &mut IrMaps
<'a
, 'tcx
>, arm
: &'tcx hir
::Arm
) {
449 for pat
in &arm
.pats
{
450 add_from_pat(ir
, pat
);
452 intravisit
::walk_arm(ir
, arm
);
455 fn visit_expr
<'a
, 'tcx
>(ir
: &mut IrMaps
<'a
, 'tcx
>, expr
: &'tcx Expr
) {
457 // live nodes required for uses or definitions of variables:
458 hir
::ExprKind
::Path(hir
::QPath
::Resolved(_
, ref path
)) => {
459 debug
!("expr {}: path that leads to {:?}", expr
.id
, path
.def
);
460 if let Def
::Local(..) = path
.def
{
461 ir
.add_live_node_for_node(expr
.hir_id
, ExprNode(expr
.span
));
463 intravisit
::walk_expr(ir
, expr
);
465 hir
::ExprKind
::Closure(..) => {
466 // Interesting control flow (for loops can contain labeled
467 // breaks or continues)
468 ir
.add_live_node_for_node(expr
.hir_id
, ExprNode(expr
.span
));
470 // Make a live_node for each captured variable, with the span
471 // being the location that the variable is used. This results
472 // in better error messages than just pointing at the closure
473 // construction site.
474 let mut call_caps
= Vec
::new();
475 ir
.tcx
.with_freevars(expr
.id
, |freevars
| {
476 call_caps
.extend(freevars
.iter().filter_map(|fv
| {
477 if let Def
::Local(rv
) = fv
.def
{
478 let fv_ln
= ir
.add_live_node(FreeVarNode(fv
.span
));
479 let var_hid
= ir
.tcx
.hir
.node_to_hir_id(rv
);
480 Some(CaptureInfo { ln: fv_ln, var_hid }
)
486 ir
.set_captures(expr
.id
, call_caps
);
488 intravisit
::walk_expr(ir
, expr
);
491 // live nodes required for interesting control flow:
492 hir
::ExprKind
::If(..) |
493 hir
::ExprKind
::Match(..) |
494 hir
::ExprKind
::While(..) |
495 hir
::ExprKind
::Loop(..) => {
496 ir
.add_live_node_for_node(expr
.hir_id
, ExprNode(expr
.span
));
497 intravisit
::walk_expr(ir
, expr
);
499 hir
::ExprKind
::Binary(op
, ..) if op
.node
.is_lazy() => {
500 ir
.add_live_node_for_node(expr
.hir_id
, ExprNode(expr
.span
));
501 intravisit
::walk_expr(ir
, expr
);
504 // otherwise, live nodes are not required:
505 hir
::ExprKind
::Index(..) |
506 hir
::ExprKind
::Field(..) |
507 hir
::ExprKind
::Array(..) |
508 hir
::ExprKind
::Call(..) |
509 hir
::ExprKind
::MethodCall(..) |
510 hir
::ExprKind
::Tup(..) |
511 hir
::ExprKind
::Binary(..) |
512 hir
::ExprKind
::AddrOf(..) |
513 hir
::ExprKind
::Cast(..) |
514 hir
::ExprKind
::Unary(..) |
515 hir
::ExprKind
::Break(..) |
516 hir
::ExprKind
::Continue(_
) |
517 hir
::ExprKind
::Lit(_
) |
518 hir
::ExprKind
::Ret(..) |
519 hir
::ExprKind
::Block(..) |
520 hir
::ExprKind
::Assign(..) |
521 hir
::ExprKind
::AssignOp(..) |
522 hir
::ExprKind
::Struct(..) |
523 hir
::ExprKind
::Repeat(..) |
524 hir
::ExprKind
::InlineAsm(..) |
525 hir
::ExprKind
::Box(..) |
526 hir
::ExprKind
::Yield(..) |
527 hir
::ExprKind
::Type(..) |
528 hir
::ExprKind
::Path(hir
::QPath
::TypeRelative(..)) => {
529 intravisit
::walk_expr(ir
, expr
);
534 // ______________________________________________________________________
535 // Computing liveness sets
537 // Actually we compute just a bit more than just liveness, but we use
538 // the same basic propagation framework in all cases.
540 #[derive(Clone, Copy)]
547 /// Conceptually, this is like a `Vec<RWU>`. But the number of `RWU`s can get
548 /// very large, so it uses a more compact representation that takes advantage
549 /// of the fact that when the number of `RWU`s is large, most of them have an
550 /// invalid reader and an invalid writer.
552 /// Each entry in `packed_rwus` is either INV_INV_FALSE, INV_INV_TRUE, or
553 /// an index into `unpacked_rwus`. In the common cases, this compacts the
554 /// 65 bits of data into 32; in the uncommon cases, it expands the 65 bits
557 /// More compact representations are possible -- e.g. use only 2 bits per
558 /// packed `RWU` and make the secondary table a HashMap that maps from
559 /// indices to `RWU`s -- but this one strikes a good balance between size
561 packed_rwus
: Vec
<u32>,
562 unpacked_rwus
: Vec
<RWU
>,
// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: false }`.
const INV_INV_FALSE: u32 = u32::MAX;
// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: true }`.
const INV_INV_TRUE: u32 = u32::MAX - 1;
572 fn new(num_rwus
: usize) -> RWUTable
{
574 packed_rwus
: vec
![INV_INV_FALSE
; num_rwus
],
575 unpacked_rwus
: vec
![],
579 fn get(&self, idx
: usize) -> RWU
{
580 let packed_rwu
= self.packed_rwus
[idx
];
582 INV_INV_FALSE
=> RWU { reader: invalid_node(), writer: invalid_node(), used: false }
,
583 INV_INV_TRUE
=> RWU { reader: invalid_node(), writer: invalid_node(), used: true }
,
584 _
=> self.unpacked_rwus
[packed_rwu
as usize],
588 fn get_reader(&self, idx
: usize) -> LiveNode
{
589 let packed_rwu
= self.packed_rwus
[idx
];
591 INV_INV_FALSE
| INV_INV_TRUE
=> invalid_node(),
592 _
=> self.unpacked_rwus
[packed_rwu
as usize].reader
,
596 fn get_writer(&self, idx
: usize) -> LiveNode
{
597 let packed_rwu
= self.packed_rwus
[idx
];
599 INV_INV_FALSE
| INV_INV_TRUE
=> invalid_node(),
600 _
=> self.unpacked_rwus
[packed_rwu
as usize].writer
,
604 fn get_used(&self, idx
: usize) -> bool
{
605 let packed_rwu
= self.packed_rwus
[idx
];
607 INV_INV_FALSE
=> false,
608 INV_INV_TRUE
=> true,
609 _
=> self.unpacked_rwus
[packed_rwu
as usize].used
,
614 fn copy_packed(&mut self, dst_idx
: usize, src_idx
: usize) {
615 self.packed_rwus
[dst_idx
] = self.packed_rwus
[src_idx
];
618 fn assign_unpacked(&mut self, idx
: usize, rwu
: RWU
) {
619 if rwu
.reader
== invalid_node() && rwu
.writer
== invalid_node() {
620 // When we overwrite an indexing entry in `self.packed_rwus` with
621 // `INV_INV_{TRUE,FALSE}` we don't remove the corresponding entry
622 // from `self.unpacked_rwus`; it's not worth the effort, and we
623 // can't have entries shifting around anyway.
624 self.packed_rwus
[idx
] = if rwu
.used
{
630 // Add a new RWU to `unpacked_rwus` and make `packed_rwus[idx]`
632 self.packed_rwus
[idx
] = self.unpacked_rwus
.len() as u32;
633 self.unpacked_rwus
.push(rwu
);
637 fn assign_inv_inv(&mut self, idx
: usize) {
638 self.packed_rwus
[idx
] = if self.get_used(idx
) {
646 #[derive(Copy, Clone)]
649 fallthrough_ln
: LiveNode
,
650 clean_exit_var
: Variable
// Bit flags for `acc()`: the access reads the variable's value...
const ACC_READ: u32 = 1;
// ...writes the variable...
const ACC_WRITE: u32 = 2;
// ...and counts as a "use" (not just a read feeding a new value).
const ACC_USE: u32 = 4;
657 struct Liveness
<'a
, 'tcx
: 'a
> {
658 ir
: &'a
mut IrMaps
<'a
, 'tcx
>,
659 tables
: &'a ty
::TypeckTables
<'tcx
>,
661 successors
: Vec
<LiveNode
>,
664 // mappings from loop node ID to LiveNode
665 // ("break" label should map to loop node ID,
666 // it probably doesn't now)
667 break_ln
: NodeMap
<LiveNode
>,
668 cont_ln
: NodeMap
<LiveNode
>,
671 impl<'a
, 'tcx
> Liveness
<'a
, 'tcx
> {
672 fn new(ir
: &'a
mut IrMaps
<'a
, 'tcx
>, body
: hir
::BodyId
) -> Liveness
<'a
, 'tcx
> {
673 // Special nodes and variables:
674 // - exit_ln represents the end of the fn, either by return or panic
675 // - implicit_ret_var is a pseudo-variable that represents
676 // an implicit return
677 let specials
= Specials
{
678 exit_ln
: ir
.add_live_node(ExitNode
),
679 fallthrough_ln
: ir
.add_live_node(ExitNode
),
680 clean_exit_var
: ir
.add_variable(CleanExit
)
683 let tables
= ir
.tcx
.body_tables(body
);
685 let num_live_nodes
= ir
.num_live_nodes
;
686 let num_vars
= ir
.num_vars
;
692 successors
: vec
![invalid_node(); num_live_nodes
],
693 rwu_table
: RWUTable
::new(num_live_nodes
* num_vars
),
699 fn live_node(&self, hir_id
: HirId
, span
: Span
) -> LiveNode
{
700 match self.ir
.live_node_map
.get(&hir_id
) {
703 // This must be a mismatch between the ir_map construction
704 // above and the propagation code below; the two sets of
705 // code have to agree about which AST nodes are worth
706 // creating liveness nodes for.
709 "no live node registered for node {:?}",
715 fn variable(&self, hir_id
: HirId
, span
: Span
) -> Variable
{
716 self.ir
.variable(hir_id
, span
)
719 fn pat_bindings
<F
>(&mut self, pat
: &hir
::Pat
, mut f
: F
) where
720 F
: FnMut(&mut Liveness
<'a
, 'tcx
>, LiveNode
, Variable
, Span
, HirId
),
722 pat
.each_binding(|_bm
, hir_id
, sp
, n
| {
723 let ln
= self.live_node(hir_id
, sp
);
724 let var
= self.variable(hir_id
, n
.span
);
725 f(self, ln
, var
, n
.span
, hir_id
);
729 fn arm_pats_bindings
<F
>(&mut self, pat
: Option
<&hir
::Pat
>, f
: F
) where
730 F
: FnMut(&mut Liveness
<'a
, 'tcx
>, LiveNode
, Variable
, Span
, HirId
),
732 if let Some(pat
) = pat
{
733 self.pat_bindings(pat
, f
);
737 fn define_bindings_in_pat(&mut self, pat
: &hir
::Pat
, succ
: LiveNode
)
739 self.define_bindings_in_arm_pats(Some(pat
), succ
)
742 fn define_bindings_in_arm_pats(&mut self, pat
: Option
<&hir
::Pat
>, succ
: LiveNode
)
745 self.arm_pats_bindings(pat
, |this
, ln
, var
, _sp
, _id
| {
746 this
.init_from_succ(ln
, succ
);
747 this
.define(ln
, var
);
753 fn idx(&self, ln
: LiveNode
, var
: Variable
) -> usize {
754 ln
.get() * self.ir
.num_vars
+ var
.get()
757 fn live_on_entry(&self, ln
: LiveNode
, var
: Variable
) -> Option
<LiveNodeKind
> {
758 assert
!(ln
.is_valid());
759 let reader
= self.rwu_table
.get_reader(self.idx(ln
, var
));
760 if reader
.is_valid() { Some(self.ir.lnk(reader)) }
else { None }
763 // Is this variable live on entry to any of its successor nodes?
764 fn live_on_exit(&self, ln
: LiveNode
, var
: Variable
)
765 -> Option
<LiveNodeKind
> {
766 let successor
= self.successors
[ln
.get()];
767 self.live_on_entry(successor
, var
)
770 fn used_on_entry(&self, ln
: LiveNode
, var
: Variable
) -> bool
{
771 assert
!(ln
.is_valid());
772 self.rwu_table
.get_used(self.idx(ln
, var
))
775 fn assigned_on_entry(&self, ln
: LiveNode
, var
: Variable
)
776 -> Option
<LiveNodeKind
> {
777 assert
!(ln
.is_valid());
778 let writer
= self.rwu_table
.get_writer(self.idx(ln
, var
));
779 if writer
.is_valid() { Some(self.ir.lnk(writer)) }
else { None }
782 fn assigned_on_exit(&self, ln
: LiveNode
, var
: Variable
)
783 -> Option
<LiveNodeKind
> {
784 let successor
= self.successors
[ln
.get()];
785 self.assigned_on_entry(successor
, var
)
788 fn indices2
<F
>(&mut self, ln
: LiveNode
, succ_ln
: LiveNode
, mut op
: F
) where
789 F
: FnMut(&mut Liveness
<'a
, 'tcx
>, usize, usize),
791 let node_base_idx
= self.idx(ln
, Variable(0));
792 let succ_base_idx
= self.idx(succ_ln
, Variable(0));
793 for var_idx
in 0..self.ir
.num_vars
{
794 op(self, node_base_idx
+ var_idx
, succ_base_idx
+ var_idx
);
798 fn write_vars
<F
>(&self,
802 -> io
::Result
<()> where
803 F
: FnMut(usize) -> LiveNode
,
805 let node_base_idx
= self.idx(ln
, Variable(0));
806 for var_idx
in 0..self.ir
.num_vars
{
807 let idx
= node_base_idx
+ var_idx
;
808 if test(idx
).is_valid() {
809 write
!(wr
, " {:?}", Variable(var_idx
as u32))?
;
816 #[allow(unused_must_use)]
817 fn ln_str(&self, ln
: LiveNode
) -> String
{
818 let mut wr
= Vec
::new();
820 let wr
= &mut wr
as &mut dyn Write
;
821 write
!(wr
, "[ln({:?}) of kind {:?} reads", ln
.get(), self.ir
.lnk(ln
));
822 self.write_vars(wr
, ln
, |idx
| self.rwu_table
.get_reader(idx
));
823 write
!(wr
, " writes");
824 self.write_vars(wr
, ln
, |idx
| self.rwu_table
.get_writer(idx
));
825 write
!(wr
, " precedes {:?}]", self.successors
[ln
.get()]);
827 String
::from_utf8(wr
).unwrap()
830 fn init_empty(&mut self, ln
: LiveNode
, succ_ln
: LiveNode
) {
831 self.successors
[ln
.get()] = succ_ln
;
833 // It is not necessary to initialize the RWUs here because they are all
834 // set to INV_INV_FALSE when they are created, and the sets only grow
835 // during iterations.
838 fn init_from_succ(&mut self, ln
: LiveNode
, succ_ln
: LiveNode
) {
839 // more efficient version of init_empty() / merge_from_succ()
840 self.successors
[ln
.get()] = succ_ln
;
842 self.indices2(ln
, succ_ln
, |this
, idx
, succ_idx
| {
843 this
.rwu_table
.copy_packed(idx
, succ_idx
);
845 debug
!("init_from_succ(ln={}, succ={})",
846 self.ln_str(ln
), self.ln_str(succ_ln
));
849 fn merge_from_succ(&mut self,
854 if ln
== succ_ln { return false; }
856 let mut changed
= false;
857 self.indices2(ln
, succ_ln
, |this
, idx
, succ_idx
| {
858 let mut rwu
= this
.rwu_table
.get(idx
);
859 let succ_rwu
= this
.rwu_table
.get(succ_idx
);
860 if succ_rwu
.reader
.is_valid() && !rwu
.reader
.is_valid() {
861 rwu
.reader
= succ_rwu
.reader
;
865 if succ_rwu
.writer
.is_valid() && !rwu
.writer
.is_valid() {
866 rwu
.writer
= succ_rwu
.writer
;
870 if succ_rwu
.used
&& !rwu
.used
{
876 this
.rwu_table
.assign_unpacked(idx
, rwu
);
880 debug
!("merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})",
881 ln
, self.ln_str(succ_ln
), first_merge
, changed
);
885 // Indicates that a local variable was *defined*; we know that no
886 // uses of the variable can precede the definition (resolve checks
887 // this) so we just clear out all the data.
888 fn define(&mut self, writer
: LiveNode
, var
: Variable
) {
889 let idx
= self.idx(writer
, var
);
890 self.rwu_table
.assign_inv_inv(idx
);
892 debug
!("{:?} defines {:?} (idx={}): {}", writer
, var
,
893 idx
, self.ln_str(writer
));
896 // Either read, write, or both depending on the acc bitset
897 fn acc(&mut self, ln
: LiveNode
, var
: Variable
, acc
: u32) {
898 debug
!("{:?} accesses[{:x}] {:?}: {}",
899 ln
, acc
, var
, self.ln_str(ln
));
901 let idx
= self.idx(ln
, var
);
902 let mut rwu
= self.rwu_table
.get(idx
);
904 if (acc
& ACC_WRITE
) != 0 {
905 rwu
.reader
= invalid_node();
909 // Important: if we both read/write, must do read second
910 // or else the write will override.
911 if (acc
& ACC_READ
) != 0 {
915 if (acc
& ACC_USE
) != 0 {
919 self.rwu_table
.assign_unpacked(idx
, rwu
);
922 fn compute(&mut self, body
: &hir
::Expr
) -> LiveNode
{
923 // if there is a `break` or `again` at the top level, then it's
924 // effectively a return---this only occurs in `for` loops,
925 // where the body is really a closure.
927 debug
!("compute: using id for body, {}", self.ir
.tcx
.hir
.node_to_pretty_string(body
.id
));
929 let exit_ln
= self.s
.exit_ln
;
931 self.break_ln
.insert(body
.id
, exit_ln
);
932 self.cont_ln
.insert(body
.id
, exit_ln
);
934 // the fallthrough exit is only for those cases where we do not
935 // explicitly return:
937 self.init_from_succ(s
.fallthrough_ln
, s
.exit_ln
);
938 self.acc(s
.fallthrough_ln
, s
.clean_exit_var
, ACC_READ
);
940 let entry_ln
= self.propagate_through_expr(body
, s
.fallthrough_ln
);
942 // hack to skip the loop unless debug! is enabled:
943 debug
!("^^ liveness computation results for body {} (entry={:?})", {
944 for ln_idx
in 0..self.ir
.num_live_nodes
{
945 debug
!("{:?}", self.ln_str(LiveNode(ln_idx
as u32)));
954 fn propagate_through_block(&mut self, blk
: &hir
::Block
, succ
: LiveNode
)
956 if blk
.targeted_by_break
{
957 self.break_ln
.insert(blk
.id
, succ
);
959 let succ
= self.propagate_through_opt_expr(blk
.expr
.as_ref().map(|e
| &**e
), succ
);
960 blk
.stmts
.iter().rev().fold(succ
, |succ
, stmt
| {
961 self.propagate_through_stmt(stmt
, succ
)
965 fn propagate_through_stmt(&mut self, stmt
: &hir
::Stmt
, succ
: LiveNode
)
968 hir
::StmtKind
::Decl(ref decl
, _
) => {
969 self.propagate_through_decl(&decl
, succ
)
972 hir
::StmtKind
::Expr(ref expr
, _
) | hir
::StmtKind
::Semi(ref expr
, _
) => {
973 self.propagate_through_expr(&expr
, succ
)
978 fn propagate_through_decl(&mut self, decl
: &hir
::Decl
, succ
: LiveNode
)
981 hir
::DeclKind
::Local(ref local
) => {
982 self.propagate_through_local(&local
, succ
)
984 hir
::DeclKind
::Item(_
) => succ
,
988 fn propagate_through_local(&mut self, local
: &hir
::Local
, succ
: LiveNode
)
990 // Note: we mark the variable as defined regardless of whether
991 // there is an initializer. Initially I had thought to only mark
992 // the live variable as defined if it was initialized, and then we
993 // could check for uninit variables just by scanning what is live
994 // at the start of the function. But that doesn't work so well for
995 // immutable variables defined in a loop:
996 // loop { let x; x = 5; }
997 // because the "assignment" loops back around and generates an error.
999 // So now we just check that variables defined w/o an
1000 // initializer are not live at the point of their
1001 // initialization, which is mildly more complex than checking
1002 // once at the func header but otherwise equivalent.
1004 let succ
= self.propagate_through_opt_expr(local
.init
.as_ref().map(|e
| &**e
), succ
);
1005 self.define_bindings_in_pat(&local
.pat
, succ
)
1008 fn propagate_through_exprs(&mut self, exprs
: &[Expr
], succ
: LiveNode
)
1010 exprs
.iter().rev().fold(succ
, |succ
, expr
| {
1011 self.propagate_through_expr(&expr
, succ
)
1015 fn propagate_through_opt_expr(&mut self,
1016 opt_expr
: Option
<&Expr
>,
1019 opt_expr
.map_or(succ
, |expr
| self.propagate_through_expr(expr
, succ
))
1022 fn propagate_through_expr(&mut self, expr
: &Expr
, succ
: LiveNode
)
1024 debug
!("propagate_through_expr: {}", self.ir
.tcx
.hir
.node_to_pretty_string(expr
.id
));
1027 // Interesting cases with control flow or which gen/kill
1028 hir
::ExprKind
::Path(hir
::QPath
::Resolved(_
, ref path
)) => {
1029 self.access_path(expr
.hir_id
, path
, succ
, ACC_READ
| ACC_USE
)
1032 hir
::ExprKind
::Field(ref e
, _
) => {
1033 self.propagate_through_expr(&e
, succ
)
1036 hir
::ExprKind
::Closure(.., blk_id
, _
, _
) => {
1037 debug
!("{} is an ExprKind::Closure",
1038 self.ir
.tcx
.hir
.node_to_pretty_string(expr
.id
));
1040 // The next-node for a break is the successor of the entire
1041 // loop. The next-node for a continue is the top of this loop.
1042 let node
= self.live_node(expr
.hir_id
, expr
.span
);
1044 let break_ln
= succ
;
1046 self.break_ln
.insert(blk_id
.node_id
, break_ln
);
1047 self.cont_ln
.insert(blk_id
.node_id
, cont_ln
);
1049 // the construction of a closure itself is not important,
1050 // but we have to consider the closed over variables.
1051 let caps
= self.ir
.capture_info_map
.get(&expr
.id
).cloned().unwrap_or_else(||
1052 span_bug
!(expr
.span
, "no registered caps"));
1054 caps
.iter().rev().fold(succ
, |succ
, cap
| {
1055 self.init_from_succ(cap
.ln
, succ
);
1056 let var
= self.variable(cap
.var_hid
, expr
.span
);
1057 self.acc(cap
.ln
, var
, ACC_READ
| ACC_USE
);
1062 hir
::ExprKind
::If(ref cond
, ref then
, ref els
) => {
1076 let else_ln
= self.propagate_through_opt_expr(els
.as_ref().map(|e
| &**e
), succ
);
1077 let then_ln
= self.propagate_through_expr(&then
, succ
);
1078 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
1079 self.init_from_succ(ln
, else_ln
);
1080 self.merge_from_succ(ln
, then_ln
, false);
1081 self.propagate_through_expr(&cond
, ln
)
1084 hir
::ExprKind
::While(ref cond
, ref blk
, _
) => {
1085 self.propagate_through_loop(expr
, WhileLoop(&cond
), &blk
, succ
)
1088 // Note that labels have been resolved, so we don't need to look
1089 // at the label ident
1090 hir
::ExprKind
::Loop(ref blk
, _
, _
) => {
1091 self.propagate_through_loop(expr
, LoopLoop
, &blk
, succ
)
1094 hir
::ExprKind
::Match(ref e
, ref arms
, _
) => {
1109 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
1110 self.init_empty(ln
, succ
);
1111 let mut first_merge
= true;
1113 let body_succ
= self.propagate_through_expr(&arm
.body
, succ
);
1115 let guard_succ
= self.propagate_through_opt_expr(
1116 arm
.guard
.as_ref().map(|hir
::Guard
::If(e
)| &**e
),
1119 // only consider the first pattern; any later patterns must have
1120 // the same bindings, and we also consider the first pattern to be
1121 // the "authoritative" set of ids
1123 self.define_bindings_in_arm_pats(arm
.pats
.first().map(|p
| &**p
),
1125 self.merge_from_succ(ln
, arm_succ
, first_merge
);
1126 first_merge
= false;
1128 self.propagate_through_expr(&e
, ln
)
1131 hir
::ExprKind
::Ret(ref o_e
) => {
1132 // ignore succ and subst exit_ln:
1133 let exit_ln
= self.s
.exit_ln
;
1134 self.propagate_through_opt_expr(o_e
.as_ref().map(|e
| &**e
), exit_ln
)
1137 hir
::ExprKind
::Break(label
, ref opt_expr
) => {
1138 // Find which label this break jumps to
1139 let target
= match label
.target_id
{
1140 Ok(node_id
) => self.break_ln
.get(&node_id
),
1141 Err(err
) => span_bug
!(expr
.span
, "loop scope error: {}", err
),
1144 // Now that we know the label we're going to,
1145 // look it up in the break loop nodes table
1148 Some(b
) => self.propagate_through_opt_expr(opt_expr
.as_ref().map(|e
| &**e
), b
),
1149 None
=> span_bug
!(expr
.span
, "break to unknown label")
1153 hir
::ExprKind
::Continue(label
) => {
1154 // Find which label this expr continues to
1155 let sc
= label
.target_id
.unwrap_or_else(|err
|
1156 span_bug
!(expr
.span
, "loop scope error: {}", err
));
1158 // Now that we know the label we're going to,
1159 // look it up in the continue loop nodes table
1160 self.cont_ln
.get(&sc
).cloned().unwrap_or_else(||
1161 span_bug
!(expr
.span
, "continue to unknown label"))
1164 hir
::ExprKind
::Assign(ref l
, ref r
) => {
1165 // see comment on places in
1166 // propagate_through_place_components()
1167 let succ
= self.write_place(&l
, succ
, ACC_WRITE
);
1168 let succ
= self.propagate_through_place_components(&l
, succ
);
1169 self.propagate_through_expr(&r
, succ
)
1172 hir
::ExprKind
::AssignOp(_
, ref l
, ref r
) => {
1173 // an overloaded assign op is like a method call
1174 if self.tables
.is_method_call(expr
) {
1175 let succ
= self.propagate_through_expr(&l
, succ
);
1176 self.propagate_through_expr(&r
, succ
)
1178 // see comment on places in
1179 // propagate_through_place_components()
1180 let succ
= self.write_place(&l
, succ
, ACC_WRITE
|ACC_READ
);
1181 let succ
= self.propagate_through_expr(&r
, succ
);
1182 self.propagate_through_place_components(&l
, succ
)
1186 // Uninteresting cases: just propagate in rev exec order
1188 hir
::ExprKind
::Array(ref exprs
) => {
1189 self.propagate_through_exprs(exprs
, succ
)
1192 hir
::ExprKind
::Struct(_
, ref fields
, ref with_expr
) => {
1193 let succ
= self.propagate_through_opt_expr(with_expr
.as_ref().map(|e
| &**e
), succ
);
1194 fields
.iter().rev().fold(succ
, |succ
, field
| {
1195 self.propagate_through_expr(&field
.expr
, succ
)
1199 hir
::ExprKind
::Call(ref f
, ref args
) => {
1200 // FIXME(canndrew): This is_never should really be an is_uninhabited
1201 let succ
= if self.tables
.expr_ty(expr
).is_never() {
1206 let succ
= self.propagate_through_exprs(args
, succ
);
1207 self.propagate_through_expr(&f
, succ
)
1210 hir
::ExprKind
::MethodCall(.., ref args
) => {
1211 // FIXME(canndrew): This is_never should really be an is_uninhabited
1212 let succ
= if self.tables
.expr_ty(expr
).is_never() {
1218 self.propagate_through_exprs(args
, succ
)
1221 hir
::ExprKind
::Tup(ref exprs
) => {
1222 self.propagate_through_exprs(exprs
, succ
)
1225 hir
::ExprKind
::Binary(op
, ref l
, ref r
) if op
.node
.is_lazy() => {
1226 let r_succ
= self.propagate_through_expr(&r
, succ
);
1228 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
1229 self.init_from_succ(ln
, succ
);
1230 self.merge_from_succ(ln
, r_succ
, false);
1232 self.propagate_through_expr(&l
, ln
)
1235 hir
::ExprKind
::Index(ref l
, ref r
) |
1236 hir
::ExprKind
::Binary(_
, ref l
, ref r
) => {
1237 let r_succ
= self.propagate_through_expr(&r
, succ
);
1238 self.propagate_through_expr(&l
, r_succ
)
1241 hir
::ExprKind
::Box(ref e
) |
1242 hir
::ExprKind
::AddrOf(_
, ref e
) |
1243 hir
::ExprKind
::Cast(ref e
, _
) |
1244 hir
::ExprKind
::Type(ref e
, _
) |
1245 hir
::ExprKind
::Unary(_
, ref e
) |
1246 hir
::ExprKind
::Yield(ref e
) |
1247 hir
::ExprKind
::Repeat(ref e
, _
) => {
1248 self.propagate_through_expr(&e
, succ
)
1251 hir
::ExprKind
::InlineAsm(ref ia
, ref outputs
, ref inputs
) => {
1252 let succ
= ia
.outputs
.iter().zip(outputs
).rev().fold(succ
, |succ
, (o
, output
)| {
1253 // see comment on places
1254 // in propagate_through_place_components()
1256 self.propagate_through_expr(output
, succ
)
1258 let acc
= if o
.is_rw { ACC_WRITE|ACC_READ }
else { ACC_WRITE }
;
1259 let succ
= self.write_place(output
, succ
, acc
);
1260 self.propagate_through_place_components(output
, succ
)
1263 // Inputs are executed first. Propagate last because of rev order
1264 self.propagate_through_exprs(inputs
, succ
)
1267 hir
::ExprKind
::Lit(..) | hir
::ExprKind
::Path(hir
::QPath
::TypeRelative(..)) => {
1271 // Note that labels have been resolved, so we don't need to look
1272 // at the label ident
1273 hir
::ExprKind
::Block(ref blk
, _
) => {
1274 self.propagate_through_block(&blk
, succ
)
1279 fn propagate_through_place_components(&mut self,
1285 // In general, the full flow graph structure for an
1286 // assignment/move/etc can be handled in one of two ways,
1287 // depending on whether what is being assigned is a "tracked
1288 // value" or not. A tracked value is basically a local
1289 // variable or argument.
1291 // The two kinds of graphs are:
1293 // Tracked place Untracked place
1294 // ----------------------++-----------------------
1298 // (rvalue) || (rvalue)
1301 // (write of place) || (place components)
1306 // ----------------------++-----------------------
1308 // I will cover the two cases in turn:
1312 // A tracked place is a local variable/argument `x`. In
1313 // these cases, the link_node where the write occurs is linked
1314 // to node id of `x`. The `write_place()` routine generates
1315 // the contents of this node. There are no subcomponents to
1318 // # Non-tracked places
1320 // These are places like `x[5]` or `x.f`. In that case, we
1321 // basically ignore the value which is written to but generate
1322 // reads for the components---`x` in these two examples. The
1323 // components reads are generated by
1324 // `propagate_through_place_components()` (this fn).
1328 // It is still possible to observe assignments to non-places;
1329 // these errors are detected in the later pass borrowck. We
1330 // just ignore such cases and treat them as reads.
1333 hir
::ExprKind
::Path(_
) => succ
,
1334 hir
::ExprKind
::Field(ref e
, _
) => self.propagate_through_expr(&e
, succ
),
1335 _
=> self.propagate_through_expr(expr
, succ
)
1339 // see comment on propagate_through_place()
1340 fn write_place(&mut self, expr
: &Expr
, succ
: LiveNode
, acc
: u32) -> LiveNode
{
1342 hir
::ExprKind
::Path(hir
::QPath
::Resolved(_
, ref path
)) => {
1343 self.access_path(expr
.hir_id
, path
, succ
, acc
)
1346 // We do not track other places, so just propagate through
1347 // to their subcomponents. Also, it may happen that
1348 // non-places occur here, because those are detected in the
1349 // later pass borrowck.
1354 fn access_var(&mut self, hir_id
: HirId
, nid
: NodeId
, succ
: LiveNode
, acc
: u32, span
: Span
)
1356 let ln
= self.live_node(hir_id
, span
);
1358 self.init_from_succ(ln
, succ
);
1359 let var_hid
= self.ir
.tcx
.hir
.node_to_hir_id(nid
);
1360 let var
= self.variable(var_hid
, span
);
1361 self.acc(ln
, var
, acc
);
1366 fn access_path(&mut self, hir_id
: HirId
, path
: &hir
::Path
, succ
: LiveNode
, acc
: u32)
1369 Def
::Local(nid
) => {
1370 self.access_var(hir_id
, nid
, succ
, acc
, path
.span
)
1376 fn propagate_through_loop(&mut self,
1384 We model control flow like this:
1402 let mut first_merge
= true;
1403 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
1404 self.init_empty(ln
, succ
);
1408 // If this is not a `loop` loop, then it's possible we bypass
1409 // the body altogether. Otherwise, the only way is via a `break`
1410 // in the loop body.
1411 self.merge_from_succ(ln
, succ
, first_merge
);
1412 first_merge
= false;
1415 debug
!("propagate_through_loop: using id for loop body {} {}",
1416 expr
.id
, self.ir
.tcx
.hir
.node_to_pretty_string(body
.id
));
1418 let break_ln
= succ
;
1420 self.break_ln
.insert(expr
.id
, break_ln
);
1421 self.cont_ln
.insert(expr
.id
, cont_ln
);
1423 let cond_ln
= match kind
{
1425 WhileLoop(ref cond
) => self.propagate_through_expr(&cond
, ln
),
1427 let body_ln
= self.propagate_through_block(body
, cond_ln
);
1429 // repeat until fixed point is reached:
1430 while self.merge_from_succ(ln
, body_ln
, first_merge
) {
1431 first_merge
= false;
1433 let new_cond_ln
= match kind
{
1435 WhileLoop(ref cond
) => {
1436 self.propagate_through_expr(&cond
, ln
)
1439 assert_eq
!(cond_ln
, new_cond_ln
);
1440 assert_eq
!(body_ln
, self.propagate_through_block(body
, cond_ln
));
1447 // _______________________________________________________________________
1448 // Checking for error conditions
1450 impl<'a
, 'tcx
> Visitor
<'tcx
> for Liveness
<'a
, 'tcx
> {
1451 fn nested_visit_map
<'this
>(&'this
mut self) -> NestedVisitorMap
<'this
, 'tcx
> {
1452 NestedVisitorMap
::None
1455 fn visit_local(&mut self, l
: &'tcx hir
::Local
) {
1456 check_local(self, l
);
1458 fn visit_expr(&mut self, ex
: &'tcx Expr
) {
1459 check_expr(self, ex
);
1461 fn visit_arm(&mut self, a
: &'tcx hir
::Arm
) {
1466 fn check_local
<'a
, 'tcx
>(this
: &mut Liveness
<'a
, 'tcx
>, local
: &'tcx hir
::Local
) {
1469 this
.warn_about_unused_or_dead_vars_in_pat(&local
.pat
);
1472 this
.pat_bindings(&local
.pat
, |this
, ln
, var
, sp
, id
| {
1473 let span
= local
.pat
.simple_ident().map_or(sp
, |ident
| ident
.span
);
1474 this
.warn_about_unused(span
, id
, ln
, var
);
1479 intravisit
::walk_local(this
, local
);
1482 fn check_arm
<'a
, 'tcx
>(this
: &mut Liveness
<'a
, 'tcx
>, arm
: &'tcx hir
::Arm
) {
1483 // only consider the first pattern; any later patterns must have
1484 // the same bindings, and we also consider the first pattern to be
1485 // the "authoritative" set of ids
1486 this
.arm_pats_bindings(arm
.pats
.first().map(|p
| &**p
), |this
, ln
, var
, sp
, id
| {
1487 this
.warn_about_unused(sp
, id
, ln
, var
);
1489 intravisit
::walk_arm(this
, arm
);
1492 fn check_expr
<'a
, 'tcx
>(this
: &mut Liveness
<'a
, 'tcx
>, expr
: &'tcx Expr
) {
1494 hir
::ExprKind
::Assign(ref l
, _
) => {
1495 this
.check_place(&l
);
1497 intravisit
::walk_expr(this
, expr
);
1500 hir
::ExprKind
::AssignOp(_
, ref l
, _
) => {
1501 if !this
.tables
.is_method_call(expr
) {
1502 this
.check_place(&l
);
1505 intravisit
::walk_expr(this
, expr
);
1508 hir
::ExprKind
::InlineAsm(ref ia
, ref outputs
, ref inputs
) => {
1509 for input
in inputs
{
1510 this
.visit_expr(input
);
1513 // Output operands must be places
1514 for (o
, output
) in ia
.outputs
.iter().zip(outputs
) {
1516 this
.check_place(output
);
1518 this
.visit_expr(output
);
1521 intravisit
::walk_expr(this
, expr
);
1524 // no correctness conditions related to liveness
1525 hir
::ExprKind
::Call(..) | hir
::ExprKind
::MethodCall(..) | hir
::ExprKind
::If(..) |
1526 hir
::ExprKind
::Match(..) | hir
::ExprKind
::While(..) | hir
::ExprKind
::Loop(..) |
1527 hir
::ExprKind
::Index(..) | hir
::ExprKind
::Field(..) |
1528 hir
::ExprKind
::Array(..) | hir
::ExprKind
::Tup(..) | hir
::ExprKind
::Binary(..) |
1529 hir
::ExprKind
::Cast(..) | hir
::ExprKind
::Unary(..) | hir
::ExprKind
::Ret(..) |
1530 hir
::ExprKind
::Break(..) | hir
::ExprKind
::Continue(..) | hir
::ExprKind
::Lit(_
) |
1531 hir
::ExprKind
::Block(..) | hir
::ExprKind
::AddrOf(..) |
1532 hir
::ExprKind
::Struct(..) | hir
::ExprKind
::Repeat(..) |
1533 hir
::ExprKind
::Closure(..) | hir
::ExprKind
::Path(_
) | hir
::ExprKind
::Yield(..) |
1534 hir
::ExprKind
::Box(..) | hir
::ExprKind
::Type(..) => {
1535 intravisit
::walk_expr(this
, expr
);
1540 impl<'a
, 'tcx
> Liveness
<'a
, 'tcx
> {
1541 fn check_place(&mut self, expr
: &'tcx Expr
) {
1543 hir
::ExprKind
::Path(hir
::QPath
::Resolved(_
, ref path
)) => {
1544 if let Def
::Local(nid
) = path
.def
{
1545 // Assignment to an immutable variable or argument: only legal
1546 // if there is no later assignment. If this local is actually
1547 // mutable, then check for a reassignment to flag the mutability
1549 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
1550 let var_hid
= self.ir
.tcx
.hir
.node_to_hir_id(nid
);
1551 let var
= self.variable(var_hid
, expr
.span
);
1552 self.warn_about_dead_assign(expr
.span
, expr
.hir_id
, ln
, var
);
1556 // For other kinds of places, no checks are required,
1557 // and any embedded expressions are actually rvalues
1558 intravisit
::walk_expr(self, expr
);
1563 fn should_warn(&self, var
: Variable
) -> Option
<String
> {
1564 let name
= self.ir
.variable_name(var
);
1565 if name
.is_empty() || name
.as_bytes()[0] == b'_'
{
1572 fn warn_about_unused_args(&self, body
: &hir
::Body
, entry_ln
: LiveNode
) {
1573 for arg
in &body
.arguments
{
1574 arg
.pat
.each_binding(|_bm
, hir_id
, _
, ident
| {
1575 let sp
= ident
.span
;
1576 let var
= self.variable(hir_id
, sp
);
1577 // Ignore unused self.
1578 if ident
.name
!= keywords
::SelfValue
.name() {
1579 if !self.warn_about_unused(sp
, hir_id
, entry_ln
, var
) {
1580 if self.live_on_entry(entry_ln
, var
).is_none() {
1581 self.report_dead_assign(hir_id
, sp
, var
, true);
1589 fn warn_about_unused_or_dead_vars_in_pat(&mut self, pat
: &hir
::Pat
) {
1590 self.pat_bindings(pat
, |this
, ln
, var
, sp
, id
| {
1591 if !this
.warn_about_unused(sp
, id
, ln
, var
) {
1592 this
.warn_about_dead_assign(sp
, id
, ln
, var
);
1597 fn warn_about_unused(&self,
1603 if !self.used_on_entry(ln
, var
) {
1604 let r
= self.should_warn(var
);
1605 if let Some(name
) = r
{
1606 // annoying: for parameters in funcs like `fn(x: i32)
1607 // {ret}`, there is only one node, so asking about
1608 // assigned_on_exit() is not meaningful.
1609 let is_assigned
= if ln
== self.s
.exit_ln
{
1612 self.assigned_on_exit(ln
, var
).is_some()
1615 let suggest_underscore_msg
= format
!("consider using `_{}` instead", name
);
1619 .lint_hir_note(lint
::builtin
::UNUSED_VARIABLES
, hir_id
, sp
,
1620 &format
!("variable `{}` is assigned to, but never used",
1622 &suggest_underscore_msg
);
1623 } else if name
!= "self" {
1624 let msg
= format
!("unused variable: `{}`", name
);
1625 let mut err
= self.ir
.tcx
1626 .struct_span_lint_hir(lint
::builtin
::UNUSED_VARIABLES
, hir_id
, sp
, &msg
);
1627 if self.ir
.variable_is_shorthand(var
) {
1628 err
.span_suggestion_with_applicability(sp
, "try ignoring the field",
1629 format
!("{}: _", name
),
1630 Applicability
::MachineApplicable
);
1632 err
.span_suggestion_short_with_applicability(
1633 sp
, &suggest_underscore_msg
,
1634 format
!("_{}", name
),
1635 Applicability
::MachineApplicable
,
1647 fn warn_about_dead_assign(&self,
1652 if self.live_on_exit(ln
, var
).is_none() {
1653 self.report_dead_assign(hir_id
, sp
, var
, false);
1657 fn report_dead_assign(&self, hir_id
: HirId
, sp
: Span
, var
: Variable
, is_argument
: bool
) {
1658 if let Some(name
) = self.should_warn(var
) {
1660 self.ir
.tcx
.lint_hir(lint
::builtin
::UNUSED_ASSIGNMENTS
, hir_id
, sp
,
1661 &format
!("value passed to `{}` is never read", name
));
1663 self.ir
.tcx
.lint_hir(lint
::builtin
::UNUSED_ASSIGNMENTS
, hir_id
, sp
,
1664 &format
!("value assigned to `{}` is never read", name
));