// (Source: rustc 1.31.0-beta, src/librustc/middle/liveness.rs)
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! A classic liveness analysis based on dataflow over the AST. Computes,
12 //! for each local variable in a function, whether that variable is live
13 //! at a given point. Program execution points are identified by their
14 //! id.
15 //!
16 //! # Basic idea
17 //!
18 //! The basic model is that each local variable is assigned an index. We
19 //! represent sets of local variables using a vector indexed by this
20 //! index. The value in the vector is either 0, indicating the variable
21 //! is dead, or the id of an expression that uses the variable.
22 //!
23 //! We conceptually walk over the AST in reverse execution order. If we
24 //! find a use of a variable, we add it to the set of live variables. If
25 //! we find an assignment to a variable, we remove it from the set of live
26 //! variables. When we have to merge two flows, we take the union of
27 //! those two flows---if the variable is live on both paths, we simply
28 //! pick one id. In the event of loops, we continue doing this until a
29 //! fixed point is reached.
30 //!
31 //! ## Checking initialization
32 //!
33 //! At the function entry point, all variables must be dead. If this is
34 //! not the case, we can report an error using the id found in the set of
35 //! live variables, which identifies a use of the variable which is not
36 //! dominated by an assignment.
37 //!
38 //! ## Checking moves
39 //!
40 //! After each explicit move, the variable must be dead.
41 //!
42 //! ## Computing last uses
43 //!
44 //! Any use of the variable where the variable is dead afterwards is a
45 //! last use.
46 //!
47 //! # Implementation details
48 //!
49 //! The actual implementation contains two (nested) walks over the AST.
50 //! The outer walk has the job of building up the ir_maps instance for the
51 //! enclosing function. On the way down the tree, it identifies those AST
52 //! nodes and variable IDs that will be needed for the liveness analysis
53 //! and assigns them contiguous IDs. The liveness id for an AST node is
54 //! called a `live_node` (it's a newtype'd u32) and the id for a variable
55 //! is called a `variable` (another newtype'd u32).
56 //!
57 //! On the way back up the tree, as we are about to exit from a function
58 //! declaration we allocate a `liveness` instance. Now that we know
59 //! precisely how many nodes and variables we need, we can allocate all
60 //! the various arrays that we will need to precisely the right size. We then
61 //! perform the actual propagation on the `liveness` instance.
62 //!
63 //! This propagation is encoded in the various `propagate_through_*()`
64 //! methods. It effectively does a reverse walk of the AST; whenever we
65 //! reach a loop node, we iterate until a fixed point is reached.
66 //!
67 //! ## The `RWU` struct
68 //!
69 //! At each live node `N`, we track three pieces of information for each
70 //! variable `V` (these are encapsulated in the `RWU` struct):
71 //!
72 //! - `reader`: the `LiveNode` ID of some node which will read the value
73 //! that `V` holds on entry to `N`. Formally: a node `M` such
74 //! that there exists a path `P` from `N` to `M` where `P` does not
75 //! write `V`. If the `reader` is `invalid_node()`, then the current
76 //! value will never be read (the variable is dead, essentially).
77 //!
78 //! - `writer`: the `LiveNode` ID of some node which will write the
79 //! variable `V` and which is reachable from `N`. Formally: a node `M`
80 //! such that there exists a path `P` from `N` to `M` and `M` writes
81 //! `V`. If the `writer` is `invalid_node()`, then there is no writer
82 //! of `V` that follows `N`.
83 //!
84 //! - `used`: a boolean value indicating whether `V` is *used*. We
85 //! distinguish a *read* from a *use* in that a *use* is some read that
86 //! is not just used to generate a new value. For example, `x += 1` is
87 //! a read but not a use. This is used to generate better warnings.
88 //!
89 //! ## Special Variables
90 //!
91 //! We generate various special variables for various, well, special purposes.
92 //! These are described in the `specials` struct:
93 //!
94 //! - `exit_ln`: a live node that is generated to represent every 'exit' from
95 //! the function, whether it be by explicit return, panic, or other means.
96 //!
97 //! - `fallthrough_ln`: a live node that represents a fallthrough
98 //!
99 //! - `clean_exit_var`: a synthetic variable that is only 'read' from the
100 //! fallthrough node. It is only live if the function could converge
101 //! via means other than an explicit `return` expression. That is, it is
102 //! only dead if the end of the function's block can never be reached.
103 //! It is the responsibility of typeck to ensure that there are no
104 //! `return` expressions in a function declared as diverging.
105
106 use self::LoopKind::*;
107 use self::LiveNodeKind::*;
108 use self::VarKind::*;
109
110 use hir::def::*;
111 use hir::Node;
112 use ty::{self, TyCtxt};
113 use lint;
114 use errors::Applicability;
115 use util::nodemap::{NodeMap, HirIdMap, HirIdSet};
116
117 use std::collections::VecDeque;
118 use std::{fmt, u32};
119 use std::io::prelude::*;
120 use std::io;
121 use std::rc::Rc;
122 use syntax::ast::{self, NodeId};
123 use syntax::ptr::P;
124 use syntax::symbol::keywords;
125 use syntax_pos::Span;
126
127 use hir::{Expr, HirId};
128 use hir;
129 use hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap};
130
/// For use with `propagate_through_loop`.
enum LoopKind<'a> {
    /// An endless `loop` loop.
    LoopLoop,
    /// A `while` loop, with the given expression as condition.
    WhileLoop(&'a Expr),
}

/// Index of a local variable in the per-function liveness tables;
/// assigned contiguously by `IrMaps::add_variable`.
#[derive(Copy, Clone, PartialEq)]
struct Variable(u32);

/// Index of a liveness node (an interesting expression, a variable
/// definition point, or an exit); assigned by `IrMaps::add_live_node`.
#[derive(Copy, Clone, PartialEq)]
struct LiveNode(u32);

impl Variable {
    // Raw index, used to address per-variable columns in the RWU table.
    fn get(&self) -> usize { self.0 as usize }
}

impl LiveNode {
    // Raw index, used to address rows in the RWU table and `successors`.
    fn get(&self) -> usize { self.0 as usize }
}
152
/// What kind of program point a `LiveNode` stands for; the span is used
/// when rendering the node in debug output.
#[derive(Copy, Clone, PartialEq, Debug)]
enum LiveNodeKind {
    /// A variable captured by a closure, at the capture's use site.
    FreeVarNode(Span),
    /// An expression that uses a variable or has interesting control flow.
    ExprNode(Span),
    /// The point where a pattern introduces a variable binding.
    VarDefNode(Span),
    /// The synthetic exit point of the function.
    ExitNode
}
160
161 fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_, '_, '_>) -> String {
162 let cm = tcx.sess.source_map();
163 match lnk {
164 FreeVarNode(s) => {
165 format!("Free var node [{}]", cm.span_to_string(s))
166 }
167 ExprNode(s) => {
168 format!("Expr node [{}]", cm.span_to_string(s))
169 }
170 VarDefNode(s) => {
171 format!("Var def node [{}]", cm.span_to_string(s))
172 }
173 ExitNode => "Exit node".to_owned(),
174 }
175 }
176
impl<'a, 'tcx> Visitor<'tcx> for IrMaps<'a, 'tcx> {
    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
        // Visit nested bodies (closures etc.), but not nested items.
        NestedVisitorMap::OnlyBodies(&self.tcx.hir)
    }

    // Each visit method delegates to the free functions below, which
    // build up the IR maps for the liveness computation.
    fn visit_fn(&mut self, fk: FnKind<'tcx>, fd: &'tcx hir::FnDecl,
                b: hir::BodyId, s: Span, id: NodeId) {
        visit_fn(self, fk, fd, b, s, id);
    }

    fn visit_local(&mut self, l: &'tcx hir::Local) { visit_local(self, l); }
    fn visit_expr(&mut self, ex: &'tcx Expr) { visit_expr(self, ex); }
    fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); }
}

/// Entry point: runs the liveness analysis over every body in the
/// crate, then aborts compilation if any errors were reported.
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
    tcx.hir.krate().visit_all_item_likes(&mut IrMaps::new(tcx).as_deep_visitor());
    tcx.sess.abort_if_errors();
}
196
197 impl fmt::Debug for LiveNode {
198 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
199 write!(f, "ln({})", self.get())
200 }
201 }
202
203 impl fmt::Debug for Variable {
204 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
205 write!(f, "v({})", self.get())
206 }
207 }
208
209 // ______________________________________________________________________
210 // Creating ir_maps
211 //
212 // This is the first pass and the one that drives the main
213 // computation. It walks up and down the IR once. On the way down,
214 // we count for each function the number of variables as well as
215 // liveness nodes. A liveness node is basically an expression or
216 // capture clause that does something of interest: either it has
217 // interesting control flow or it uses/defines a local variable.
218 //
219 // On the way back up, at each function node we create liveness sets
220 // (we now know precisely how big to make our various vectors and so
221 // forth) and then do the data-flow propagation to compute the set
222 // of live variables at each program point.
223 //
224 // Finally, we run back over the IR one last time and, using the
225 // computed liveness, check various safety conditions. For example,
226 // there must be no live nodes at the definition site for a variable
227 // unless it has an initializer. Similarly, each non-mutable local
228 // variable must not be assigned if there is some successor
229 // assignment. And so forth.
230
impl LiveNode {
    /// A node is valid unless it is the `u32::MAX` sentinel produced by
    /// `invalid_node()`.
    fn is_valid(&self) -> bool {
        self.0 != u32::MAX
    }
}

/// The sentinel `LiveNode` meaning "no node" (no reader / no writer).
fn invalid_node() -> LiveNode { LiveNode(u32::MAX) }

/// A variable captured by a closure: the live node generated for the
/// capture site plus the `HirId` of the captured variable.
struct CaptureInfo {
    ln: LiveNode,
    var_hid: HirId
}
243
/// Data recorded for a pattern-bound local variable.
#[derive(Copy, Clone, Debug)]
struct LocalInfo {
    id: HirId,
    name: ast::Name,
    // True when bound via struct-pattern shorthand (`S { x }` rather
    // than `S { x: x }`); used to tailor unused-variable suggestions.
    is_shorthand: bool,
}

/// The kinds of variables tracked by the analysis.
#[derive(Copy, Clone, Debug)]
enum VarKind {
    /// A function argument.
    Arg(HirId, ast::Name),
    /// A pattern-bound local.
    Local(LocalInfo),
    /// The synthetic `clean_exit_var` (see module docs).
    CleanExit
}

/// Per-function maps built by the first (IR-building) pass: they assign
/// contiguous ids to live nodes and variables, and remember closure
/// capture information for the propagation pass.
struct IrMaps<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    num_live_nodes: usize,
    num_vars: usize,
    live_node_map: HirIdMap<LiveNode>,
    variable_map: HirIdMap<Variable>,
    capture_info_map: NodeMap<Rc<Vec<CaptureInfo>>>,
    var_kinds: Vec<VarKind>,
    // Kind of each live node, indexed by `LiveNode::get()`.
    lnks: Vec<LiveNodeKind>,
}
268
impl<'a, 'tcx> IrMaps<'a, 'tcx> {
    /// Creates empty maps for a single function body.
    fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> IrMaps<'a, 'tcx> {
        IrMaps {
            tcx,
            num_live_nodes: 0,
            num_vars: 0,
            live_node_map: HirIdMap(),
            variable_map: HirIdMap(),
            capture_info_map: NodeMap(),
            var_kinds: Vec::new(),
            lnks: Vec::new(),
        }
    }

    /// Allocates the next `LiveNode` id and records its kind.
    fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
        let ln = LiveNode(self.num_live_nodes as u32);
        self.lnks.push(lnk);
        self.num_live_nodes += 1;

        debug!("{:?} is of kind {}", ln,
               live_node_kind_to_string(lnk, self.tcx));

        ln
    }

    /// Allocates a live node and associates it with the HIR node
    /// `hir_id` so the propagation pass can look it up.
    fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
        let ln = self.add_live_node(lnk);
        self.live_node_map.insert(hir_id, ln);

        debug!("{:?} is node {:?}", ln, hir_id);
    }

    /// Allocates the next `Variable` id; args and locals are also
    /// indexed by their `HirId` so later uses can be resolved.
    fn add_variable(&mut self, vk: VarKind) -> Variable {
        let v = Variable(self.num_vars as u32);
        self.var_kinds.push(vk);
        self.num_vars += 1;

        match vk {
            Local(LocalInfo { id: node_id, .. }) | Arg(node_id, _) => {
                self.variable_map.insert(node_id, v);
            },
            // The clean-exit variable is synthetic; it has no HIR node.
            CleanExit => {}
        }

        debug!("{:?} is {:?}", v, vk);

        v
    }

    /// Looks up the `Variable` for a HIR id; it is a compiler bug (ICE)
    /// for the id not to have been registered by the IR-building pass.
    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
        match self.variable_map.get(&hir_id) {
            Some(&var) => var,
            None => {
                span_bug!(span, "no variable registered for id {:?}", hir_id);
            }
        }
    }

    /// Name of the variable, for diagnostics.
    fn variable_name(&self, var: Variable) -> String {
        match self.var_kinds[var.get()] {
            Local(LocalInfo { name, .. }) | Arg(_, name) => {
                name.to_string()
            },
            CleanExit => "<clean-exit>".to_owned()
        }
    }

    /// Whether the variable was bound via struct-pattern shorthand.
    fn variable_is_shorthand(&self, var: Variable) -> bool {
        match self.var_kinds[var.get()] {
            Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
            Arg(..) | CleanExit => false
        }
    }

    /// Records the set of variables captured by the closure `node_id`.
    fn set_captures(&mut self, node_id: NodeId, cs: Vec<CaptureInfo>) {
        self.capture_info_map.insert(node_id, Rc::new(cs));
    }

    /// Kind of the given live node.
    fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
        self.lnks[ln.get()]
    }
}
351
/// Builds a fresh set of IR maps for one function body, walks the body
/// to populate them, then runs the liveness computation and the
/// follow-up error/warning checks over the result.
fn visit_fn<'a, 'tcx: 'a>(ir: &mut IrMaps<'a, 'tcx>,
                          fk: FnKind<'tcx>,
                          decl: &'tcx hir::FnDecl,
                          body_id: hir::BodyId,
                          sp: Span,
                          id: ast::NodeId) {
    debug!("visit_fn");

    // swap in a new set of IR maps for this function body:
    let mut fn_maps = IrMaps::new(ir.tcx);

    // Don't run unused pass for #[derive()]
    if let FnKind::Method(..) = fk {
        let parent = ir.tcx.hir.get_parent(id);
        if let Some(Node::Item(i)) = ir.tcx.hir.find(parent) {
            if i.attrs.iter().any(|a| a.check_name("automatically_derived")) {
                return;
            }
        }
    }

    debug!("creating fn_maps: {:?}", &fn_maps as *const IrMaps<'_, '_>);

    let body = ir.tcx.hir.body(body_id);

    // Register every binding in the argument patterns as a variable.
    for arg in &body.arguments {
        arg.pat.each_binding(|_bm, hir_id, _x, ident| {
            debug!("adding argument {:?}", hir_id);
            fn_maps.add_variable(Arg(hir_id, ident.name));
        })
    };

    // gather up the various local variables, significant expressions,
    // and so forth:
    intravisit::walk_fn(&mut fn_maps, fk, decl, body_id, sp, id);

    // compute liveness
    let mut lsets = Liveness::new(&mut fn_maps, body_id);
    let entry_ln = lsets.compute(&body.value);

    // check for various error conditions
    lsets.visit_body(body);
    lsets.warn_about_unused_args(body, entry_ln);
}
396
/// Registers a live node (definition point) and a `Variable` for every
/// binding introduced by `pat`, noting which bindings came from
/// struct-pattern shorthand.
fn add_from_pat<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, pat: &P<hir::Pat>) {
    // For struct patterns, take note of which fields used shorthand
    // (`x` rather than `x: x`).
    let mut shorthand_field_ids = HirIdSet();
    let mut pats = VecDeque::new();
    pats.push_back(pat);
    // Worklist walk over the pattern tree to collect shorthand field ids.
    while let Some(pat) = pats.pop_front() {
        use hir::PatKind::*;
        match pat.node {
            Binding(_, _, _, ref inner_pat) => {
                pats.extend(inner_pat.iter());
            }
            Struct(_, ref fields, _) => {
                for field in fields {
                    if field.node.is_shorthand {
                        shorthand_field_ids.insert(field.node.pat.hir_id);
                    }
                }
            }
            Ref(ref inner_pat, _) |
            Box(ref inner_pat) => {
                pats.push_back(inner_pat);
            }
            TupleStruct(_, ref inner_pats, _) |
            Tuple(ref inner_pats, _) => {
                pats.extend(inner_pats.iter());
            }
            Slice(ref pre_pats, ref inner_pat, ref post_pats) => {
                pats.extend(pre_pats.iter());
                pats.extend(inner_pat.iter());
                pats.extend(post_pats.iter());
            }
            // Other pattern kinds are not traversed here.
            _ => {}
        }
    }

    // Register each binding: a definition node plus a variable.
    pat.each_binding(|_bm, hir_id, _sp, ident| {
        ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
        ir.add_variable(Local(LocalInfo {
            id: hir_id,
            name: ident.name,
            is_shorthand: shorthand_field_ids.contains(&hir_id)
        }));
    });
}
442
/// Registers the bindings of a `let` pattern, then walks the rest of
/// the local declaration (e.g. its initializer).
fn visit_local<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, local: &'tcx hir::Local) {
    add_from_pat(ir, &local.pat);
    intravisit::walk_local(ir, local);
}

/// Registers the bindings of every pattern in a match arm, then
/// recurses into the arm.
fn visit_arm<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, arm: &'tcx hir::Arm) {
    for pat in &arm.pats {
        add_from_pat(ir, pat);
    }
    intravisit::walk_arm(ir, arm);
}
454
/// Expression half of the IR pass: allocates live nodes for expressions
/// that use local variables or have interesting control flow, and
/// records closure capture information.
fn visit_expr<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, expr: &'tcx Expr) {
    match expr.node {
        // live nodes required for uses or definitions of variables:
        hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
            debug!("expr {}: path that leads to {:?}", expr.id, path.def);
            if let Def::Local(..) = path.def {
                ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
            }
            intravisit::walk_expr(ir, expr);
        }
        hir::ExprKind::Closure(..) => {
            // Interesting control flow (for loops can contain labeled
            // breaks or continues)
            ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));

            // Make a live_node for each captured variable, with the span
            // being the location that the variable is used. This results
            // in better error messages than just pointing at the closure
            // construction site.
            let mut call_caps = Vec::new();
            ir.tcx.with_freevars(expr.id, |freevars| {
                call_caps.extend(freevars.iter().filter_map(|fv| {
                    // Only captured locals are liveness-relevant.
                    if let Def::Local(rv) = fv.def {
                        let fv_ln = ir.add_live_node(FreeVarNode(fv.span));
                        let var_hid = ir.tcx.hir.node_to_hir_id(rv);
                        Some(CaptureInfo { ln: fv_ln, var_hid })
                    } else {
                        None
                    }
                }));
            });
            ir.set_captures(expr.id, call_caps);

            intravisit::walk_expr(ir, expr);
        }

        // live nodes required for interesting control flow:
        hir::ExprKind::If(..) |
        hir::ExprKind::Match(..) |
        hir::ExprKind::While(..) |
        hir::ExprKind::Loop(..) => {
            ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
            intravisit::walk_expr(ir, expr);
        }
        // Short-circuiting `&&`/`||` also branch, so they get a node.
        hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
            ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
            intravisit::walk_expr(ir, expr);
        }

        // otherwise, live nodes are not required:
        hir::ExprKind::Index(..) |
        hir::ExprKind::Field(..) |
        hir::ExprKind::Array(..) |
        hir::ExprKind::Call(..) |
        hir::ExprKind::MethodCall(..) |
        hir::ExprKind::Tup(..) |
        hir::ExprKind::Binary(..) |
        hir::ExprKind::AddrOf(..) |
        hir::ExprKind::Cast(..) |
        hir::ExprKind::Unary(..) |
        hir::ExprKind::Break(..) |
        hir::ExprKind::Continue(_) |
        hir::ExprKind::Lit(_) |
        hir::ExprKind::Ret(..) |
        hir::ExprKind::Block(..) |
        hir::ExprKind::Assign(..) |
        hir::ExprKind::AssignOp(..) |
        hir::ExprKind::Struct(..) |
        hir::ExprKind::Repeat(..) |
        hir::ExprKind::InlineAsm(..) |
        hir::ExprKind::Box(..) |
        hir::ExprKind::Yield(..) |
        hir::ExprKind::Type(..) |
        hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
            intravisit::walk_expr(ir, expr);
        }
    }
}
533
534 // ______________________________________________________________________
535 // Computing liveness sets
536 //
537 // Actually we compute just a bit more than just liveness, but we use
538 // the same basic propagation framework in all cases.
539
/// The per-(live node, variable) liveness facts; see the module docs
/// for the meaning of `reader`, `writer`, and `used`.
#[derive(Clone, Copy)]
struct RWU {
    reader: LiveNode,
    writer: LiveNode,
    used: bool
}

/// Conceptually, this is like a `Vec<RWU>`. But the number of `RWU`s can get
/// very large, so it uses a more compact representation that takes advantage
/// of the fact that when the number of `RWU`s is large, most of them have an
/// invalid reader and an invalid writer.
struct RWUTable {
    /// Each entry in `packed_rwus` is either INV_INV_FALSE, INV_INV_TRUE, or
    /// an index into `unpacked_rwus`. In the common cases, this compacts the
    /// 65 bits of data into 32; in the uncommon cases, it expands the 65 bits
    /// into 96.
    ///
    /// More compact representations are possible -- e.g. use only 2 bits per
    /// packed `RWU` and make the secondary table a HashMap that maps from
    /// indices to `RWU`s -- but this one strikes a good balance between size
    /// and speed.
    packed_rwus: Vec<u32>,
    unpacked_rwus: Vec<RWU>,
}

// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: false }`.
const INV_INV_FALSE: u32 = u32::MAX;

// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: true }`.
const INV_INV_TRUE: u32 = u32::MAX - 1;
570
impl RWUTable {
    /// Creates a table of `num_rwus` entries, all initially
    /// "invalid reader, invalid writer, not used".
    fn new(num_rwus: usize) -> RWUTable {
        Self {
            packed_rwus: vec![INV_INV_FALSE; num_rwus],
            unpacked_rwus: vec![],
        }
    }

    /// Decodes the full `RWU` stored at `idx`.
    fn get(&self, idx: usize) -> RWU {
        let packed_rwu = self.packed_rwus[idx];
        match packed_rwu {
            INV_INV_FALSE => RWU { reader: invalid_node(), writer: invalid_node(), used: false },
            INV_INV_TRUE => RWU { reader: invalid_node(), writer: invalid_node(), used: true },
            // Any other value is an index into `unpacked_rwus`.
            _ => self.unpacked_rwus[packed_rwu as usize],
        }
    }

    /// Reader component only; avoids materializing a full `RWU`.
    fn get_reader(&self, idx: usize) -> LiveNode {
        let packed_rwu = self.packed_rwus[idx];
        match packed_rwu {
            INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
            _ => self.unpacked_rwus[packed_rwu as usize].reader,
        }
    }

    /// Writer component only.
    fn get_writer(&self, idx: usize) -> LiveNode {
        let packed_rwu = self.packed_rwus[idx];
        match packed_rwu {
            INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
            _ => self.unpacked_rwus[packed_rwu as usize].writer,
        }
    }

    /// `used` flag only.
    fn get_used(&self, idx: usize) -> bool {
        let packed_rwu = self.packed_rwus[idx];
        match packed_rwu {
            INV_INV_FALSE => false,
            INV_INV_TRUE => true,
            _ => self.unpacked_rwus[packed_rwu as usize].used,
        }
    }

    /// Copies the packed word from `src_idx` to `dst_idx`. Note that if
    /// the word is an index into `unpacked_rwus`, both entries now alias
    /// the same unpacked `RWU` — which is why `assign_unpacked` below
    /// always pushes a fresh entry instead of mutating in place.
    #[inline]
    fn copy_packed(&mut self, dst_idx: usize, src_idx: usize) {
        self.packed_rwus[dst_idx] = self.packed_rwus[src_idx];
    }

    /// Stores `rwu` at `idx`, packing the two all-invalid states into
    /// sentinel words and spilling everything else to `unpacked_rwus`.
    fn assign_unpacked(&mut self, idx: usize, rwu: RWU) {
        if rwu.reader == invalid_node() && rwu.writer == invalid_node() {
            // When we overwrite an indexing entry in `self.packed_rwus` with
            // `INV_INV_{TRUE,FALSE}` we don't remove the corresponding entry
            // from `self.unpacked_rwus`; it's not worth the effort, and we
            // can't have entries shifting around anyway.
            self.packed_rwus[idx] = if rwu.used {
                INV_INV_TRUE
            } else {
                INV_INV_FALSE
            }
        } else {
            // Add a new RWU to `unpacked_rwus` and make `packed_rwus[idx]`
            // point to it.
            self.packed_rwus[idx] = self.unpacked_rwus.len() as u32;
            self.unpacked_rwus.push(rwu);
        }
    }

    /// Resets `idx` to "invalid reader and writer", preserving `used`.
    fn assign_inv_inv(&mut self, idx: usize) {
        self.packed_rwus[idx] = if self.get_used(idx) {
            INV_INV_TRUE
        } else {
            INV_INV_FALSE
        };
    }
}
645
/// The special live nodes and variable described in the module docs.
#[derive(Copy, Clone)]
struct Specials {
    exit_ln: LiveNode,
    fallthrough_ln: LiveNode,
    clean_exit_var: Variable
}

// Bit flags for `Liveness::acc`: which kinds of access an expression
// performs on a variable.
const ACC_READ: u32 = 1;
const ACC_WRITE: u32 = 2;
const ACC_USE: u32 = 4;

/// The dataflow state for one function body: the `(live node, variable)`
/// matrix in `rwu_table`, plus the successor of each live node.
struct Liveness<'a, 'tcx: 'a> {
    ir: &'a mut IrMaps<'a, 'tcx>,
    tables: &'a ty::TypeckTables<'tcx>,
    s: Specials,
    successors: Vec<LiveNode>,
    rwu_table: RWUTable,

    // mappings from loop node ID to LiveNode
    // ("break" label should map to loop node ID,
    // it probably doesn't now)
    break_ln: NodeMap<LiveNode>,
    cont_ln: NodeMap<LiveNode>,
}
670
671 impl<'a, 'tcx> Liveness<'a, 'tcx> {
    /// Sets up the liveness state for one body: allocates the special
    /// nodes/variables, then sizes the successor vector and RWU table
    /// from the counts gathered by the IR-building pass.
    fn new(ir: &'a mut IrMaps<'a, 'tcx>, body: hir::BodyId) -> Liveness<'a, 'tcx> {
        // Special nodes and variables:
        // - exit_ln represents the end of the fn, either by return or panic
        // - implicit_ret_var is a pseudo-variable that represents
        //   an implicit return
        //   NOTE(review): the code below names this `clean_exit_var`;
        //   the comment above appears stale — confirm.
        let specials = Specials {
            exit_ln: ir.add_live_node(ExitNode),
            fallthrough_ln: ir.add_live_node(ExitNode),
            clean_exit_var: ir.add_variable(CleanExit)
        };

        let tables = ir.tcx.body_tables(body);

        let num_live_nodes = ir.num_live_nodes;
        let num_vars = ir.num_vars;

        Liveness {
            ir,
            tables,
            s: specials,
            successors: vec![invalid_node(); num_live_nodes],
            // One RWU slot per (live node, variable) pair.
            rwu_table: RWUTable::new(num_live_nodes * num_vars),
            break_ln: NodeMap(),
            cont_ln: NodeMap(),
        }
    }
698
    /// Looks up the live node for a HIR node; it is a compiler bug
    /// (ICE) if the IR-building pass did not register one.
    fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
        match self.ir.live_node_map.get(&hir_id) {
            Some(&ln) => ln,
            None => {
                // This must be a mismatch between the ir_map construction
                // above and the propagation code below; the two sets of
                // code have to agree about which AST nodes are worth
                // creating liveness nodes for.
                span_bug!(
                    span,
                    "no live node registered for node {:?}",
                    hir_id);
            }
        }
    }

    /// Looks up the `Variable` for a HIR id (delegates to `IrMaps`).
    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
        self.ir.variable(hir_id, span)
    }

    /// Invokes `f` once per binding in `pat`, passing the binding's
    /// live node, variable, span, and id.
    fn pat_bindings<F>(&mut self, pat: &hir::Pat, mut f: F) where
        F: FnMut(&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, HirId),
    {
        pat.each_binding(|_bm, hir_id, sp, n| {
            let ln = self.live_node(hir_id, sp);
            let var = self.variable(hir_id, n.span);
            f(self, ln, var, n.span, hir_id);
        })
    }
728
    /// Like `pat_bindings`, but tolerates an absent pattern (no-op).
    fn arm_pats_bindings<F>(&mut self, pat: Option<&hir::Pat>, f: F) where
        F: FnMut(&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, HirId),
    {
        if let Some(pat) = pat {
            self.pat_bindings(pat, f);
        }
    }

    /// Threads the definition nodes of `pat`'s bindings in front of
    /// `succ`, returning the new predecessor node.
    fn define_bindings_in_pat(&mut self, pat: &hir::Pat, succ: LiveNode)
                              -> LiveNode {
        self.define_bindings_in_arm_pats(Some(pat), succ)
    }

    /// For each binding, chains a definition node in front of `succ`:
    /// each node copies its successor's state and then kills the
    /// variable it defines. Returns the head of the resulting chain.
    fn define_bindings_in_arm_pats(&mut self, pat: Option<&hir::Pat>, succ: LiveNode)
                                   -> LiveNode {
        let mut succ = succ;
        self.arm_pats_bindings(pat, |this, ln, var, _sp, _id| {
            this.init_from_succ(ln, succ);
            this.define(ln, var);
            succ = ln;
        });
        succ
    }

    /// Flat index of the `(ln, var)` slot in the RWU table
    /// (row-major: one row of `num_vars` entries per live node).
    fn idx(&self, ln: LiveNode, var: Variable) -> usize {
        ln.get() * self.ir.num_vars + var.get()
    }
756
    /// If `var` is live on entry to `ln`, returns the kind of the node
    /// that will read it; `None` means the variable is dead there.
    fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
        assert!(ln.is_valid());
        let reader = self.rwu_table.get_reader(self.idx(ln, var));
        if reader.is_valid() { Some(self.ir.lnk(reader)) } else { None }
    }

    // Is this variable live on entry to any of its successor nodes?
    fn live_on_exit(&self, ln: LiveNode, var: Variable)
                    -> Option<LiveNodeKind> {
        let successor = self.successors[ln.get()];
        self.live_on_entry(successor, var)
    }

    /// Whether `var` is *used* (not merely read; see module docs) on
    /// entry to `ln`.
    fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
        assert!(ln.is_valid());
        self.rwu_table.get_used(self.idx(ln, var))
    }

    /// If some node reachable from `ln` writes `var`, returns that
    /// writer's kind; `None` means no subsequent assignment.
    fn assigned_on_entry(&self, ln: LiveNode, var: Variable)
                         -> Option<LiveNodeKind> {
        assert!(ln.is_valid());
        let writer = self.rwu_table.get_writer(self.idx(ln, var));
        if writer.is_valid() { Some(self.ir.lnk(writer)) } else { None }
    }

    /// Like `assigned_on_entry`, but asks about `ln`'s successor.
    fn assigned_on_exit(&self, ln: LiveNode, var: Variable)
                        -> Option<LiveNodeKind> {
        let successor = self.successors[ln.get()];
        self.assigned_on_entry(successor, var)
    }
787
    /// Applies `op` to each pair of corresponding RWU-table indices for
    /// `ln` and `succ_ln` (one pair per variable).
    fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F) where
        F: FnMut(&mut Liveness<'a, 'tcx>, usize, usize),
    {
        let node_base_idx = self.idx(ln, Variable(0));
        let succ_base_idx = self.idx(succ_ln, Variable(0));
        for var_idx in 0..self.ir.num_vars {
            op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
        }
    }

    /// Debug helper: writes out every variable for which `test` yields
    /// a valid node at `ln` (e.g. all variables with a reader).
    fn write_vars<F>(&self,
                     wr: &mut dyn Write,
                     ln: LiveNode,
                     mut test: F)
                     -> io::Result<()> where
        F: FnMut(usize) -> LiveNode,
    {
        let node_base_idx = self.idx(ln, Variable(0));
        for var_idx in 0..self.ir.num_vars {
            let idx = node_base_idx + var_idx;
            if test(idx).is_valid() {
                write!(wr, " {:?}", Variable(var_idx as u32))?;
            }
        }
        Ok(())
    }


    /// Debug helper: renders the full state of `ln` (kind, reads,
    /// writes, successor) as a string.
    #[allow(unused_must_use)]
    fn ln_str(&self, ln: LiveNode) -> String {
        let mut wr = Vec::new();
        {
            let wr = &mut wr as &mut dyn Write;
            write!(wr, "[ln({:?}) of kind {:?} reads", ln.get(), self.ir.lnk(ln));
            self.write_vars(wr, ln, |idx| self.rwu_table.get_reader(idx));
            write!(wr, " writes");
            self.write_vars(wr, ln, |idx| self.rwu_table.get_writer(idx));
            write!(wr, " precedes {:?}]", self.successors[ln.get()]);
        }
        String::from_utf8(wr).unwrap()
    }
829
    /// Records `succ_ln` as the successor of `ln` without copying any
    /// liveness state into `ln`.
    fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
        self.successors[ln.get()] = succ_ln;

        // It is not necessary to initialize the RWUs here because they are all
        // set to INV_INV_FALSE when they are created, and the sets only grow
        // during iterations.
    }

    /// Records `succ_ln` as the successor of `ln` and copies its entire
    /// liveness state into `ln`.
    fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
        // more efficient version of init_empty() / merge_from_succ()
        self.successors[ln.get()] = succ_ln;

        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
            this.rwu_table.copy_packed(idx, succ_idx);
        });
        debug!("init_from_succ(ln={}, succ={})",
               self.ln_str(ln), self.ln_str(succ_ln));
    }
848
    /// Merges the liveness state of `succ_ln` into `ln` (set union per
    /// variable). Returns true if `ln`'s state changed; this drives the
    /// fixed-point iteration for loops.
    fn merge_from_succ(&mut self,
                       ln: LiveNode,
                       succ_ln: LiveNode,
                       first_merge: bool)
                       -> bool {
        if ln == succ_ln { return false; }

        let mut changed = false;
        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
            let mut rwu = this.rwu_table.get(idx);
            let succ_rwu = this.rwu_table.get(succ_idx);
            // A valid reader/writer from the successor is adopted only if
            // we do not already have one (any single id suffices; see the
            // module docs on merging flows).
            if succ_rwu.reader.is_valid() && !rwu.reader.is_valid() {
                rwu.reader = succ_rwu.reader;
                changed = true
            }

            if succ_rwu.writer.is_valid() && !rwu.writer.is_valid() {
                rwu.writer = succ_rwu.writer;
                changed = true
            }

            if succ_rwu.used && !rwu.used {
                rwu.used = true;
                changed = true;
            }

            // NOTE(review): `changed` accumulates across variables, so once
            // one variable changes, every later variable is re-assigned as
            // well; this looks harmless but wasteful — confirm.
            if changed {
                this.rwu_table.assign_unpacked(idx, rwu);
            }
        });

        // `first_merge` only affects this debug output.
        debug!("merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})",
               ln, self.ln_str(succ_ln), first_merge, changed);
        return changed;
    }
884
    // Indicates that a local variable was *defined*; we know that no
    // uses of the variable can precede the definition (resolve checks
    // this) so we just clear out all the data.
    fn define(&mut self, writer: LiveNode, var: Variable) {
        let idx = self.idx(writer, var);
        self.rwu_table.assign_inv_inv(idx);

        debug!("{:?} defines {:?} (idx={}): {}", writer, var,
               idx, self.ln_str(writer));
    }

    // Either read, write, or both depending on the acc bitset
    fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
        debug!("{:?} accesses[{:x}] {:?}: {}",
               ln, acc, var, self.ln_str(ln));

        let idx = self.idx(ln, var);
        let mut rwu = self.rwu_table.get(idx);

        if (acc & ACC_WRITE) != 0 {
            // A write kills the incoming read: the previous value dies here.
            rwu.reader = invalid_node();
            rwu.writer = ln;
        }

        // Important: if we both read/write, must do read second
        // or else the write will override.
        if (acc & ACC_READ) != 0 {
            rwu.reader = ln;
        }

        if (acc & ACC_USE) != 0 {
            rwu.used = true;
        }

        self.rwu_table.assign_unpacked(idx, rwu);
    }
921
    /// Runs the backwards propagation over the whole body and returns
    /// the live node at the function's entry point.
    fn compute(&mut self, body: &hir::Expr) -> LiveNode {
        // if there is a `break` or `again` at the top level, then it's
        // effectively a return---this only occurs in `for` loops,
        // where the body is really a closure.

        debug!("compute: using id for body, {}", self.ir.tcx.hir.node_to_pretty_string(body.id));

        let exit_ln = self.s.exit_ln;

        // Top-level `break`/`continue` target the exit node (see above).
        self.break_ln.insert(body.id, exit_ln);
        self.cont_ln.insert(body.id, exit_ln);

        // the fallthrough exit is only for those cases where we do not
        // explicitly return:
        let s = self.s;
        self.init_from_succ(s.fallthrough_ln, s.exit_ln);
        self.acc(s.fallthrough_ln, s.clean_exit_var, ACC_READ);

        // Walk the body backwards starting from the fallthrough node.
        let entry_ln = self.propagate_through_expr(body, s.fallthrough_ln);

        // hack to skip the loop unless debug! is enabled:
        debug!("^^ liveness computation results for body {} (entry={:?})", {
                   for ln_idx in 0..self.ir.num_live_nodes {
                       debug!("{:?}", self.ln_str(LiveNode(ln_idx as u32)));
                   }
                   body.id
               },
               entry_ln);

        entry_ln
    }
953
    /// Propagates liveness backwards through a block: the tail
    /// expression first, then the statements in reverse order.
    fn propagate_through_block(&mut self, blk: &hir::Block, succ: LiveNode)
                               -> LiveNode {
        if blk.targeted_by_break {
            // A labeled block: `break` out of it continues at `succ`.
            self.break_ln.insert(blk.id, succ);
        }
        let succ = self.propagate_through_opt_expr(blk.expr.as_ref().map(|e| &**e), succ);
        blk.stmts.iter().rev().fold(succ, |succ, stmt| {
            self.propagate_through_stmt(stmt, succ)
        })
    }

    /// Propagates through a single statement.
    fn propagate_through_stmt(&mut self, stmt: &hir::Stmt, succ: LiveNode)
                              -> LiveNode {
        match stmt.node {
            hir::StmtKind::Decl(ref decl, _) => {
                self.propagate_through_decl(&decl, succ)
            }

            hir::StmtKind::Expr(ref expr, _) | hir::StmtKind::Semi(ref expr, _) => {
                self.propagate_through_expr(&expr, succ)
            }
        }
    }

    /// Propagates through a declaration; nested items do not affect the
    /// enclosing function's liveness, so they pass `succ` through.
    fn propagate_through_decl(&mut self, decl: &hir::Decl, succ: LiveNode)
                              -> LiveNode {
        match decl.node {
            hir::DeclKind::Local(ref local) => {
                self.propagate_through_local(&local, succ)
            }
            hir::DeclKind::Item(_) => succ,
        }
    }

    /// Propagates through a `let`: threads the optional initializer and
    /// the pattern's definition nodes in front of `succ`.
    fn propagate_through_local(&mut self, local: &hir::Local, succ: LiveNode)
                               -> LiveNode {
        // Note: we mark the variable as defined regardless of whether
        // there is an initializer. Initially I had thought to only mark
        // the live variable as defined if it was initialized, and then we
        // could check for uninit variables just by scanning what is live
        // at the start of the function. But that doesn't work so well for
        // immutable variables defined in a loop:
        //     loop { let x; x = 5; }
        // because the "assignment" loops back around and generates an error.
        //
        // So now we just check that variables defined w/o an
        // initializer are not live at the point of their
        // initialization, which is mildly more complex than checking
        // once at the func header but otherwise equivalent.

        let succ = self.propagate_through_opt_expr(local.init.as_ref().map(|e| &**e), succ);
        self.define_bindings_in_pat(&local.pat, succ)
    }

    /// Propagates through a list of expressions, last to first.
    fn propagate_through_exprs(&mut self, exprs: &[Expr], succ: LiveNode)
                               -> LiveNode {
        exprs.iter().rev().fold(succ, |succ, expr| {
            self.propagate_through_expr(&expr, succ)
        })
    }

    /// Propagates through an optional expression; `None` passes `succ`
    /// through unchanged.
    fn propagate_through_opt_expr(&mut self,
                                  opt_expr: Option<&Expr>,
                                  succ: LiveNode)
                                  -> LiveNode {
        opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
    }
1021
    /// Central dispatch of the backwards walk: propagates liveness through
    /// one expression, given `succ`, the live node just *after* it, and
    /// returning the live node just *before* it.
    fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode)
                              -> LiveNode {
        debug!("propagate_through_expr: {}", self.ir.tcx.hir.node_to_pretty_string(expr.id));

        match expr.node {
            // Interesting cases with control flow or which gen/kill

            // A resolved path is a potential variable read.
            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
                self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
            }

            hir::ExprKind::Field(ref e, _) => {
                self.propagate_through_expr(&e, succ)
            }

            hir::ExprKind::Closure(.., blk_id, _, _) => {
                debug!("{} is an ExprKind::Closure",
                       self.ir.tcx.hir.node_to_pretty_string(expr.id));

                // The next-node for a break is the successor of the entire
                // loop. The next-node for a continue is the top of this loop.
                let node = self.live_node(expr.hir_id, expr.span);

                let break_ln = succ;
                let cont_ln = node;
                self.break_ln.insert(blk_id.node_id, break_ln);
                self.cont_ln.insert(blk_id.node_id, cont_ln);

                // the construction of a closure itself is not important,
                // but we have to consider the closed over variables.
                let caps = self.ir.capture_info_map.get(&expr.id).cloned().unwrap_or_else(||
                    span_bug!(expr.span, "no registered caps"));

                // Each captured variable counts as read+used at this point.
                caps.iter().rev().fold(succ, |succ, cap| {
                    self.init_from_succ(cap.ln, succ);
                    let var = self.variable(cap.var_hid, expr.span);
                    self.acc(cap.ln, var, ACC_READ | ACC_USE);
                    cap.ln
                })
            }

            hir::ExprKind::If(ref cond, ref then, ref els) => {
                //
                //     (cond)
                //       |
                //       v
                //     (expr)
                //     /   \
                //    |     |
                //    v     v
                //  (then)(els)
                //    |     |
                //    v     v
                //   (  succ  )
                //
                let else_ln = self.propagate_through_opt_expr(els.as_ref().map(|e| &**e), succ);
                let then_ln = self.propagate_through_expr(&then, succ);
                let ln = self.live_node(expr.hir_id, expr.span);
                self.init_from_succ(ln, else_ln);
                self.merge_from_succ(ln, then_ln, false);
                self.propagate_through_expr(&cond, ln)
            }

            hir::ExprKind::While(ref cond, ref blk, _) => {
                self.propagate_through_loop(expr, WhileLoop(&cond), &blk, succ)
            }

            // Note that labels have been resolved, so we don't need to look
            // at the label ident
            hir::ExprKind::Loop(ref blk, _, _) => {
                self.propagate_through_loop(expr, LoopLoop, &blk, succ)
            }

            hir::ExprKind::Match(ref e, ref arms, _) => {
                //
                //      (e)
                //       |
                //       v
                //     (expr)
                //     / | \
                //    |  |  |
                //    v  v  v
                //   (..arms..)
                //    |  |  |
                //    v  v  v
                //   (  succ  )
                //
                //
                let ln = self.live_node(expr.hir_id, expr.span);
                self.init_empty(ln, succ);
                let mut first_merge = true;
                for arm in arms {
                    // Each arm flows: pattern -> guard -> body -> succ.
                    let body_succ = self.propagate_through_expr(&arm.body, succ);

                    let guard_succ = self.propagate_through_opt_expr(
                        arm.guard.as_ref().map(|hir::Guard::If(e)| &**e),
                        body_succ
                    );
                    // only consider the first pattern; any later patterns must have
                    // the same bindings, and we also consider the first pattern to be
                    // the "authoritative" set of ids
                    let arm_succ =
                        self.define_bindings_in_arm_pats(arm.pats.first().map(|p| &**p),
                                                         guard_succ);
                    self.merge_from_succ(ln, arm_succ, first_merge);
                    first_merge = false;
                };
                self.propagate_through_expr(&e, ln)
            }

            hir::ExprKind::Ret(ref o_e) => {
                // ignore succ and subst exit_ln:
                let exit_ln = self.s.exit_ln;
                self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), exit_ln)
            }

            hir::ExprKind::Break(label, ref opt_expr) => {
                // Find which label this break jumps to
                let target = match label.target_id {
                    Ok(node_id) => self.break_ln.get(&node_id),
                    Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
                }.cloned();

                // Now that we know the label we're going to,
                // look it up in the break loop nodes table

                match target {
                    Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
                    None => span_bug!(expr.span, "break to unknown label")
                }
            }

            hir::ExprKind::Continue(label) => {
                // Find which label this expr continues to
                let sc = label.target_id.unwrap_or_else(|err|
                    span_bug!(expr.span, "loop scope error: {}", err));

                // Now that we know the label we're going to,
                // look it up in the continue loop nodes table
                self.cont_ln.get(&sc).cloned().unwrap_or_else(||
                    span_bug!(expr.span, "continue to unknown label"))
            }

            hir::ExprKind::Assign(ref l, ref r) => {
                // see comment on places in
                // propagate_through_place_components()
                let succ = self.write_place(&l, succ, ACC_WRITE);
                let succ = self.propagate_through_place_components(&l, succ);
                self.propagate_through_expr(&r, succ)
            }

            hir::ExprKind::AssignOp(_, ref l, ref r) => {
                // an overloaded assign op is like a method call
                if self.tables.is_method_call(expr) {
                    let succ = self.propagate_through_expr(&l, succ);
                    self.propagate_through_expr(&r, succ)
                } else {
                    // see comment on places in
                    // propagate_through_place_components()
                    let succ = self.write_place(&l, succ, ACC_WRITE|ACC_READ);
                    let succ = self.propagate_through_expr(&r, succ);
                    self.propagate_through_place_components(&l, succ)
                }
            }

            // Uninteresting cases: just propagate in rev exec order

            hir::ExprKind::Array(ref exprs) => {
                self.propagate_through_exprs(exprs, succ)
            }

            hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
                let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
                fields.iter().rev().fold(succ, |succ, field| {
                    self.propagate_through_expr(&field.expr, succ)
                })
            }

            hir::ExprKind::Call(ref f, ref args) => {
                // A call whose type is `!` diverges, so its successor is the
                // exit node rather than the normal fall-through.
                // FIXME(canndrew): This is_never should really be an is_uninhabited
                let succ = if self.tables.expr_ty(expr).is_never() {
                    self.s.exit_ln
                } else {
                    succ
                };
                let succ = self.propagate_through_exprs(args, succ);
                self.propagate_through_expr(&f, succ)
            }

            hir::ExprKind::MethodCall(.., ref args) => {
                // FIXME(canndrew): This is_never should really be an is_uninhabited
                let succ = if self.tables.expr_ty(expr).is_never() {
                    self.s.exit_ln
                } else {
                    succ
                };

                self.propagate_through_exprs(args, succ)
            }

            hir::ExprKind::Tup(ref exprs) => {
                self.propagate_through_exprs(exprs, succ)
            }

            // Lazy `&&`/`||`: the right operand may be skipped, so merge
            // both the skipping and the non-skipping path.
            hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
                let r_succ = self.propagate_through_expr(&r, succ);

                let ln = self.live_node(expr.hir_id, expr.span);
                self.init_from_succ(ln, succ);
                self.merge_from_succ(ln, r_succ, false);

                self.propagate_through_expr(&l, ln)
            }

            hir::ExprKind::Index(ref l, ref r) |
            hir::ExprKind::Binary(_, ref l, ref r) => {
                let r_succ = self.propagate_through_expr(&r, succ);
                self.propagate_through_expr(&l, r_succ)
            }

            hir::ExprKind::Box(ref e) |
            hir::ExprKind::AddrOf(_, ref e) |
            hir::ExprKind::Cast(ref e, _) |
            hir::ExprKind::Type(ref e, _) |
            hir::ExprKind::Unary(_, ref e) |
            hir::ExprKind::Yield(ref e) |
            hir::ExprKind::Repeat(ref e, _) => {
                self.propagate_through_expr(&e, succ)
            }

            hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => {
                let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| {
                    // see comment on places
                    // in propagate_through_place_components()
                    if o.is_indirect {
                        self.propagate_through_expr(output, succ)
                    } else {
                        let acc = if o.is_rw { ACC_WRITE|ACC_READ } else { ACC_WRITE };
                        let succ = self.write_place(output, succ, acc);
                        self.propagate_through_place_components(output, succ)
                    }});

                // Inputs are executed first. Propagate last because of rev order
                self.propagate_through_exprs(inputs, succ)
            }

            // No variables involved: liveness is unchanged.
            hir::ExprKind::Lit(..) | hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
                succ
            }

            // Note that labels have been resolved, so we don't need to look
            // at the label ident
            hir::ExprKind::Block(ref blk, _) => {
                self.propagate_through_block(&blk, succ)
            }
        }
    }
1278
    /// Generates the *reads* of a place's components (e.g. the `x` in
    /// `x.f` or `x[5]`), ignoring the written-to value itself; see the
    /// long comment below for the overall scheme.
    fn propagate_through_place_components(&mut self,
                                          expr: &Expr,
                                          succ: LiveNode)
                                          -> LiveNode {
        // # Places
        //
        // In general, the full flow graph structure for an
        // assignment/move/etc can be handled in one of two ways,
        // depending on whether what is being assigned is a "tracked
        // value" or not. A tracked value is basically a local
        // variable or argument.
        //
        // The two kinds of graphs are:
        //
        //    Tracked place          Untracked place
        // ----------------------++-----------------------
        //                       ||
        //         |             ||           |
        //         v             ||           v
        //     (rvalue)          ||       (rvalue)
        //         |             ||           |
        //         v             ||           v
        // (write of place)      ||   (place components)
        //         |             ||           |
        //         v             ||           v
        //      (succ)           ||        (succ)
        //                       ||
        // ----------------------++-----------------------
        //
        // I will cover the two cases in turn:
        //
        // # Tracked places
        //
        // A tracked place is a local variable/argument `x`. In
        // these cases, the link_node where the write occurs is linked
        // to node id of `x`. The `write_place()` routine generates
        // the contents of this node. There are no subcomponents to
        // consider.
        //
        // # Non-tracked places
        //
        // These are places like `x[5]` or `x.f`. In that case, we
        // basically ignore the value which is written to but generate
        // reads for the components---`x` in these two examples. The
        // components reads are generated by
        // `propagate_through_place_components()` (this fn).
        //
        // # Illegal places
        //
        // It is still possible to observe assignments to non-places;
        // these errors are detected in the later pass borrowck. We
        // just ignore such cases and treat them as reads.

        match expr.node {
            // A bare path is a tracked place: no components to read.
            hir::ExprKind::Path(_) => succ,
            hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
            _ => self.propagate_through_expr(expr, succ)
        }
    }
1338
1339 // see comment on propagate_through_place()
1340 fn write_place(&mut self, expr: &Expr, succ: LiveNode, acc: u32) -> LiveNode {
1341 match expr.node {
1342 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1343 self.access_path(expr.hir_id, path, succ, acc)
1344 }
1345
1346 // We do not track other places, so just propagate through
1347 // to their subcomponents. Also, it may happen that
1348 // non-places occur here, because those are detected in the
1349 // later pass borrowck.
1350 _ => succ
1351 }
1352 }
1353
1354 fn access_var(&mut self, hir_id: HirId, nid: NodeId, succ: LiveNode, acc: u32, span: Span)
1355 -> LiveNode {
1356 let ln = self.live_node(hir_id, span);
1357 if acc != 0 {
1358 self.init_from_succ(ln, succ);
1359 let var_hid = self.ir.tcx.hir.node_to_hir_id(nid);
1360 let var = self.variable(var_hid, span);
1361 self.acc(ln, var, acc);
1362 }
1363 ln
1364 }
1365
1366 fn access_path(&mut self, hir_id: HirId, path: &hir::Path, succ: LiveNode, acc: u32)
1367 -> LiveNode {
1368 match path.def {
1369 Def::Local(nid) => {
1370 self.access_var(hir_id, nid, succ, acc, path.span)
1371 }
1372 _ => succ
1373 }
1374 }
1375
    /// Propagates liveness backwards through a `while`/`loop` loop body,
    /// iterating the merge until the liveness sets reach a fixed point.
    fn propagate_through_loop(&mut self,
                              expr: &Expr,
                              kind: LoopKind<'_>,
                              body: &hir::Block,
                              succ: LiveNode)
                              -> LiveNode {
        /*

        We model control flow like this:

              (cond) <--+
                |       |
                v       |
          +-- (expr)    |
          |     |       |
          |     v       |
          |   (body) ---+
          |
          |
          v
        (succ)

        */


        // first iteration:
        let mut first_merge = true;
        let ln = self.live_node(expr.hir_id, expr.span);
        self.init_empty(ln, succ);
        match kind {
            LoopLoop => {}
            _ => {
                // If this is not a `loop` loop, then it's possible we bypass
                // the body altogether. Otherwise, the only way is via a `break`
                // in the loop body.
                self.merge_from_succ(ln, succ, first_merge);
                first_merge = false;
            }
        }
        debug!("propagate_through_loop: using id for loop body {} {}",
               expr.id, self.ir.tcx.hir.node_to_pretty_string(body.id));

        // `break` jumps past the loop; `continue` jumps back to its head.
        let break_ln = succ;
        let cont_ln = ln;
        self.break_ln.insert(expr.id, break_ln);
        self.cont_ln.insert(expr.id, cont_ln);

        let cond_ln = match kind {
            LoopLoop => ln,
            WhileLoop(ref cond) => self.propagate_through_expr(&cond, ln),
        };
        let body_ln = self.propagate_through_block(body, cond_ln);

        // repeat until fixed point is reached:
        while self.merge_from_succ(ln, body_ln, first_merge) {
            first_merge = false;

            let new_cond_ln = match kind {
                LoopLoop => ln,
                WhileLoop(ref cond) => {
                    self.propagate_through_expr(&cond, ln)
                }
            };
            // Once the merge stops changing, re-propagating must be stable.
            assert_eq!(cond_ln, new_cond_ln);
            assert_eq!(body_ln, self.propagate_through_block(body, cond_ln));
        }

        cond_ln
    }
1445 }
1446
1447 // _______________________________________________________________________
1448 // Checking for error conditions
1449
// The error-checking pass is a plain HIR walk that delegates each node kind
// to the corresponding `check_*` free function below.
impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
        // `None`: do not descend into nested bodies from this visitor.
        NestedVisitorMap::None
    }

    fn visit_local(&mut self, l: &'tcx hir::Local) {
        check_local(self, l);
    }
    fn visit_expr(&mut self, ex: &'tcx Expr) {
        check_expr(self, ex);
    }
    fn visit_arm(&mut self, a: &'tcx hir::Arm) {
        check_arm(self, a);
    }
}
1465
1466 fn check_local<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, local: &'tcx hir::Local) {
1467 match local.init {
1468 Some(_) => {
1469 this.warn_about_unused_or_dead_vars_in_pat(&local.pat);
1470 },
1471 None => {
1472 this.pat_bindings(&local.pat, |this, ln, var, sp, id| {
1473 let span = local.pat.simple_ident().map_or(sp, |ident| ident.span);
1474 this.warn_about_unused(span, id, ln, var);
1475 })
1476 }
1477 }
1478
1479 intravisit::walk_local(this, local);
1480 }
1481
1482 fn check_arm<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, arm: &'tcx hir::Arm) {
1483 // only consider the first pattern; any later patterns must have
1484 // the same bindings, and we also consider the first pattern to be
1485 // the "authoritative" set of ids
1486 this.arm_pats_bindings(arm.pats.first().map(|p| &**p), |this, ln, var, sp, id| {
1487 this.warn_about_unused(sp, id, ln, var);
1488 });
1489 intravisit::walk_arm(this, arm);
1490 }
1491
/// Per-expression liveness checks: flags dead assignments on assignment
/// targets (and inline-asm outputs), then recurses into subexpressions.
fn check_expr<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, expr: &'tcx Expr) {
    match expr.node {
        hir::ExprKind::Assign(ref l, _) => {
            this.check_place(&l);

            intravisit::walk_expr(this, expr);
        }

        hir::ExprKind::AssignOp(_, ref l, _) => {
            // An overloaded `+=` etc. is a method call, so the place is
            // read rather than overwritten; skip the check in that case.
            if !this.tables.is_method_call(expr) {
                this.check_place(&l);
            }

            intravisit::walk_expr(this, expr);
        }

        hir::ExprKind::InlineAsm(ref ia, ref outputs, ref inputs) => {
            for input in inputs {
                this.visit_expr(input);
            }

            // Output operands must be places
            for (o, output) in ia.outputs.iter().zip(outputs) {
                if !o.is_indirect {
                    this.check_place(output);
                }
                this.visit_expr(output);
            }

            intravisit::walk_expr(this, expr);
        }

        // no correctness conditions related to liveness
        hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) | hir::ExprKind::If(..) |
        hir::ExprKind::Match(..) | hir::ExprKind::While(..) | hir::ExprKind::Loop(..) |
        hir::ExprKind::Index(..) | hir::ExprKind::Field(..) |
        hir::ExprKind::Array(..) | hir::ExprKind::Tup(..) | hir::ExprKind::Binary(..) |
        hir::ExprKind::Cast(..) | hir::ExprKind::Unary(..) | hir::ExprKind::Ret(..) |
        hir::ExprKind::Break(..) | hir::ExprKind::Continue(..) | hir::ExprKind::Lit(_) |
        hir::ExprKind::Block(..) | hir::ExprKind::AddrOf(..) |
        hir::ExprKind::Struct(..) | hir::ExprKind::Repeat(..) |
        hir::ExprKind::Closure(..) | hir::ExprKind::Path(_) | hir::ExprKind::Yield(..) |
        hir::ExprKind::Box(..) | hir::ExprKind::Type(..) => {
            intravisit::walk_expr(this, expr);
        }
    }
}
1539
// Helper methods used by the checking pass to emit the `unused_variables`
// and `unused_assignments` lints.
impl<'a, 'tcx> Liveness<'a, 'tcx> {
    /// Checks an assignment target: for a local variable, reports a dead
    /// assignment when the written value is never subsequently read.
    fn check_place(&mut self, expr: &'tcx Expr) {
        match expr.node {
            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
                if let Def::Local(nid) = path.def {
                    // Assignment to an immutable variable or argument: only legal
                    // if there is no later assignment. If this local is actually
                    // mutable, then check for a reassignment to flag the mutability
                    // as being used.
                    let ln = self.live_node(expr.hir_id, expr.span);
                    let var_hid = self.ir.tcx.hir.node_to_hir_id(nid);
                    let var = self.variable(var_hid, expr.span);
                    self.warn_about_dead_assign(expr.span, expr.hir_id, ln, var);
                }
            }
            _ => {
                // For other kinds of places, no checks are required,
                // and any embedded expressions are actually rvalues
                intravisit::walk_expr(self, expr);
            }
        }
    }

    /// Returns the variable's name when it should be linted about, or
    /// `None` for unnamed variables and names beginning with `_` (the
    /// conventional opt-out).
    fn should_warn(&self, var: Variable) -> Option<String> {
        let name = self.ir.variable_name(var);
        if name.is_empty() || name.as_bytes()[0] == b'_' {
            None
        } else {
            Some(name)
        }
    }

    /// Lints about function arguments that are never used, or whose passed
    /// value is never read.
    fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) {
        for arg in &body.arguments {
            arg.pat.each_binding(|_bm, hir_id, _, ident| {
                let sp = ident.span;
                let var = self.variable(hir_id, sp);
                // Ignore unused self.
                if ident.name != keywords::SelfValue.name() {
                    if !self.warn_about_unused(sp, hir_id, entry_ln, var) {
                        // Argument is used somewhere, but the value passed
                        // in may still never be read.
                        if self.live_on_entry(entry_ln, var).is_none() {
                            self.report_dead_assign(hir_id, sp, var, true);
                        }
                    }
                }
            })
        }
    }

    /// Warns about each binding in `pat` that is unused or, failing that,
    /// whose assignment is dead.
    fn warn_about_unused_or_dead_vars_in_pat(&mut self, pat: &hir::Pat) {
        self.pat_bindings(pat, |this, ln, var, sp, id| {
            if !this.warn_about_unused(sp, id, ln, var) {
                this.warn_about_dead_assign(sp, id, ln, var);
            }
        })
    }

    /// Emits an `unused_variables` lint when `var` is never used on entry
    /// to `ln`; returns `true` iff the variable was indeed unused.
    fn warn_about_unused(&self,
                         sp: Span,
                         hir_id: HirId,
                         ln: LiveNode,
                         var: Variable)
                         -> bool {
        if !self.used_on_entry(ln, var) {
            let r = self.should_warn(var);
            if let Some(name) = r {
                // annoying: for parameters in funcs like `fn(x: i32)
                // {ret}`, there is only one node, so asking about
                // assigned_on_exit() is not meaningful.
                let is_assigned = if ln == self.s.exit_ln {
                    false
                } else {
                    self.assigned_on_exit(ln, var).is_some()
                };

                let suggest_underscore_msg = format!("consider using `_{}` instead", name);

                if is_assigned {
                    self.ir.tcx
                        .lint_hir_note(lint::builtin::UNUSED_VARIABLES, hir_id, sp,
                                       &format!("variable `{}` is assigned to, but never used",
                                                name),
                                       &suggest_underscore_msg);
                } else if name != "self" {
                    let msg = format!("unused variable: `{}`", name);
                    let mut err = self.ir.tcx
                        .struct_span_lint_hir(lint::builtin::UNUSED_VARIABLES, hir_id, sp, &msg);
                    if self.ir.variable_is_shorthand(var) {
                        // For struct-field shorthand bindings, suggest
                        // ignoring the field rather than renaming it.
                        err.span_suggestion_with_applicability(sp, "try ignoring the field",
                                                               format!("{}: _", name),
                                                               Applicability::MachineApplicable);
                    } else {
                        err.span_suggestion_short_with_applicability(
                            sp, &suggest_underscore_msg,
                            format!("_{}", name),
                            Applicability::MachineApplicable,
                        );
                    }
                    err.emit()
                }
            }
            true
        } else {
            false
        }
    }

    /// Emits an `unused_assignments` lint when the value written to `var`
    /// at `ln` is not live on exit (i.e. never read afterwards).
    fn warn_about_dead_assign(&self,
                              sp: Span,
                              hir_id: HirId,
                              ln: LiveNode,
                              var: Variable) {
        if self.live_on_exit(ln, var).is_none() {
            self.report_dead_assign(hir_id, sp, var, false);
        }
    }

    /// Reports a dead assignment, wording the message differently for
    /// function arguments vs. ordinary assignments.
    fn report_dead_assign(&self, hir_id: HirId, sp: Span, var: Variable, is_argument: bool) {
        if let Some(name) = self.should_warn(var) {
            if is_argument {
                self.ir.tcx.lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, sp,
                &format!("value passed to `{}` is never read", name));
            } else {
                self.ir.tcx.lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, sp,
                &format!("value assigned to `{}` is never read", name));
            }
        }
    }
}