]> git.proxmox.com Git - rustc.git/blob - src/librustc_passes/liveness.rs
New upstream version 1.41.1+dfsg1
[rustc.git] / src / librustc_passes / liveness.rs
1 //! A classic liveness analysis based on dataflow over the AST. Computes,
2 //! for each local variable in a function, whether that variable is live
3 //! at a given point. Program execution points are identified by their
4 //! IDs.
5 //!
6 //! # Basic idea
7 //!
8 //! The basic model is that each local variable is assigned an index. We
9 //! represent sets of local variables using a vector indexed by this
10 //! index. The value in the vector is either 0, indicating the variable
11 //! is dead, or the ID of an expression that uses the variable.
12 //!
13 //! We conceptually walk over the AST in reverse execution order. If we
14 //! find a use of a variable, we add it to the set of live variables. If
15 //! we find an assignment to a variable, we remove it from the set of live
16 //! variables. When we have to merge two flows, we take the union of
17 //! those two flows -- if the variable is live on both paths, we simply
18 //! pick one ID. In the event of loops, we continue doing this until a
19 //! fixed point is reached.
20 //!
21 //! ## Checking initialization
22 //!
23 //! At the function entry point, all variables must be dead. If this is
24 //! not the case, we can report an error using the ID found in the set of
25 //! live variables, which identifies a use of the variable which is not
26 //! dominated by an assignment.
27 //!
28 //! ## Checking moves
29 //!
30 //! After each explicit move, the variable must be dead.
31 //!
32 //! ## Computing last uses
33 //!
34 //! Any use of the variable where the variable is dead afterwards is a
35 //! last use.
36 //!
37 //! # Implementation details
38 //!
39 //! The actual implementation contains two (nested) walks over the AST.
40 //! The outer walk has the job of building up the ir_maps instance for the
41 //! enclosing function. On the way down the tree, it identifies those AST
42 //! nodes and variable IDs that will be needed for the liveness analysis
43 //! and assigns them contiguous IDs. The liveness ID for an AST node is
44 //! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
45 //! is called a `variable` (another newtype'd `u32`).
46 //!
47 //! On the way back up the tree, as we are about to exit from a function
48 //! declaration we allocate a `liveness` instance. Now that we know
49 //! precisely how many nodes and variables we need, we can allocate all
50 //! the various arrays that we will need to precisely the right size. We then
51 //! perform the actual propagation on the `liveness` instance.
52 //!
53 //! This propagation is encoded in the various `propagate_through_*()`
54 //! methods. It effectively does a reverse walk of the AST; whenever we
55 //! reach a loop node, we iterate until a fixed point is reached.
56 //!
57 //! ## The `RWU` struct
58 //!
59 //! At each live node `N`, we track three pieces of information for each
60 //! variable `V` (these are encapsulated in the `RWU` struct):
61 //!
62 //! - `reader`: the `LiveNode` ID of some node which will read the value
63 //! that `V` holds on entry to `N`. Formally: a node `M` such
64 //! that there exists a path `P` from `N` to `M` where `P` does not
65 //! write `V`. If the `reader` is `invalid_node()`, then the current
66 //! value will never be read (the variable is dead, essentially).
67 //!
68 //! - `writer`: the `LiveNode` ID of some node which will write the
69 //! variable `V` and which is reachable from `N`. Formally: a node `M`
70 //! such that there exists a path `P` from `N` to `M` and `M` writes
71 //! `V`. If the `writer` is `invalid_node()`, then there is no writer
72 //! of `V` that follows `N`.
73 //!
74 //! - `used`: a boolean value indicating whether `V` is *used*. We
75 //! distinguish a *read* from a *use* in that a *use* is some read that
76 //! is not just used to generate a new value. For example, `x += 1` is
77 //! a read but not a use. This is used to generate better warnings.
78 //!
79 //! ## Special Variables
80 //!
81 //! We generate various special variables for various, well, special purposes.
82 //! These are described in the `specials` struct:
83 //!
84 //! - `exit_ln`: a live node that is generated to represent every 'exit' from
85 //! the function, whether it be by explicit return, panic, or other means.
86 //!
87 //! - `fallthrough_ln`: a live node that represents a fallthrough
88 //!
89 //! - `clean_exit_var`: a synthetic variable that is only 'read' from the
90 //! fallthrough node. It is only live if the function could converge
91 //! via means other than an explicit `return` expression. That is, it is
92 //! only dead if the end of the function's block can never be reached.
93 //! It is the responsibility of typeck to ensure that there are no
94 //! `return` expressions in a function declared as diverging.
95
96 use self::LiveNodeKind::*;
97 use self::VarKind::*;
98
99 use rustc::hir;
100 use rustc::hir::{Expr, HirId};
101 use rustc::hir::def::*;
102 use rustc::hir::def_id::DefId;
103 use rustc::hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap};
104 use rustc::hir::Node;
105 use rustc::hir::ptr::P;
106 use rustc::ty::{self, TyCtxt};
107 use rustc::ty::query::Providers;
108 use rustc::lint;
109 use rustc::util::nodemap::{HirIdMap, HirIdSet};
110
111 use errors::Applicability;
112 use rustc_data_structures::fx::FxIndexMap;
113 use std::collections::VecDeque;
114 use std::{fmt, u32};
115 use std::io::prelude::*;
116 use std::io;
117 use std::rc::Rc;
118 use syntax::ast;
119 use syntax::symbol::sym;
120 use syntax_pos::Span;
121
/// Dense, per-body index identifying one local variable (see `IrMaps::add_variable`).
#[derive(Copy, Clone, PartialEq)]
struct Variable(u32);
124
/// Dense, per-body index identifying one liveness node (an interesting program point).
#[derive(Copy, Clone, PartialEq)]
struct LiveNode(u32);
127
128 impl Variable {
129 fn get(&self) -> usize { self.0 as usize }
130 }
131
132 impl LiveNode {
133 fn get(&self) -> usize { self.0 as usize }
134 }
135
/// Why a live node exists; the span points back at the originating source.
#[derive(Copy, Clone, PartialEq, Debug)]
enum LiveNodeKind {
    // A variable captured by a closure, at its use site inside the closure.
    UpvarNode(Span),
    // An expression that uses a variable or has interesting control flow.
    ExprNode(Span),
    // The definition site of a variable binding.
    VarDefNode(Span),
    // The synthetic exit of the function (return, panic, or fallthrough).
    ExitNode
}
143
144 fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
145 let cm = tcx.sess.source_map();
146 match lnk {
147 UpvarNode(s) => {
148 format!("Upvar node [{}]", cm.span_to_string(s))
149 }
150 ExprNode(s) => {
151 format!("Expr node [{}]", cm.span_to_string(s))
152 }
153 VarDefNode(s) => {
154 format!("Var def node [{}]", cm.span_to_string(s))
155 }
156 ExitNode => "Exit node".to_owned(),
157 }
158 }
159
/// The "outer" walk: drives map construction for every body in a module.
impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
    // Visit nested bodies (e.g. closures) so they get maps too.
    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
        NestedVisitorMap::OnlyBodies(&self.tcx.hir())
    }

    fn visit_fn(&mut self, fk: FnKind<'tcx>, fd: &'tcx hir::FnDecl,
                b: hir::BodyId, s: Span, id: HirId) {
        visit_fn(self, fk, fd, b, s, id);
    }

    // Delegate to the free functions below, which register variables and
    // live nodes before continuing the walk.
    fn visit_local(&mut self, l: &'tcx hir::Local) { visit_local(self, l); }
    fn visit_expr(&mut self, ex: &'tcx Expr) { visit_expr(self, ex); }
    fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); }
}
174
175 fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: DefId) {
176 tcx.hir().visit_item_likes_in_module(
177 module_def_id,
178 &mut IrMaps::new(tcx, module_def_id).as_deep_visitor(),
179 );
180 }
181
182 pub fn provide(providers: &mut Providers<'_>) {
183 *providers = Providers {
184 check_mod_liveness,
185 ..*providers
186 };
187 }
188
// Compact debug form, e.g. `ln(3)`.
impl fmt::Debug for LiveNode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "ln({})", self.get())
    }
}
194
// Compact debug form, e.g. `v(3)`.
impl fmt::Debug for Variable {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "v({})", self.get())
    }
}
200
201 // ______________________________________________________________________
202 // Creating ir_maps
203 //
204 // This is the first pass and the one that drives the main
205 // computation. It walks up and down the IR once. On the way down,
206 // we count for each function the number of variables as well as
207 // liveness nodes. A liveness node is basically an expression or
208 // capture clause that does something of interest: either it has
209 // interesting control flow or it uses/defines a local variable.
210 //
211 // On the way back up, at each function node we create liveness sets
212 // (we now know precisely how big to make our various vectors and so
213 // forth) and then do the data-flow propagation to compute the set
214 // of live variables at each program point.
215 //
216 // Finally, we run back over the IR one last time and, using the
217 // computed liveness, check various safety conditions. For example,
218 // there must be no live nodes at the definition site for a variable
219 // unless it has an initializer. Similarly, each non-mutable local
220 // variable must not be assigned if there is some successor
221 // assignment. And so forth.
222
impl LiveNode {
    /// A node is valid unless it is the `u32::MAX` sentinel produced by
    /// `invalid_node()`.
    fn is_valid(&self) -> bool {
        self.0 != u32::MAX
    }
}
228
/// The sentinel `LiveNode` meaning "no node"; see `LiveNode::is_valid`.
fn invalid_node() -> LiveNode { LiveNode(u32::MAX) }
230
/// One variable captured by a closure: the live node made for the capture
/// and the `HirId` of the captured variable.
struct CaptureInfo {
    ln: LiveNode,
    var_hid: HirId
}
235
/// Metadata for a `VarKind::Local` binding.
#[derive(Copy, Clone, Debug)]
struct LocalInfo {
    id: HirId,
    name: ast::Name,
    // True when the binding came from a shorthand struct-pattern field
    // (`x` rather than `x: x`); used to tailor unused-variable lints.
    is_shorthand: bool,
}
242
/// How a `Variable` entered the analysis.
#[derive(Copy, Clone, Debug)]
enum VarKind {
    // A function/closure parameter.
    Param(HirId, ast::Name),
    // A `let` or pattern binding.
    Local(LocalInfo),
    // The synthetic clean-exit variable (see the module docs).
    CleanExit
}
249
/// Per-body state built by the first (outer) walk: ID assignment for live
/// nodes and variables, plus capture information for closures.
struct IrMaps<'tcx> {
    tcx: TyCtxt<'tcx>,
    // Owner of the body currently being walked; swapped when descending
    // into closures.
    body_owner: DefId,
    num_live_nodes: usize,
    num_vars: usize,
    // HIR node -> its live node (only nodes that need one are present).
    live_node_map: HirIdMap<LiveNode>,
    // HIR binding -> its variable index.
    variable_map: HirIdMap<Variable>,
    // Closure expression -> variables it captures.
    capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
    // Indexed by `Variable::get()`.
    var_kinds: Vec<VarKind>,
    // Indexed by `LiveNode::get()`.
    lnks: Vec<LiveNodeKind>,
}
261
impl IrMaps<'tcx> {
    /// Creates an empty set of maps for the body owned by `body_owner`.
    fn new(tcx: TyCtxt<'tcx>, body_owner: DefId) -> IrMaps<'tcx> {
        IrMaps {
            tcx,
            body_owner,
            num_live_nodes: 0,
            num_vars: 0,
            live_node_map: HirIdMap::default(),
            variable_map: HirIdMap::default(),
            capture_info_map: Default::default(),
            var_kinds: Vec::new(),
            lnks: Vec::new(),
        }
    }

    /// Allocates the next contiguous `LiveNode` ID and records its kind.
    fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
        let ln = LiveNode(self.num_live_nodes as u32);
        self.lnks.push(lnk);
        self.num_live_nodes += 1;

        debug!("{:?} is of kind {}", ln,
               live_node_kind_to_string(lnk, self.tcx));

        ln
    }

    /// Allocates a live node and associates it with the HIR node `hir_id`.
    fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
        let ln = self.add_live_node(lnk);
        self.live_node_map.insert(hir_id, ln);

        debug!("{:?} is node {:?}", ln, hir_id);
    }

    /// Allocates the next contiguous `Variable` ID. Params and locals are
    /// also entered into `variable_map`; `CleanExit` is purely synthetic
    /// and has no HIR counterpart.
    fn add_variable(&mut self, vk: VarKind) -> Variable {
        let v = Variable(self.num_vars as u32);
        self.var_kinds.push(vk);
        self.num_vars += 1;

        match vk {
            Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) => {
                self.variable_map.insert(node_id, v);
            },
            CleanExit => {}
        }

        debug!("{:?} is {:?}", v, vk);

        v
    }

    /// Looks up the `Variable` registered for `hir_id`; ICEs if there is
    /// none (the two walks must agree on which bindings get variables).
    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
        match self.variable_map.get(&hir_id) {
            Some(&var) => var,
            None => {
                span_bug!(span, "no variable registered for id {:?}", hir_id);
            }
        }
    }

    /// Human-readable name of `var` for diagnostics.
    fn variable_name(&self, var: Variable) -> String {
        match self.var_kinds[var.get()] {
            Local(LocalInfo { name, .. }) | Param(_, name) => {
                name.to_string()
            },
            CleanExit => "<clean-exit>".to_owned()
        }
    }

    /// Whether `var` came from a shorthand struct-pattern field.
    fn variable_is_shorthand(&self, var: Variable) -> bool {
        match self.var_kinds[var.get()] {
            Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
            Param(..) | CleanExit => false
        }
    }

    /// Records the variables captured by the closure expression `hir_id`.
    fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
        self.capture_info_map.insert(hir_id, Rc::new(cs));
    }

    /// Returns the kind recorded for `ln` by `add_live_node`.
    fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
        self.lnks[ln.get()]
    }
}
345
/// Builds the IR maps for one function body, runs the liveness propagation
/// on them, and then checks the body for liveness-related warnings.
fn visit_fn<'tcx>(
    ir: &mut IrMaps<'tcx>,
    fk: FnKind<'tcx>,
    decl: &'tcx hir::FnDecl,
    body_id: hir::BodyId,
    sp: Span,
    id: hir::HirId,
) {
    debug!("visit_fn");

    // swap in a new set of IR maps for this function body:
    let def_id = ir.tcx.hir().local_def_id(id);
    let mut fn_maps = IrMaps::new(ir.tcx, def_id);

    // Don't run unused pass for #[derive()]: methods generated by derive
    // expansions would otherwise produce spurious unused warnings.
    if let FnKind::Method(..) = fk {
        let parent = ir.tcx.hir().get_parent_item(id);
        if let Some(Node::Item(i)) = ir.tcx.hir().find(parent) {
            if i.attrs.iter().any(|a| a.check_name(sym::automatically_derived)) {
                return;
            }
        }
    }

    debug!("creating fn_maps: {:p}", &fn_maps);

    let body = ir.tcx.hir().body(body_id);

    // Register a variable for every parameter binding. A struct pattern
    // parameter counts its bindings as shorthand locals so lints can
    // phrase suggestions correctly.
    for param in &body.params {
        let is_shorthand = match param.pat.kind {
            rustc::hir::PatKind::Struct(..) => true,
            _ => false,
        };
        param.pat.each_binding(|_bm, hir_id, _x, ident| {
            debug!("adding parameters {:?}", hir_id);
            let var = if is_shorthand {
                Local(LocalInfo {
                    id: hir_id,
                    name: ident.name,
                    is_shorthand: true,
                })
            } else {
                Param(hir_id, ident.name)
            };
            fn_maps.add_variable(var);
        })
    };

    // gather up the various local variables, significant expressions,
    // and so forth:
    intravisit::walk_fn(&mut fn_maps, fk, decl, body_id, sp, id);

    // compute liveness
    let mut lsets = Liveness::new(&mut fn_maps, body_id);
    let entry_ln = lsets.compute(&body.value);

    // check for various error conditions
    lsets.visit_body(body);
    lsets.warn_about_unused_args(body, entry_ln);
}
406
407 fn add_from_pat(ir: &mut IrMaps<'_>, pat: &P<hir::Pat>) {
408 // For struct patterns, take note of which fields used shorthand
409 // (`x` rather than `x: x`).
410 let mut shorthand_field_ids = HirIdSet::default();
411 let mut pats = VecDeque::new();
412 pats.push_back(pat);
413 while let Some(pat) = pats.pop_front() {
414 use rustc::hir::PatKind::*;
415 match &pat.kind {
416 Binding(.., inner_pat) => {
417 pats.extend(inner_pat.iter());
418 }
419 Struct(_, fields, _) => {
420 let ids = fields.iter().filter(|f| f.is_shorthand).map(|f| f.pat.hir_id);
421 shorthand_field_ids.extend(ids);
422 }
423 Ref(inner_pat, _) | Box(inner_pat) => {
424 pats.push_back(inner_pat);
425 }
426 TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
427 pats.extend(inner_pats.iter());
428 }
429 Slice(pre_pats, inner_pat, post_pats) => {
430 pats.extend(pre_pats.iter());
431 pats.extend(inner_pat.iter());
432 pats.extend(post_pats.iter());
433 }
434 _ => {}
435 }
436 }
437
438 pat.each_binding(|_, hir_id, _, ident| {
439 ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
440 ir.add_variable(Local(LocalInfo {
441 id: hir_id,
442 name: ident.name,
443 is_shorthand: shorthand_field_ids.contains(&hir_id)
444 }));
445 });
446 }
447
/// Registers the bindings introduced by a `let`, then continues the walk.
fn visit_local<'tcx>(ir: &mut IrMaps<'tcx>, local: &'tcx hir::Local) {
    add_from_pat(ir, &local.pat);
    intravisit::walk_local(ir, local);
}
452
/// Registers the bindings introduced by a match arm, then continues the walk.
fn visit_arm<'tcx>(ir: &mut IrMaps<'tcx>, arm: &'tcx hir::Arm) {
    add_from_pat(ir, &arm.pat);
    intravisit::walk_arm(ir, arm);
}
457
/// Registers live nodes for expressions that use variables or have
/// interesting control flow, then continues the walk.
fn visit_expr<'tcx>(ir: &mut IrMaps<'tcx>, expr: &'tcx Expr) {
    match expr.kind {
        // live nodes required for uses or definitions of variables:
        hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
            debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
            if let Res::Local(var_hir_id) = path.res {
                let upvars = ir.tcx.upvars(ir.body_owner);
                // Captured variables are handled through the enclosing
                // closure's capture list instead of a node here.
                if !upvars.map_or(false, |upvars| upvars.contains_key(&var_hir_id)) {
                    ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
                }
            }
            intravisit::walk_expr(ir, expr);
        }
        hir::ExprKind::Closure(..) => {
            // Interesting control flow (for loops can contain labeled
            // breaks or continues)
            ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));

            // Make a live_node for each captured variable, with the span
            // being the location that the variable is used. This results
            // in better error messages than just pointing at the closure
            // construction site.
            let mut call_caps = Vec::new();
            let closure_def_id = ir.tcx.hir().local_def_id(expr.hir_id);
            if let Some(upvars) = ir.tcx.upvars(closure_def_id) {
                let parent_upvars = ir.tcx.upvars(ir.body_owner);
                call_caps.extend(upvars.iter().filter_map(|(&var_id, upvar)| {
                    // Skip variables the *enclosing* body also captures;
                    // those belong to an outer closure's capture list.
                    let has_parent = parent_upvars
                        .map_or(false, |upvars| upvars.contains_key(&var_id));
                    if !has_parent {
                        let upvar_ln = ir.add_live_node(UpvarNode(upvar.span));
                        Some(CaptureInfo { ln: upvar_ln, var_hid: var_id })
                    } else {
                        None
                    }
                }));
            }
            ir.set_captures(expr.hir_id, call_caps);
            // Walk the closure body with the closure as body owner, then
            // restore the previous owner.
            let old_body_owner = ir.body_owner;
            ir.body_owner = closure_def_id;
            intravisit::walk_expr(ir, expr);
            ir.body_owner = old_body_owner;
        }

        // live nodes required for interesting control flow:
        hir::ExprKind::Match(..) |
        hir::ExprKind::Loop(..) => {
            ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
            intravisit::walk_expr(ir, expr);
        }
        // `&&` / `||` short-circuit, so the RHS may not execute.
        hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
            ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
            intravisit::walk_expr(ir, expr);
        }

        // otherwise, live nodes are not required:
        hir::ExprKind::Index(..) |
        hir::ExprKind::Field(..) |
        hir::ExprKind::Array(..) |
        hir::ExprKind::Call(..) |
        hir::ExprKind::MethodCall(..) |
        hir::ExprKind::Tup(..) |
        hir::ExprKind::Binary(..) |
        hir::ExprKind::AddrOf(..) |
        hir::ExprKind::Cast(..) |
        hir::ExprKind::DropTemps(..) |
        hir::ExprKind::Unary(..) |
        hir::ExprKind::Break(..) |
        hir::ExprKind::Continue(_) |
        hir::ExprKind::Lit(_) |
        hir::ExprKind::Ret(..) |
        hir::ExprKind::Block(..) |
        hir::ExprKind::Assign(..) |
        hir::ExprKind::AssignOp(..) |
        hir::ExprKind::Struct(..) |
        hir::ExprKind::Repeat(..) |
        hir::ExprKind::InlineAsm(..) |
        hir::ExprKind::Box(..) |
        hir::ExprKind::Yield(..) |
        hir::ExprKind::Type(..) |
        hir::ExprKind::Err |
        hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
            intravisit::walk_expr(ir, expr);
        }
    }
}
544
545 // ______________________________________________________________________
546 // Computing liveness sets
547 //
548 // Actually we compute just a bit more than just liveness, but we use
549 // the same basic propagation framework in all cases.
550
/// Per (live node, variable) dataflow facts; see the module docs for the
/// precise meaning of each field.
#[derive(Clone, Copy)]
struct RWU {
    // Node that may read the value on entry, or `invalid_node()`.
    reader: LiveNode,
    // Reachable node that writes the variable, or `invalid_node()`.
    writer: LiveNode,
    // Whether the variable is ever *used* (not merely read to rebuild itself).
    used: bool
}
557
/// Conceptually, this is like a `Vec<RWU>`. But the number of `RWU`s can get
/// very large, so it uses a more compact representation that takes advantage
/// of the fact that when the number of `RWU`s is large, most of them have an
/// invalid reader and an invalid writer.
struct RWUTable {
    /// Each entry in `packed_rwus` is either INV_INV_FALSE, INV_INV_TRUE, or
    /// an index into `unpacked_rwus`. In the common cases, this compacts the
    /// 65 bits of data into 32; in the uncommon cases, it expands the 65 bits
    /// into 96.
    ///
    /// More compact representations are possible -- e.g., use only 2 bits per
    /// packed `RWU` and make the secondary table a HashMap that maps from
    /// indices to `RWU`s -- but this one strikes a good balance between size
    /// and speed.
    packed_rwus: Vec<u32>,
    unpacked_rwus: Vec<RWU>,
}
575
// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: false }`.
const INV_INV_FALSE: u32 = u32::MAX;

// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: true }`.
const INV_INV_TRUE: u32 = u32::MAX - 1;
581
582 impl RWUTable {
583 fn new(num_rwus: usize) -> RWUTable {
584 Self {
585 packed_rwus: vec![INV_INV_FALSE; num_rwus],
586 unpacked_rwus: vec![],
587 }
588 }
589
590 fn get(&self, idx: usize) -> RWU {
591 let packed_rwu = self.packed_rwus[idx];
592 match packed_rwu {
593 INV_INV_FALSE => RWU { reader: invalid_node(), writer: invalid_node(), used: false },
594 INV_INV_TRUE => RWU { reader: invalid_node(), writer: invalid_node(), used: true },
595 _ => self.unpacked_rwus[packed_rwu as usize],
596 }
597 }
598
599 fn get_reader(&self, idx: usize) -> LiveNode {
600 let packed_rwu = self.packed_rwus[idx];
601 match packed_rwu {
602 INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
603 _ => self.unpacked_rwus[packed_rwu as usize].reader,
604 }
605 }
606
607 fn get_writer(&self, idx: usize) -> LiveNode {
608 let packed_rwu = self.packed_rwus[idx];
609 match packed_rwu {
610 INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
611 _ => self.unpacked_rwus[packed_rwu as usize].writer,
612 }
613 }
614
615 fn get_used(&self, idx: usize) -> bool {
616 let packed_rwu = self.packed_rwus[idx];
617 match packed_rwu {
618 INV_INV_FALSE => false,
619 INV_INV_TRUE => true,
620 _ => self.unpacked_rwus[packed_rwu as usize].used,
621 }
622 }
623
624 #[inline]
625 fn copy_packed(&mut self, dst_idx: usize, src_idx: usize) {
626 self.packed_rwus[dst_idx] = self.packed_rwus[src_idx];
627 }
628
629 fn assign_unpacked(&mut self, idx: usize, rwu: RWU) {
630 if rwu.reader == invalid_node() && rwu.writer == invalid_node() {
631 // When we overwrite an indexing entry in `self.packed_rwus` with
632 // `INV_INV_{TRUE,FALSE}` we don't remove the corresponding entry
633 // from `self.unpacked_rwus`; it's not worth the effort, and we
634 // can't have entries shifting around anyway.
635 self.packed_rwus[idx] = if rwu.used {
636 INV_INV_TRUE
637 } else {
638 INV_INV_FALSE
639 }
640 } else {
641 // Add a new RWU to `unpacked_rwus` and make `packed_rwus[idx]`
642 // point to it.
643 self.packed_rwus[idx] = self.unpacked_rwus.len() as u32;
644 self.unpacked_rwus.push(rwu);
645 }
646 }
647
648 fn assign_inv_inv(&mut self, idx: usize) {
649 self.packed_rwus[idx] = if self.get_used(idx) {
650 INV_INV_TRUE
651 } else {
652 INV_INV_FALSE
653 };
654 }
655 }
656
/// The synthetic nodes and variable created for every body; see the
/// "Special Variables" section of the module docs.
#[derive(Copy, Clone)]
struct Specials {
    exit_ln: LiveNode,
    fallthrough_ln: LiveNode,
    clean_exit_var: Variable
}
663
// Bit flags describing how a node accesses a variable; combined and passed
// to `Liveness::acc`.
const ACC_READ: u32 = 1;
const ACC_WRITE: u32 = 2;
const ACC_USE: u32 = 4;
667
/// State for the backwards dataflow propagation over one body.
struct Liveness<'a, 'tcx> {
    ir: &'a mut IrMaps<'tcx>,
    tables: &'a ty::TypeckTables<'tcx>,
    s: Specials,
    // Successor of each live node, indexed by `LiveNode::get()`.
    successors: Vec<LiveNode>,
    // Row-major table of one `RWU` per (live node, variable); see `idx`.
    rwu_table: RWUTable,

    // mappings from loop node ID to LiveNode
    // ("break" label should map to loop node ID,
    // it probably doesn't now)
    break_ln: HirIdMap<LiveNode>,
    cont_ln: HirIdMap<LiveNode>,
}
681
682 impl<'a, 'tcx> Liveness<'a, 'tcx> {
    /// Allocates the propagation state for one body: the special exit and
    /// fallthrough nodes plus an RWU table of `num_live_nodes * num_vars`
    /// entries.
    fn new(ir: &'a mut IrMaps<'tcx>, body: hir::BodyId) -> Liveness<'a, 'tcx> {
        // Special nodes and variables:
        // - exit_ln represents the end of the fn, either by return or panic
        // - fallthrough_ln represents falling off the end of the fn
        // - clean_exit_var is a pseudo-variable "read" only by the
        //   fallthrough node, so it is live iff the body can converge
        let specials = Specials {
            exit_ln: ir.add_live_node(ExitNode),
            fallthrough_ln: ir.add_live_node(ExitNode),
            clean_exit_var: ir.add_variable(CleanExit)
        };

        let tables = ir.tcx.body_tables(body);

        let num_live_nodes = ir.num_live_nodes;
        let num_vars = ir.num_vars;

        Liveness {
            ir,
            tables,
            s: specials,
            successors: vec![invalid_node(); num_live_nodes],
            rwu_table: RWUTable::new(num_live_nodes * num_vars),
            break_ln: Default::default(),
            cont_ln: Default::default(),
        }
    }
709
    /// Maps a HIR node back to the live node registered for it during map
    /// construction; ICEs if none was registered.
    fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
        match self.ir.live_node_map.get(&hir_id) {
            Some(&ln) => ln,
            None => {
                // This must be a mismatch between the ir_map construction
                // above and the propagation code below; the two sets of
                // code have to agree about which AST nodes are worth
                // creating liveness nodes for.
                span_bug!(
                    span,
                    "no live node registered for node {:?}",
                    hir_id);
            }
        }
    }
725
    /// Delegates to `IrMaps::variable`; ICEs if `hir_id` has no variable.
    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
        self.ir.variable(hir_id, span)
    }
729
    /// Threads liveness through every binding defined by `pat`, returning
    /// the live node that precedes all of them.
    fn define_bindings_in_pat(&mut self, pat: &hir::Pat, mut succ: LiveNode) -> LiveNode {
        // In an or-pattern, only consider the first pattern; any later patterns
        // must have the same bindings, and we also consider the first pattern
        // to be the "authoritative" set of ids.
        pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
            let ln = self.live_node(hir_id, pat_sp);
            let var = self.variable(hir_id, ident.span);
            self.init_from_succ(ln, succ);
            self.define(ln, var);
            succ = ln;
        });
        succ
    }
743
    /// Flattens a (live node, variable) pair into an index into the
    /// row-major `rwu_table`.
    fn idx(&self, ln: LiveNode, var: Variable) -> usize {
        ln.get() * self.ir.num_vars + var.get()
    }
747
    /// If `var` is live on entry to `ln`, returns the kind of the node
    /// that will read it; otherwise `None`.
    fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
        assert!(ln.is_valid());
        let reader = self.rwu_table.get_reader(self.idx(ln, var));
        if reader.is_valid() { Some(self.ir.lnk(reader)) } else { None }
    }
753
    // Is this variable live on entry to any of its successor nodes?
    /// Equivalent to `live_on_entry` at `ln`'s recorded successor.
    fn live_on_exit(&self, ln: LiveNode, var: Variable)
                    -> Option<LiveNodeKind> {
        let successor = self.successors[ln.get()];
        self.live_on_entry(successor, var)
    }
760
    /// Whether `var` carries the *used* flag on entry to `ln`.
    fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
        assert!(ln.is_valid());
        self.rwu_table.get_used(self.idx(ln, var))
    }
765
    /// If `var` has a reachable writer as of entry to `ln`, returns that
    /// writer's kind; otherwise `None`.
    fn assigned_on_entry(&self, ln: LiveNode, var: Variable)
                         -> Option<LiveNodeKind> {
        assert!(ln.is_valid());
        let writer = self.rwu_table.get_writer(self.idx(ln, var));
        if writer.is_valid() { Some(self.ir.lnk(writer)) } else { None }
    }
772
    /// Equivalent to `assigned_on_entry` at `ln`'s recorded successor.
    fn assigned_on_exit(&self, ln: LiveNode, var: Variable)
                        -> Option<LiveNodeKind> {
        let successor = self.successors[ln.get()];
        self.assigned_on_entry(successor, var)
    }
778
    /// Invokes `op` once per variable with the flat table indices of
    /// (`ln`, var) and (`succ_ln`, var).
    fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F) where
        F: FnMut(&mut Liveness<'a, 'tcx>, usize, usize),
    {
        let node_base_idx = self.idx(ln, Variable(0));
        let succ_base_idx = self.idx(succ_ln, Variable(0));
        for var_idx in 0..self.ir.num_vars {
            op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
        }
    }
788
    /// Debug helper: writes every variable for which `test` yields a valid
    /// node at `ln`.
    fn write_vars<F>(&self,
                     wr: &mut dyn Write,
                     ln: LiveNode,
                     mut test: F)
                     -> io::Result<()> where
        F: FnMut(usize) -> LiveNode,
    {
        let node_base_idx = self.idx(ln, Variable(0));
        for var_idx in 0..self.ir.num_vars {
            let idx = node_base_idx + var_idx;
            if test(idx).is_valid() {
                write!(wr, " {:?}", Variable(var_idx as u32))?;
            }
        }
        Ok(())
    }
805
806
    /// Debug rendering of one live node: its kind, the variables it reads
    /// and writes, and its successor. Write errors are impossible on the
    /// in-memory buffer, hence the allow.
    #[allow(unused_must_use)]
    fn ln_str(&self, ln: LiveNode) -> String {
        let mut wr = Vec::new();
        {
            let wr = &mut wr as &mut dyn Write;
            write!(wr, "[ln({:?}) of kind {:?} reads", ln.get(), self.ir.lnk(ln));
            self.write_vars(wr, ln, |idx| self.rwu_table.get_reader(idx));
            write!(wr, " writes");
            self.write_vars(wr, ln, |idx| self.rwu_table.get_writer(idx));
            write!(wr, " precedes {:?}]", self.successors[ln.get()]);
        }
        String::from_utf8(wr).unwrap()
    }
820
    /// Records `succ_ln` as the successor of `ln` without copying any RWUs.
    fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
        self.successors[ln.get()] = succ_ln;

        // It is not necessary to initialize the RWUs here because they are all
        // set to INV_INV_FALSE when they are created, and the sets only grow
        // during iterations.
    }
828
    /// Sets `succ_ln` as the successor of `ln` and copies its entire RWU
    /// row into `ln`'s.
    fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
        // more efficient version of init_empty() / merge_from_succ()
        self.successors[ln.get()] = succ_ln;

        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
            this.rwu_table.copy_packed(idx, succ_idx);
        });
        debug!("init_from_succ(ln={}, succ={})",
               self.ln_str(ln), self.ln_str(succ_ln));
    }
839
840 fn merge_from_succ(&mut self,
841 ln: LiveNode,
842 succ_ln: LiveNode,
843 first_merge: bool)
844 -> bool {
845 if ln == succ_ln { return false; }
846
847 let mut changed = false;
848 self.indices2(ln, succ_ln, |this, idx, succ_idx| {
849 let mut rwu = this.rwu_table.get(idx);
850 let succ_rwu = this.rwu_table.get(succ_idx);
851 if succ_rwu.reader.is_valid() && !rwu.reader.is_valid() {
852 rwu.reader = succ_rwu.reader;
853 changed = true
854 }
855
856 if succ_rwu.writer.is_valid() && !rwu.writer.is_valid() {
857 rwu.writer = succ_rwu.writer;
858 changed = true
859 }
860
861 if succ_rwu.used && !rwu.used {
862 rwu.used = true;
863 changed = true;
864 }
865
866 if changed {
867 this.rwu_table.assign_unpacked(idx, rwu);
868 }
869 });
870
871 debug!("merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})",
872 ln, self.ln_str(succ_ln), first_merge, changed);
873 return changed;
874 }
875
    // Indicates that a local variable was *defined*; we know that no
    // uses of the variable can precede the definition (resolve checks
    // this) so we just clear out all the data.
    fn define(&mut self, writer: LiveNode, var: Variable) {
        let idx = self.idx(writer, var);
        self.rwu_table.assign_inv_inv(idx);

        debug!("{:?} defines {:?} (idx={}): {}", writer, var,
               idx, self.ln_str(writer));
    }
886
    // Either read, write, or both depending on the acc bitset
    /// Records an access (`ACC_READ` / `ACC_WRITE` / `ACC_USE` bits) to
    /// `var` at node `ln`, updating its RWU entry.
    fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
        debug!("{:?} accesses[{:x}] {:?}: {}",
               ln, acc, var, self.ln_str(ln));

        let idx = self.idx(ln, var);
        let mut rwu = self.rwu_table.get(idx);

        // A write kills any pending reader and records this node as writer.
        if (acc & ACC_WRITE) != 0 {
            rwu.reader = invalid_node();
            rwu.writer = ln;
        }

        // Important: if we both read/write, must do read second
        // or else the write will override.
        if (acc & ACC_READ) != 0 {
            rwu.reader = ln;
        }

        if (acc & ACC_USE) != 0 {
            rwu.used = true;
        }

        self.rwu_table.assign_unpacked(idx, rwu);
    }
912
    /// Runs the backward propagation over the whole body, returning the
    /// live node at the body's entry.
    fn compute(&mut self, body: &hir::Expr) -> LiveNode {
        debug!("compute: using id for body, {}",
               self.ir.tcx.hir().hir_to_pretty_string(body.hir_id));

        // the fallthrough exit is only for those cases where we do not
        // explicitly return:
        let s = self.s;
        self.init_from_succ(s.fallthrough_ln, s.exit_ln);
        self.acc(s.fallthrough_ln, s.clean_exit_var, ACC_READ);

        let entry_ln = self.propagate_through_expr(body, s.fallthrough_ln);

        // hack to skip the loop unless debug! is enabled:
        debug!("^^ liveness computation results for body {} (entry={:?})", {
                   for ln_idx in 0..self.ir.num_live_nodes {
                       debug!("{:?}", self.ln_str(LiveNode(ln_idx as u32)));
                   }
                   body.hir_id
               },
               entry_ln);

        entry_ln
    }
936
    /// Propagates liveness backwards through a block: tail expression
    /// first, then statements in reverse order.
    fn propagate_through_block(&mut self, blk: &hir::Block, succ: LiveNode)
                               -> LiveNode {
        // A `break` out of this labeled block jumps straight to `succ`.
        if blk.targeted_by_break {
            self.break_ln.insert(blk.hir_id, succ);
        }
        let succ = self.propagate_through_opt_expr(blk.expr.as_ref().map(|e| &**e), succ);
        blk.stmts.iter().rev().fold(succ, |succ, stmt| {
            self.propagate_through_stmt(stmt, succ)
        })
    }
947
    /// Propagates liveness backwards through a single statement.
    fn propagate_through_stmt(&mut self, stmt: &hir::Stmt, succ: LiveNode)
                              -> LiveNode {
        match stmt.kind {
            hir::StmtKind::Local(ref local) => {
                // Note: we mark the variable as defined regardless of whether
                // there is an initializer. Initially I had thought to only mark
                // the live variable as defined if it was initialized, and then we
                // could check for uninit variables just by scanning what is live
                // at the start of the function. But that doesn't work so well for
                // immutable variables defined in a loop:
                //     loop { let x; x = 5; }
                // because the "assignment" loops back around and generates an error.
                //
                // So now we just check that variables defined w/o an
                // initializer are not live at the point of their
                // initialization, which is mildly more complex than checking
                // once at the func header but otherwise equivalent.

                let succ = self.propagate_through_opt_expr(local.init.as_ref().map(|e| &**e), succ);
                self.define_bindings_in_pat(&local.pat, succ)
            }
            // Nested items have their own bodies; nothing flows through them.
            hir::StmtKind::Item(..) => succ,
            hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
                self.propagate_through_expr(&expr, succ)
            }
        }
    }
975
976 fn propagate_through_exprs(&mut self, exprs: &[Expr], succ: LiveNode)
977 -> LiveNode {
978 exprs.iter().rev().fold(succ, |succ, expr| {
979 self.propagate_through_expr(&expr, succ)
980 })
981 }
982
983 fn propagate_through_opt_expr(&mut self,
984 opt_expr: Option<&Expr>,
985 succ: LiveNode)
986 -> LiveNode {
987 opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
988 }
989
    /// Core of the backwards walk: given `succ`, the live node executed
    /// *after* `expr`, computes and returns the live node at which `expr`
    /// begins executing. Control-flow constructs (loops, matches, lazy
    /// operators, break/continue/return) get dedicated live nodes; simple
    /// expressions just thread `succ` through their subexpressions in
    /// reverse execution order.
    fn propagate_through_expr(&mut self, expr: &Expr, succ: LiveNode) -> LiveNode {
        debug!("propagate_through_expr: {}", self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id));

        match expr.kind {
            // Interesting cases with control flow or which gen/kill
            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
                self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
            }

            hir::ExprKind::Field(ref e, _) => {
                self.propagate_through_expr(&e, succ)
            }

            hir::ExprKind::Closure(..) => {
                debug!("{} is an ExprKind::Closure",
                       self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id));

                // the construction of a closure itself is not important,
                // but we have to consider the closed over variables.
                let caps = self.ir.capture_info_map.get(&expr.hir_id).cloned().unwrap_or_else(||
                    span_bug!(expr.span, "no registered caps"));

                // Each capture counts as a read+use of the captured
                // variable at the closure-construction site.
                caps.iter().rev().fold(succ, |succ, cap| {
                    self.init_from_succ(cap.ln, succ);
                    let var = self.variable(cap.var_hid, expr.span);
                    self.acc(cap.ln, var, ACC_READ | ACC_USE);
                    cap.ln
                })
            }

            // Note that labels have been resolved, so we don't need to look
            // at the label ident
            hir::ExprKind::Loop(ref blk, _, _) => {
                self.propagate_through_loop(expr, &blk, succ)
            }

            hir::ExprKind::Match(ref e, ref arms, _) => {
                //
                //      (e)
                //       |
                //       v
                //     (expr)
                //     / | \
                //    |  |  |
                //    v  v  v
                //   (..arms..)
                //    |  |  |
                //    v  v  v
                //   (  succ  )
                //
                //
                // `ln` is the merge point of all the arms; each arm is
                // walked body-first, then guard, then pattern bindings.
                let ln = self.live_node(expr.hir_id, expr.span);
                self.init_empty(ln, succ);
                let mut first_merge = true;
                for arm in arms {
                    let body_succ = self.propagate_through_expr(&arm.body, succ);

                    let guard_succ = self.propagate_through_opt_expr(
                        arm.guard.as_ref().map(|hir::Guard::If(e)| &**e),
                        body_succ
                    );
                    let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
                    self.merge_from_succ(ln, arm_succ, first_merge);
                    first_merge = false;
                };
                self.propagate_through_expr(&e, ln)
            }

            hir::ExprKind::Ret(ref o_e) => {
                // ignore succ and subst exit_ln:
                let exit_ln = self.s.exit_ln;
                self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), exit_ln)
            }

            hir::ExprKind::Break(label, ref opt_expr) => {
                // Find which label this break jumps to
                let target = match label.target_id {
                    Ok(hir_id) => self.break_ln.get(&hir_id),
                    Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
                }.cloned();

                // Now that we know the label we're going to,
                // look it up in the break loop nodes table

                match target {
                    Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
                    None => {
                        // FIXME: This should have been checked earlier. Once this is fixed,
                        // replace with `delay_span_bug`. (#62480)
                        self.ir.tcx.sess.struct_span_err(
                            expr.span,
                            "`break` to unknown label",
                        ).emit();
                        errors::FatalError.raise()
                    }
                }
            }

            hir::ExprKind::Continue(label) => {
                // Find which label this expr continues to
                let sc = label.target_id.unwrap_or_else(|err|
                    span_bug!(expr.span, "loop scope error: {}", err));

                // Now that we know the label we're going to,
                // look it up in the continue loop nodes table
                self.cont_ln.get(&sc).cloned().unwrap_or_else(||
                    span_bug!(expr.span, "continue to unknown label"))
            }

            hir::ExprKind::Assign(ref l, ref r) => {
                // see comment on places in
                // propagate_through_place_components()
                let succ = self.write_place(&l, succ, ACC_WRITE);
                let succ = self.propagate_through_place_components(&l, succ);
                self.propagate_through_expr(&r, succ)
            }

            hir::ExprKind::AssignOp(_, ref l, ref r) => {
                // an overloaded assign op is like a method call
                if self.tables.is_method_call(expr) {
                    let succ = self.propagate_through_expr(&l, succ);
                    self.propagate_through_expr(&r, succ)
                } else {
                    // see comment on places in
                    // propagate_through_place_components()
                    let succ = self.write_place(&l, succ, ACC_WRITE|ACC_READ);
                    let succ = self.propagate_through_expr(&r, succ);
                    self.propagate_through_place_components(&l, succ)
                }
            }

            // Uninteresting cases: just propagate in rev exec order

            hir::ExprKind::Array(ref exprs) => {
                self.propagate_through_exprs(exprs, succ)
            }

            hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
                // The `..base` expression, if any, executes last.
                let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
                fields.iter().rev().fold(succ, |succ, field| {
                    self.propagate_through_expr(&field.expr, succ)
                })
            }

            hir::ExprKind::Call(ref f, ref args) => {
                // A call whose type is uninhabited never returns normally,
                // so control continues at `exit_ln` rather than at `succ`.
                let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
                let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
                    self.s.exit_ln
                } else {
                    succ
                };
                let succ = self.propagate_through_exprs(args, succ);
                self.propagate_through_expr(&f, succ)
            }

            hir::ExprKind::MethodCall(.., ref args) => {
                // Same uninhabited-return handling as `Call` above.
                let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
                let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
                    self.s.exit_ln
                } else {
                    succ
                };

                self.propagate_through_exprs(args, succ)
            }

            hir::ExprKind::Tup(ref exprs) => {
                self.propagate_through_exprs(exprs, succ)
            }

            hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
                // `&&` / `||`: the RHS may or may not run, so merge the
                // "RHS runs" flow into a node that also sees `succ` directly.
                let r_succ = self.propagate_through_expr(&r, succ);

                let ln = self.live_node(expr.hir_id, expr.span);
                self.init_from_succ(ln, succ);
                self.merge_from_succ(ln, r_succ, false);

                self.propagate_through_expr(&l, ln)
            }

            hir::ExprKind::Index(ref l, ref r) |
            hir::ExprKind::Binary(_, ref l, ref r) => {
                let r_succ = self.propagate_through_expr(&r, succ);
                self.propagate_through_expr(&l, r_succ)
            }

            hir::ExprKind::Box(ref e) |
            hir::ExprKind::AddrOf(_, _, ref e) |
            hir::ExprKind::Cast(ref e, _) |
            hir::ExprKind::Type(ref e, _) |
            hir::ExprKind::DropTemps(ref e) |
            hir::ExprKind::Unary(_, ref e) |
            hir::ExprKind::Yield(ref e, _) |
            hir::ExprKind::Repeat(ref e, _) => {
                self.propagate_through_expr(&e, succ)
            }

            hir::ExprKind::InlineAsm(ref asm) => {
                let ia = &asm.inner;
                let outputs = &asm.outputs_exprs;
                let inputs = &asm.inputs_exprs;
                // Direct outputs are writes (optionally read-write);
                // indirect outputs are reads of the pointer expression.
                let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| {
                    // see comment on places
                    // in propagate_through_place_components()
                    if o.is_indirect {
                        self.propagate_through_expr(output, succ)
                    } else {
                        let acc = if o.is_rw { ACC_WRITE|ACC_READ } else { ACC_WRITE };
                        let succ = self.write_place(output, succ, acc);
                        self.propagate_through_place_components(output, succ)
                    }
                });

                // Inputs are executed first. Propagate last because of rev order
                self.propagate_through_exprs(inputs, succ)
            }

            hir::ExprKind::Lit(..) | hir::ExprKind::Err |
            hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
                succ
            }

            // Note that labels have been resolved, so we don't need to look
            // at the label ident
            hir::ExprKind::Block(ref blk, _) => {
                self.propagate_through_block(&blk, succ)
            }
        }
    }
1219
    /// Generates reads for the subcomponents of a place that appears on
    /// the left-hand side of an assignment (see the long comment below
    /// for the tracked / untracked distinction).
    fn propagate_through_place_components(&mut self,
                                          expr: &Expr,
                                          succ: LiveNode)
                                          -> LiveNode {
        // # Places
        //
        // In general, the full flow graph structure for an
        // assignment/move/etc can be handled in one of two ways,
        // depending on whether what is being assigned is a "tracked
        // value" or not. A tracked value is basically a local
        // variable or argument.
        //
        // The two kinds of graphs are:
        //
        //    Tracked place          Untracked place
        // ----------------------++-----------------------
        //                       ||
        //         |             ||           |
        //         v             ||           v
        //     (rvalue)          ||       (rvalue)
        //         |             ||           |
        //         v             ||           v
        // (write of place)      ||   (place components)
        //         |             ||           |
        //         v             ||           v
        //      (succ)           ||        (succ)
        //                       ||
        // ----------------------++-----------------------
        //
        // I will cover the two cases in turn:
        //
        // # Tracked places
        //
        // A tracked place is a local variable/argument `x`. In
        // these cases, the link_node where the write occurs is linked
        // to node id of `x`. The `write_place()` routine generates
        // the contents of this node. There are no subcomponents to
        // consider.
        //
        // # Non-tracked places
        //
        // These are places like `x[5]` or `x.f`. In that case, we
        // basically ignore the value which is written to but generate
        // reads for the components---`x` in these two examples. The
        // components reads are generated by
        // `propagate_through_place_components()` (this fn).
        //
        // # Illegal places
        //
        // It is still possible to observe assignments to non-places;
        // these errors are detected in the later pass borrowck. We
        // just ignore such cases and treat them as reads.

        match expr.kind {
            // Tracked place: `write_place()` already produced the write
            // node; there are no subcomponents to read.
            hir::ExprKind::Path(_) => succ,
            // `x.f`: read the base `x`, ignore the written-to field.
            hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
            // Everything else (including illegal non-places): treat as a read.
            _ => self.propagate_through_expr(expr, succ)
        }
    }
1279
1280 // see comment on propagate_through_place()
1281 fn write_place(&mut self, expr: &Expr, succ: LiveNode, acc: u32) -> LiveNode {
1282 match expr.kind {
1283 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1284 self.access_path(expr.hir_id, path, succ, acc)
1285 }
1286
1287 // We do not track other places, so just propagate through
1288 // to their subcomponents. Also, it may happen that
1289 // non-places occur here, because those are detected in the
1290 // later pass borrowck.
1291 _ => succ
1292 }
1293 }
1294
1295 fn access_var(&mut self, hir_id: HirId, var_hid: HirId, succ: LiveNode, acc: u32, span: Span)
1296 -> LiveNode {
1297 let ln = self.live_node(hir_id, span);
1298 if acc != 0 {
1299 self.init_from_succ(ln, succ);
1300 let var = self.variable(var_hid, span);
1301 self.acc(ln, var, acc);
1302 }
1303 ln
1304 }
1305
1306 fn access_path(&mut self, hir_id: HirId, path: &hir::Path, succ: LiveNode, acc: u32)
1307 -> LiveNode {
1308 match path.res {
1309 Res::Local(hid) => {
1310 let upvars = self.ir.tcx.upvars(self.ir.body_owner);
1311 if !upvars.map_or(false, |upvars| upvars.contains_key(&hid)) {
1312 self.access_var(hir_id, hid, succ, acc, path.span)
1313 } else {
1314 succ
1315 }
1316 }
1317 _ => succ
1318 }
1319 }
1320
    /// Propagates liveness through a `loop` body, iterating the backwards
    /// walk until the loop-head node reaches a fixed point.
    fn propagate_through_loop(
        &mut self,
        expr: &Expr,
        body: &hir::Block,
        succ: LiveNode
    ) -> LiveNode {
        /*
        We model control flow like this:

              (expr) <-+
                |      |
                v      |
              (body) --+

        Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
        Meanwhile, a `break` expression will have a successor of `succ`.
        */

        // first iteration:
        let mut first_merge = true;
        let ln = self.live_node(expr.hir_id, expr.span);
        self.init_empty(ln, succ);
        debug!("propagate_through_loop: using id for loop body {} {}",
               expr.hir_id, self.ir.tcx.hir().hir_to_pretty_string(body.hir_id));

        // `break` exits to the loop's successor ...
        self.break_ln.insert(expr.hir_id, succ);

        // ... while `continue` jumps back to the loop head.
        self.cont_ln.insert(expr.hir_id, ln);

        let body_ln = self.propagate_through_block(body, ln);

        // repeat until fixed point is reached:
        // (re-walking the body must reproduce the same entry node; the
        // assert checks that invariant each round)
        while self.merge_from_succ(ln, body_ln, first_merge) {
            first_merge = false;
            assert_eq!(body_ln, self.propagate_through_block(body, ln));
        }

        ln
    }
1360 }
1361
1362 // _______________________________________________________________________
1363 // Checking for error conditions
1364
1365 impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
1366 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
1367 NestedVisitorMap::None
1368 }
1369
1370 fn visit_local(&mut self, local: &'tcx hir::Local) {
1371 self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
1372 if local.init.is_some() {
1373 self.warn_about_dead_assign(spans, hir_id, ln, var);
1374 }
1375 });
1376
1377 intravisit::walk_local(self, local);
1378 }
1379
1380 fn visit_expr(&mut self, ex: &'tcx Expr) {
1381 check_expr(self, ex);
1382 }
1383
1384 fn visit_arm(&mut self, arm: &'tcx hir::Arm) {
1385 self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
1386 intravisit::walk_arm(self, arm);
1387 }
1388 }
1389
/// Checks a single expression for liveness-related error conditions:
/// assignment targets (including direct inline-asm outputs) are verified
/// as places, and dead assignments are reported. Recurses into
/// subexpressions at the end.
fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr) {
    match expr.kind {
        hir::ExprKind::Assign(ref l, _) => {
            this.check_place(&l);
        }

        hir::ExprKind::AssignOp(_, ref l, _) => {
            // An overloaded assign-op is a method call on the LHS rather
            // than a write to a place, so only the built-in form is checked.
            if !this.tables.is_method_call(expr) {
                this.check_place(&l);
            }
        }

        hir::ExprKind::InlineAsm(ref asm) => {
            for input in &asm.inputs_exprs {
                this.visit_expr(input);
            }

            // Output operands must be places
            for (o, output) in asm.inner.outputs.iter().zip(&asm.outputs_exprs) {
                if !o.is_indirect {
                    this.check_place(output);
                }
                this.visit_expr(output);
            }
        }

        // no correctness conditions related to liveness
        hir::ExprKind::Call(..) | hir::ExprKind::MethodCall(..) |
        hir::ExprKind::Match(..) | hir::ExprKind::Loop(..) |
        hir::ExprKind::Index(..) | hir::ExprKind::Field(..) |
        hir::ExprKind::Array(..) | hir::ExprKind::Tup(..) | hir::ExprKind::Binary(..) |
        hir::ExprKind::Cast(..) | hir::ExprKind::DropTemps(..) | hir::ExprKind::Unary(..) |
        hir::ExprKind::Ret(..) | hir::ExprKind::Break(..) | hir::ExprKind::Continue(..) |
        hir::ExprKind::Lit(_) | hir::ExprKind::Block(..) | hir::ExprKind::AddrOf(..) |
        hir::ExprKind::Struct(..) | hir::ExprKind::Repeat(..) |
        hir::ExprKind::Closure(..) | hir::ExprKind::Path(_) | hir::ExprKind::Yield(..) |
        hir::ExprKind::Box(..) | hir::ExprKind::Type(..) | hir::ExprKind::Err => {}
    }

    intravisit::walk_expr(this, expr);
}
1431
impl<'tcx> Liveness<'_, 'tcx> {
    /// Checks the target of an assignment. For a local variable, warns if
    /// the assigned value is dead; other places only have their embedded
    /// rvalue expressions walked.
    fn check_place(&mut self, expr: &'tcx Expr) {
        match expr.kind {
            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
                if let Res::Local(var_hid) = path.res {
                    // Upvar writes are not checked here (they belong to
                    // the enclosing body's analysis).
                    let upvars = self.ir.tcx.upvars(self.ir.body_owner);
                    if !upvars.map_or(false, |upvars| upvars.contains_key(&var_hid)) {
                        // Assignment to an immutable variable or argument: only legal
                        // if there is no later assignment. If this local is actually
                        // mutable, then check for a reassignment to flag the mutability
                        // as being used.
                        let ln = self.live_node(expr.hir_id, expr.span);
                        let var = self.variable(var_hid, expr.span);
                        self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
                    }
                }
            }
            _ => {
                // For other kinds of places, no checks are required,
                // and any embedded expressions are actually rvalues
                intravisit::walk_expr(self, expr);
            }
        }
    }

    /// Returns the variable's name if it should be warned about; names
    /// starting with `_` (and empty names) are exempt.
    fn should_warn(&self, var: Variable) -> Option<String> {
        let name = self.ir.variable_name(var);
        if name.is_empty() || name.as_bytes()[0] == b'_' {
            None
        } else {
            Some(name)
        }
    }

    /// Warns about function parameters whose values are never read.
    fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) {
        for p in &body.params {
            self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
                // Used somewhere but never *live* at entry: the passed-in
                // value itself is dead.
                if self.live_on_entry(ln, var).is_none() {
                    self.report_dead_assign(hir_id, spans, var, true);
                }
            });
        }
    }

    /// Walks the bindings of `pat`, reporting unused variables and
    /// invoking `on_used_on_entry` for bindings that are used.
    fn check_unused_vars_in_pat(
        &self,
        pat: &hir::Pat,
        entry_ln: Option<LiveNode>,
        on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
    ) {
        // In an or-pattern, only consider the variable; any later patterns must have the same
        // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
        // However, we should take the spans of variables with the same name from the later
        // patterns so the suggestions to prefix with underscores will apply to those too.
        let mut vars: FxIndexMap<String, (LiveNode, Variable, HirId, Vec<Span>)> = <_>::default();

        pat.each_binding(|_, hir_id, pat_sp, ident| {
            let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
            let var = self.variable(hir_id, ident.span);
            vars.entry(self.ir.variable_name(var))
                .and_modify(|(.., spans)| spans.push(ident.span))
                .or_insert_with(|| (ln, var, hir_id, vec![ident.span]));
        });

        for (_, (ln, var, id, spans)) in vars {
            if self.used_on_entry(ln, var) {
                on_used_on_entry(spans, id, ln, var);
            } else {
                self.report_unused(spans, id, ln, var);
            }
        }
    }

    /// Emits the `unused_variables` lint, choosing between the
    /// "assigned but never used" and plain "unused variable" messages and
    /// attaching a machine-applicable suggestion for the latter.
    fn report_unused(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
        // `self` bindings are never reported.
        if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
            // annoying: for parameters in funcs like `fn(x: i32)
            // {ret}`, there is only one node, so asking about
            // assigned_on_exit() is not meaningful.
            let is_assigned = if ln == self.s.exit_ln {
                false
            } else {
                self.assigned_on_exit(ln, var).is_some()
            };

            if is_assigned {
                self.ir.tcx.lint_hir_note(
                    lint::builtin::UNUSED_VARIABLES,
                    hir_id,
                    spans,
                    &format!("variable `{}` is assigned to, but never used", name),
                    &format!("consider using `_{}` instead", name),
                );
            } else {
                let mut err = self.ir.tcx.struct_span_lint_hir(
                    lint::builtin::UNUSED_VARIABLES,
                    hir_id,
                    spans.clone(),
                    &format!("unused variable: `{}`", name),
                );

                // Field-shorthand bindings (`Foo { x }`) get a different
                // suggestion: ignore the field rather than rename it.
                if self.ir.variable_is_shorthand(var) {
                    if let Node::Binding(pat) = self.ir.tcx.hir().get(hir_id) {
                        // Handle `ref` and `ref mut`.
                        let spans = spans.iter()
                            .map(|_span| (pat.span, format!("{}: _", name)))
                            .collect();

                        err.multipart_suggestion(
                            "try ignoring the field",
                            spans,
                            Applicability::MachineApplicable,
                        );
                    }
                } else {
                    err.multipart_suggestion(
                        "consider prefixing with an underscore",
                        spans.iter().map(|span| (*span, format!("_{}", name))).collect(),
                        Applicability::MachineApplicable,
                    );
                }

                err.emit()
            }
        }
    }

    /// Reports an assignment whose value is never read afterwards.
    fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
        if self.live_on_exit(ln, var).is_none() {
            self.report_dead_assign(hir_id, spans, var, false);
        }
    }

    /// Emits the `unused_assignments` lint; `is_param` selects the
    /// parameter-specific wording.
    fn report_dead_assign(&self, hir_id: HirId, spans: Vec<Span>, var: Variable, is_param: bool) {
        if let Some(name) = self.should_warn(var) {
            if is_param {
                self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, spans,
                    &format!("value passed to `{}` is never read", name))
                    .help("maybe it is overwritten before being read?")
                    .emit();
            } else {
                self.ir.tcx.struct_span_lint_hir(lint::builtin::UNUSED_ASSIGNMENTS, hir_id, spans,
                    &format!("value assigned to `{}` is never read", name))
                    .help("maybe it is overwritten before being read?")
                    .emit();
            }
        }
    }
}