]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_passes/src/liveness.rs
New upstream version 1.49.0~beta.4+dfsg1
[rustc.git] / compiler / rustc_passes / src / liveness.rs
1 //! A classic liveness analysis based on dataflow over the AST. Computes,
2 //! for each local variable in a function, whether that variable is live
3 //! at a given point. Program execution points are identified by their
4 //! IDs.
5 //!
6 //! # Basic idea
7 //!
8 //! The basic model is that each local variable is assigned an index. We
9 //! represent sets of local variables using a vector indexed by this
10 //! index. The value in the vector is either 0, indicating the variable
11 //! is dead, or the ID of an expression that uses the variable.
12 //!
13 //! We conceptually walk over the AST in reverse execution order. If we
14 //! find a use of a variable, we add it to the set of live variables. If
15 //! we find an assignment to a variable, we remove it from the set of live
16 //! variables. When we have to merge two flows, we take the union of
17 //! those two flows -- if the variable is live on both paths, we simply
18 //! pick one ID. In the event of loops, we continue doing this until a
19 //! fixed point is reached.
20 //!
21 //! ## Checking initialization
22 //!
23 //! At the function entry point, all variables must be dead. If this is
24 //! not the case, we can report an error using the ID found in the set of
25 //! live variables, which identifies a use of the variable which is not
26 //! dominated by an assignment.
27 //!
28 //! ## Checking moves
29 //!
30 //! After each explicit move, the variable must be dead.
31 //!
32 //! ## Computing last uses
33 //!
34 //! Any use of the variable where the variable is dead afterwards is a
35 //! last use.
36 //!
37 //! # Implementation details
38 //!
39 //! The actual implementation contains two (nested) walks over the AST.
40 //! The outer walk has the job of building up the ir_maps instance for the
41 //! enclosing function. On the way down the tree, it identifies those AST
42 //! nodes and variable IDs that will be needed for the liveness analysis
43 //! and assigns them contiguous IDs. The liveness ID for an AST node is
44 //! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
45 //! is called a `variable` (another newtype'd `u32`).
46 //!
47 //! On the way back up the tree, as we are about to exit from a function
48 //! declaration we allocate a `liveness` instance. Now that we know
49 //! precisely how many nodes and variables we need, we can allocate all
50 //! the various arrays that we will need to precisely the right size. We then
51 //! perform the actual propagation on the `liveness` instance.
52 //!
53 //! This propagation is encoded in the various `propagate_through_*()`
54 //! methods. It effectively does a reverse walk of the AST; whenever we
55 //! reach a loop node, we iterate until a fixed point is reached.
56 //!
57 //! ## The `RWU` struct
58 //!
59 //! At each live node `N`, we track three pieces of information for each
60 //! variable `V` (these are encapsulated in the `RWU` struct):
61 //!
62 //! - `reader`: the `LiveNode` ID of some node which will read the value
63 //! that `V` holds on entry to `N`. Formally: a node `M` such
64 //! that there exists a path `P` from `N` to `M` where `P` does not
65 //! write `V`. If the `reader` is `None`, then the current
66 //! value will never be read (the variable is dead, essentially).
67 //!
68 //! - `writer`: the `LiveNode` ID of some node which will write the
69 //! variable `V` and which is reachable from `N`. Formally: a node `M`
70 //! such that there exists a path `P` from `N` to `M` and `M` writes
71 //! `V`. If the `writer` is `None`, then there is no writer
72 //! of `V` that follows `N`.
73 //!
74 //! - `used`: a boolean value indicating whether `V` is *used*. We
75 //! distinguish a *read* from a *use* in that a *use* is some read that
76 //! is not just used to generate a new value. For example, `x += 1` is
77 //! a read but not a use. This is used to generate better warnings.
78 //!
79 //! ## Special nodes and variables
80 //!
81 //! We generate various special nodes for various, well, special purposes.
82 //! These are described in the `Liveness` struct.
83
84 use self::LiveNodeKind::*;
85 use self::VarKind::*;
86
87 use rustc_ast::InlineAsmOptions;
88 use rustc_data_structures::fx::FxIndexMap;
89 use rustc_errors::Applicability;
90 use rustc_hir as hir;
91 use rustc_hir::def::*;
92 use rustc_hir::def_id::LocalDefId;
93 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
94 use rustc_hir::{Expr, HirId, HirIdMap, HirIdSet};
95 use rustc_index::vec::IndexVec;
96 use rustc_middle::hir::map::Map;
97 use rustc_middle::ty::query::Providers;
98 use rustc_middle::ty::{self, DefIdTree, TyCtxt};
99 use rustc_session::lint;
100 use rustc_span::symbol::{kw, sym, Symbol};
101 use rustc_span::Span;
102
103 use std::collections::VecDeque;
104 use std::io;
105 use std::io::prelude::*;
106 use std::rc::Rc;
107
rustc_index::newtype_index! {
    /// Dense index assigned to each local variable tracked by the analysis.
    pub struct Variable {
        DEBUG_FORMAT = "v({})",
    }
}
113
rustc_index::newtype_index! {
    /// Dense index assigned to each program point of interest (see `LiveNodeKind`).
    pub struct LiveNode {
        DEBUG_FORMAT = "ln({})",
    }
}
119
/// What kind of program point a `LiveNode` stands for.
#[derive(Copy, Clone, PartialEq, Debug)]
enum LiveNodeKind {
    /// A variable captured by a closure; the span is the capturing use site.
    UpvarNode(Span),
    /// An interesting expression (variable use, match, loop, lazy binop, closure).
    ExprNode(Span),
    /// The definition site of a variable binding in a pattern.
    VarDefNode(Span),
    /// Synthetic point before closure entry / after closure exit (Fn/FnMut).
    ClosureNode,
    /// Synthetic point representing every exit from the body.
    ExitNode,
}
128
129 fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
130 let sm = tcx.sess.source_map();
131 match lnk {
132 UpvarNode(s) => format!("Upvar node [{}]", sm.span_to_string(s)),
133 ExprNode(s) => format!("Expr node [{}]", sm.span_to_string(s)),
134 VarDefNode(s) => format!("Var def node [{}]", sm.span_to_string(s)),
135 ClosureNode => "Closure node".to_owned(),
136 ExitNode => "Exit node".to_owned(),
137 }
138 }
139
/// Query entry point: runs the liveness pass over every item in `module_def_id`.
fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
    tcx.hir().visit_item_likes_in_module(module_def_id, &mut IrMaps::new(tcx).as_deep_visitor());
}
143
/// Registers this pass's queries with the query system.
pub fn provide(providers: &mut Providers) {
    *providers = Providers { check_mod_liveness, ..*providers };
}
147
148 // ______________________________________________________________________
149 // Creating ir_maps
150 //
151 // This is the first pass and the one that drives the main
152 // computation. It walks up and down the IR once. On the way down,
153 // we count for each function the number of variables as well as
154 // liveness nodes. A liveness node is basically an expression or
155 // capture clause that does something of interest: either it has
156 // interesting control flow or it uses/defines a local variable.
157 //
158 // On the way back up, at each function node we create liveness sets
159 // (we now know precisely how big to make our various vectors and so
160 // forth) and then do the data-flow propagation to compute the set
161 // of live variables at each program point.
162 //
163 // Finally, we run back over the IR one last time and, using the
164 // computed liveness, check various safety conditions. For example,
165 // there must be no live nodes at the definition site for a variable
166 // unless it has an initializer. Similarly, each non-mutable local
167 // variable must not be assigned if there is some successor
168 // assignment. And so forth.
169
/// Pairs a closure's per-capture live node with the `HirId` of the captured
/// variable.
struct CaptureInfo {
    ln: LiveNode,
    var_hid: HirId,
}
174
/// Metadata for a `let`-bound (or shorthand-struct-pattern) variable.
#[derive(Copy, Clone, Debug)]
struct LocalInfo {
    id: HirId,
    name: Symbol,
    // True when bound via struct-pattern shorthand (`x` rather than `x: x`);
    // used to tailor unused-variable suggestions.
    is_shorthand: bool,
}
181
/// How a tracked variable was introduced.
#[derive(Copy, Clone, Debug)]
enum VarKind {
    /// Function/closure parameter.
    Param(HirId, Symbol),
    /// Local binding from a pattern.
    Local(LocalInfo),
    /// Variable captured from an enclosing scope.
    Upvar(HirId, Symbol),
}
188
/// Per-body IR built by the first (downward) walk: assigns contiguous
/// `LiveNode` and `Variable` indices and remembers closure captures.
struct IrMaps<'tcx> {
    tcx: TyCtxt<'tcx>,
    /// HIR node -> its live node, for nodes that got one.
    live_node_map: HirIdMap<LiveNode>,
    /// HIR id of a binding -> its `Variable` index.
    variable_map: HirIdMap<Variable>,
    /// Closure expression -> capture list (shared via `Rc` so `Liveness` can
    /// read it while `IrMaps` is mutably borrowed elsewhere).
    capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
    /// Indexed by `Variable`.
    var_kinds: IndexVec<Variable, VarKind>,
    /// Indexed by `LiveNode`.
    lnks: IndexVec<LiveNode, LiveNodeKind>,
}
197
impl IrMaps<'tcx> {
    fn new(tcx: TyCtxt<'tcx>) -> IrMaps<'tcx> {
        IrMaps {
            tcx,
            live_node_map: HirIdMap::default(),
            variable_map: HirIdMap::default(),
            capture_info_map: Default::default(),
            var_kinds: IndexVec::new(),
            lnks: IndexVec::new(),
        }
    }

    /// Allocates a fresh live node of the given kind.
    fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
        let ln = self.lnks.push(lnk);

        debug!("{:?} is of kind {}", ln, live_node_kind_to_string(lnk, self.tcx));

        ln
    }

    /// Allocates a live node and associates it with the HIR node `hir_id`.
    fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
        let ln = self.add_live_node(lnk);
        self.live_node_map.insert(hir_id, ln);

        debug!("{:?} is node {:?}", ln, hir_id);
    }

    /// Allocates a `Variable` index for `vk` and records the HIR-id mapping.
    fn add_variable(&mut self, vk: VarKind) -> Variable {
        let v = self.var_kinds.push(vk);

        match vk {
            Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) | Upvar(node_id, _) => {
                self.variable_map.insert(node_id, v);
            }
        }

        debug!("{:?} is {:?}", v, vk);

        v
    }

    /// Looks up the `Variable` for `hir_id`; ICEs if none was registered
    /// (the two walks must agree on which nodes are variables).
    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
        match self.variable_map.get(&hir_id) {
            Some(&var) => var,
            None => {
                span_bug!(span, "no variable registered for id {:?}", hir_id);
            }
        }
    }

    fn variable_name(&self, var: Variable) -> Symbol {
        match self.var_kinds[var] {
            Local(LocalInfo { name, .. }) | Param(_, name) | Upvar(_, name) => name,
        }
    }

    /// Whether `var` came from struct-pattern shorthand; only locals can.
    fn variable_is_shorthand(&self, var: Variable) -> bool {
        match self.var_kinds[var] {
            Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
            Param(..) | Upvar(..) => false,
        }
    }

    fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
        self.capture_info_map.insert(hir_id, Rc::new(cs));
    }

    /// Registers a live node and a `Variable` for every binding in `pat`.
    fn add_from_pat(&mut self, pat: &hir::Pat<'tcx>) {
        // For struct patterns, take note of which fields used shorthand
        // (`x` rather than `x: x`).
        let mut shorthand_field_ids = HirIdSet::default();
        // Worklist traversal of the pattern tree to collect shorthand fields.
        let mut pats = VecDeque::new();
        pats.push_back(pat);
        while let Some(pat) = pats.pop_front() {
            use rustc_hir::PatKind::*;
            match &pat.kind {
                Binding(.., inner_pat) => {
                    pats.extend(inner_pat.iter());
                }
                Struct(_, fields, _) => {
                    let ids = fields.iter().filter(|f| f.is_shorthand).map(|f| f.pat.hir_id);
                    shorthand_field_ids.extend(ids);
                }
                Ref(inner_pat, _) | Box(inner_pat) => {
                    pats.push_back(inner_pat);
                }
                TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
                    pats.extend(inner_pats.iter());
                }
                Slice(pre_pats, inner_pat, post_pats) => {
                    pats.extend(pre_pats.iter());
                    pats.extend(inner_pat.iter());
                    pats.extend(post_pats.iter());
                }
                _ => {}
            }
        }

        pat.each_binding(|_, hir_id, _, ident| {
            self.add_live_node_for_node(hir_id, VarDefNode(ident.span));
            self.add_variable(Local(LocalInfo {
                id: hir_id,
                name: ident.name,
                is_shorthand: shorthand_field_ids.contains(&hir_id),
            }));
        });
    }
}
306
impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
    type Map = Map<'tcx>;

    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::OnlyBodies(self.tcx.hir())
    }

    /// Drives the whole analysis for one body: builds a fresh `IrMaps`,
    /// runs the propagation (`Liveness::compute`), then the warning passes.
    fn visit_body(&mut self, body: &'tcx hir::Body<'tcx>) {
        debug!("visit_body {:?}", body.id());

        // swap in a new set of IR maps for this body
        let mut maps = IrMaps::new(self.tcx);
        let hir_id = maps.tcx.hir().body_owner(body.id());
        let def_id = maps.tcx.hir().local_def_id(hir_id);

        // Don't run unused pass for #[derive()]
        if let Some(parent) = self.tcx.parent(def_id.to_def_id()) {
            if let DefKind::Impl = self.tcx.def_kind(parent.expect_local()) {
                if self.tcx.has_attr(parent, sym::automatically_derived) {
                    return;
                }
            }
        }

        // Register every mentioned upvar before walking the body so that
        // captured variables have `Variable` indices.
        if let Some(upvars) = maps.tcx.upvars_mentioned(def_id) {
            for (&var_hir_id, _upvar) in upvars {
                let var_name = maps.tcx.hir().name(var_hir_id);
                maps.add_variable(Upvar(var_hir_id, var_name));
            }
        }

        // gather up the various local variables, significant expressions,
        // and so forth:
        intravisit::walk_body(&mut maps, body);

        // compute liveness
        let mut lsets = Liveness::new(&mut maps, def_id);
        let entry_ln = lsets.compute(&body, hir_id);
        lsets.log_liveness(entry_ln, body.id().hir_id);

        // check for various error conditions
        lsets.visit_body(body);
        lsets.warn_about_unused_upvars(entry_ln);
        lsets.warn_about_unused_args(body, entry_ln);
    }

    fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
        self.add_from_pat(&local.pat);
        intravisit::walk_local(self, local);
    }

    fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
        self.add_from_pat(&arm.pat);
        intravisit::walk_arm(self, arm);
    }

    fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) {
        // A struct pattern in parameter position means every binding in it is
        // field shorthand-like; treat those bindings as locals, not params.
        let is_shorthand = match param.pat.kind {
            rustc_hir::PatKind::Struct(..) => true,
            _ => false,
        };
        param.pat.each_binding(|_bm, hir_id, _x, ident| {
            let var = if is_shorthand {
                Local(LocalInfo { id: hir_id, name: ident.name, is_shorthand: true })
            } else {
                Param(hir_id, ident.name)
            };
            self.add_variable(var);
        });
        intravisit::walk_param(self, param);
    }

    /// Decides which expressions get live nodes: variable paths, closures,
    /// and control-flow constructs (match/loop/lazy binops). Everything else
    /// is just walked.
    fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
        match expr.kind {
            // live nodes required for uses or definitions of variables:
            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
                debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
                if let Res::Local(_var_hir_id) = path.res {
                    self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
                }
                intravisit::walk_expr(self, expr);
            }
            hir::ExprKind::Closure(..) => {
                // Interesting control flow (for loops can contain labeled
                // breaks or continues)
                self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));

                // Make a live_node for each captured variable, with the span
                // being the location that the variable is used. This results
                // in better error messages than just pointing at the closure
                // construction site.
                let mut call_caps = Vec::new();
                let closure_def_id = self.tcx.hir().local_def_id(expr.hir_id);
                if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
                    call_caps.extend(upvars.iter().map(|(&var_id, upvar)| {
                        let upvar_ln = self.add_live_node(UpvarNode(upvar.span));
                        CaptureInfo { ln: upvar_ln, var_hid: var_id }
                    }));
                }
                self.set_captures(expr.hir_id, call_caps);
                intravisit::walk_expr(self, expr);
            }

            // live nodes required for interesting control flow:
            hir::ExprKind::Match(..) | hir::ExprKind::Loop(..) => {
                self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
                intravisit::walk_expr(self, expr);
            }
            hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
                self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
                intravisit::walk_expr(self, expr);
            }

            // otherwise, live nodes are not required:
            hir::ExprKind::Index(..)
            | hir::ExprKind::Field(..)
            | hir::ExprKind::Array(..)
            | hir::ExprKind::Call(..)
            | hir::ExprKind::MethodCall(..)
            | hir::ExprKind::Tup(..)
            | hir::ExprKind::Binary(..)
            | hir::ExprKind::AddrOf(..)
            | hir::ExprKind::Cast(..)
            | hir::ExprKind::DropTemps(..)
            | hir::ExprKind::Unary(..)
            | hir::ExprKind::Break(..)
            | hir::ExprKind::Continue(_)
            | hir::ExprKind::Lit(_)
            | hir::ExprKind::ConstBlock(..)
            | hir::ExprKind::Ret(..)
            | hir::ExprKind::Block(..)
            | hir::ExprKind::Assign(..)
            | hir::ExprKind::AssignOp(..)
            | hir::ExprKind::Struct(..)
            | hir::ExprKind::Repeat(..)
            | hir::ExprKind::InlineAsm(..)
            | hir::ExprKind::LlvmInlineAsm(..)
            | hir::ExprKind::Box(..)
            | hir::ExprKind::Yield(..)
            | hir::ExprKind::Type(..)
            | hir::ExprKind::Err
            | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
            | hir::ExprKind::Path(hir::QPath::LangItem(..)) => {
                intravisit::walk_expr(self, expr);
            }
        }
    }
}
455
456 // ______________________________________________________________________
457 // Computing liveness sets
458 //
459 // Actually we compute just a bit more than just liveness, but we use
460 // the same basic propagation framework in all cases.
461
/// Per-(node, variable) liveness facts; see the module docs' "The `RWU`
/// struct" section for the precise definitions of reader/writer/used.
#[derive(Clone, Copy)]
struct RWU {
    reader: Option<LiveNode>,
    writer: Option<LiveNode>,
    used: bool,
}
468
/// Conceptually, this is like a `Vec<RWU>`. But the number of `RWU`s can get
/// very large, so it uses a more compact representation that takes advantage
/// of the fact that when the number of `RWU`s is large, most of them have an
/// invalid reader and an invalid writer.
struct RWUTable {
    /// Each entry in `packed_rwus` is either INV_INV_FALSE, INV_INV_TRUE, or
    /// an index into `unpacked_rwus`. In the common cases, this compacts the
    /// 65 bits of data into 32; in the uncommon cases, it expands the 65 bits
    /// into 96.
    ///
    /// More compact representations are possible -- e.g., use only 2 bits per
    /// packed `RWU` and make the secondary table a HashMap that maps from
    /// indices to `RWU`s -- but this one strikes a good balance between size
    /// and speed.
    packed_rwus: Vec<u32>,
    unpacked_rwus: Vec<RWU>,
}
486
// A constant representing `RWU { reader: None, writer: None, used: false }`.
const INV_INV_FALSE: u32 = u32::MAX;

// A constant representing `RWU { reader: None, writer: None, used: true }`.
const INV_INV_TRUE: u32 = u32::MAX - 1;
492
493 impl RWUTable {
494 fn new(num_rwus: usize) -> RWUTable {
495 Self { packed_rwus: vec![INV_INV_FALSE; num_rwus], unpacked_rwus: vec![] }
496 }
497
498 fn get(&self, idx: usize) -> RWU {
499 let packed_rwu = self.packed_rwus[idx];
500 match packed_rwu {
501 INV_INV_FALSE => RWU { reader: None, writer: None, used: false },
502 INV_INV_TRUE => RWU { reader: None, writer: None, used: true },
503 _ => self.unpacked_rwus[packed_rwu as usize],
504 }
505 }
506
507 fn get_reader(&self, idx: usize) -> Option<LiveNode> {
508 let packed_rwu = self.packed_rwus[idx];
509 match packed_rwu {
510 INV_INV_FALSE | INV_INV_TRUE => None,
511 _ => self.unpacked_rwus[packed_rwu as usize].reader,
512 }
513 }
514
515 fn get_writer(&self, idx: usize) -> Option<LiveNode> {
516 let packed_rwu = self.packed_rwus[idx];
517 match packed_rwu {
518 INV_INV_FALSE | INV_INV_TRUE => None,
519 _ => self.unpacked_rwus[packed_rwu as usize].writer,
520 }
521 }
522
523 fn get_used(&self, idx: usize) -> bool {
524 let packed_rwu = self.packed_rwus[idx];
525 match packed_rwu {
526 INV_INV_FALSE => false,
527 INV_INV_TRUE => true,
528 _ => self.unpacked_rwus[packed_rwu as usize].used,
529 }
530 }
531
532 #[inline]
533 fn copy_packed(&mut self, dst_idx: usize, src_idx: usize) {
534 self.packed_rwus[dst_idx] = self.packed_rwus[src_idx];
535 }
536
537 fn assign_unpacked(&mut self, idx: usize, rwu: RWU) {
538 if rwu.reader == None && rwu.writer == None {
539 // When we overwrite an indexing entry in `self.packed_rwus` with
540 // `INV_INV_{TRUE,FALSE}` we don't remove the corresponding entry
541 // from `self.unpacked_rwus`; it's not worth the effort, and we
542 // can't have entries shifting around anyway.
543 self.packed_rwus[idx] = if rwu.used { INV_INV_TRUE } else { INV_INV_FALSE }
544 } else {
545 // Add a new RWU to `unpacked_rwus` and make `packed_rwus[idx]`
546 // point to it.
547 self.packed_rwus[idx] = self.unpacked_rwus.len() as u32;
548 self.unpacked_rwus.push(rwu);
549 }
550 }
551
552 fn assign_inv_inv(&mut self, idx: usize) {
553 self.packed_rwus[idx] = if self.get_used(idx) { INV_INV_TRUE } else { INV_INV_FALSE };
554 }
555 }
556
// Bit flags for `Liveness::acc`, describing how a node touches a variable.
const ACC_READ: u32 = 1;
const ACC_WRITE: u32 = 2;
const ACC_USE: u32 = 4;
560
/// State for the backward dataflow propagation over one body.
struct Liveness<'a, 'tcx> {
    ir: &'a mut IrMaps<'tcx>,
    body_owner: LocalDefId,
    typeck_results: &'a ty::TypeckResults<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    upvars: Option<&'tcx FxIndexMap<hir::HirId, hir::Upvar>>,
    /// For each live node, the node control flows to next (set during
    /// `init_empty`/`init_from_succ`).
    successors: IndexVec<LiveNode, Option<LiveNode>>,
    /// Flattened (live node x variable) matrix of `RWU`s; see `idx`.
    rwu_table: RWUTable,

    /// A live node representing a point of execution before closure entry &
    /// after closure exit. Used to calculate liveness of captured variables
    /// through calls to the same closure. Used for Fn & FnMut closures only.
    closure_ln: LiveNode,
    /// A live node representing every 'exit' from the function, whether it be
    /// by explicit return, panic, or other means.
    exit_ln: LiveNode,

    // mappings from loop node ID to LiveNode
    // ("break" label should map to loop node ID,
    // it probably doesn't now)
    break_ln: HirIdMap<LiveNode>,
    cont_ln: HirIdMap<LiveNode>,
}
584
585 impl<'a, 'tcx> Liveness<'a, 'tcx> {
    /// Allocates the synthetic closure/exit nodes and sizes the RWU table to
    /// the final (node x variable) counts gathered by `IrMaps`.
    fn new(ir: &'a mut IrMaps<'tcx>, body_owner: LocalDefId) -> Liveness<'a, 'tcx> {
        let typeck_results = ir.tcx.typeck(body_owner);
        let param_env = ir.tcx.param_env(body_owner);
        let upvars = ir.tcx.upvars_mentioned(body_owner);

        let closure_ln = ir.add_live_node(ClosureNode);
        let exit_ln = ir.add_live_node(ExitNode);

        let num_live_nodes = ir.lnks.len();
        let num_vars = ir.var_kinds.len();

        Liveness {
            ir,
            body_owner,
            typeck_results,
            param_env,
            upvars,
            successors: IndexVec::from_elem_n(None, num_live_nodes),
            rwu_table: RWUTable::new(num_live_nodes * num_vars),
            closure_ln,
            exit_ln,
            break_ln: Default::default(),
            cont_ln: Default::default(),
        }
    }
611
    /// Looks up the live node registered for `hir_id`; ICEs if absent.
    fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
        match self.ir.live_node_map.get(&hir_id) {
            Some(&ln) => ln,
            None => {
                // This must be a mismatch between the ir_map construction
                // above and the propagation code below; the two sets of
                // code have to agree about which AST nodes are worth
                // creating liveness nodes for.
                span_bug!(span, "no live node registered for node {:?}", hir_id);
            }
        }
    }
624
    /// Convenience forwarder to `IrMaps::variable`.
    fn variable(&self, hir_id: HirId, span: Span) -> Variable {
        self.ir.variable(hir_id, span)
    }
628
    /// Chains a definition node for each binding in `pat` before `succ`,
    /// returning the new head of the chain.
    fn define_bindings_in_pat(&mut self, pat: &hir::Pat<'_>, mut succ: LiveNode) -> LiveNode {
        // In an or-pattern, only consider the first pattern; any later patterns
        // must have the same bindings, and we also consider the first pattern
        // to be the "authoritative" set of ids.
        pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
            let ln = self.live_node(hir_id, pat_sp);
            let var = self.variable(hir_id, ident.span);
            self.init_from_succ(ln, succ);
            self.define(ln, var);
            succ = ln;
        });
        succ
    }
642
    /// Flattens (live node, variable) into an index into the row-major
    /// `rwu_table` (one row per live node).
    fn idx(&self, ln: LiveNode, var: Variable) -> usize {
        ln.index() * self.ir.var_kinds.len() + var.index()
    }
646
647 fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
648 if let Some(reader) = self.rwu_table.get_reader(self.idx(ln, var)) {
649 Some(self.ir.lnks[reader])
650 } else {
651 None
652 }
653 }
654
    // Is this variable live on entry to any of its successor nodes?
    fn live_on_exit(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
        // `unwrap` is justified because every node gets a successor during
        // init_empty/init_from_succ before this is queried.
        let successor = self.successors[ln].unwrap();
        self.live_on_entry(successor, var)
    }
660
    /// Whether `var` is *used* (not merely read) at or after `ln`.
    fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
        self.rwu_table.get_used(self.idx(ln, var))
    }
664
665 fn assigned_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
666 if let Some(writer) = self.rwu_table.get_writer(self.idx(ln, var)) {
667 Some(self.ir.lnks[writer])
668 } else {
669 None
670 }
671 }
672
    /// Like `assigned_on_entry`, but measured at `ln`'s successor.
    fn assigned_on_exit(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
        let successor = self.successors[ln].unwrap();
        self.assigned_on_entry(successor, var)
    }
677
    /// Invokes `op` with the paired table indices of every variable for the
    /// rows of `ln` and `succ_ln`.
    fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F)
    where
        F: FnMut(&mut Liveness<'a, 'tcx>, usize, usize),
    {
        let node_base_idx = self.idx(ln, Variable::from(0u32));
        let succ_base_idx = self.idx(succ_ln, Variable::from(0u32));
        for var_idx in 0..self.ir.var_kinds.len() {
            op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
        }
    }
688
    /// Debug helper: writes every variable in `ln`'s row for which `test`
    /// holds (by table index) to `wr`.
    fn write_vars<F>(&self, wr: &mut dyn Write, ln: LiveNode, mut test: F) -> io::Result<()>
    where
        F: FnMut(usize) -> bool,
    {
        let node_base_idx = self.idx(ln, Variable::from(0u32));
        for var_idx in 0..self.ir.var_kinds.len() {
            let idx = node_base_idx + var_idx;
            if test(idx) {
                write!(wr, " {:?}", Variable::from(var_idx))?;
            }
        }
        Ok(())
    }
702
    /// Debug helper: renders one live node's reads/writes/uses and successor.
    /// Writes into an in-memory Vec, so the ignored io::Results cannot fail.
    #[allow(unused_must_use)]
    fn ln_str(&self, ln: LiveNode) -> String {
        let mut wr = Vec::new();
        {
            let wr = &mut wr as &mut dyn Write;
            write!(wr, "[{:?} of kind {:?} reads", ln, self.ir.lnks[ln]);
            self.write_vars(wr, ln, |idx| self.rwu_table.get_reader(idx).is_some());
            write!(wr, " writes");
            self.write_vars(wr, ln, |idx| self.rwu_table.get_writer(idx).is_some());
            write!(wr, " uses");
            self.write_vars(wr, ln, |idx| self.rwu_table.get_used(idx));

            write!(wr, " precedes {:?}]", self.successors[ln]);
        }
        String::from_utf8(wr).unwrap()
    }
719
    /// Dumps the full liveness table via `debug!`.
    fn log_liveness(&self, entry_ln: LiveNode, hir_id: hir::HirId) {
        // hack to skip the loop unless debug! is enabled:
        // the loop lives inside a debug! argument, so it is only evaluated
        // when debug logging is active.
        debug!(
            "^^ liveness computation results for body {} (entry={:?})",
            {
                for ln_idx in 0..self.ir.lnks.len() {
                    debug!("{:?}", self.ln_str(LiveNode::from(ln_idx)));
                }
                hir_id
            },
            entry_ln
        );
    }
733
    /// Records `succ_ln` as `ln`'s successor, leaving `ln`'s row all-dead.
    fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
        self.successors[ln] = Some(succ_ln);

        // It is not necessary to initialize the RWUs here because they are all
        // set to INV_INV_FALSE when they are created, and the sets only grow
        // during iterations.
    }
741
    /// Initializes `ln`'s row as a copy of `succ_ln`'s and links them.
    fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
        // more efficient version of init_empty() / merge_from_succ()
        self.successors[ln] = Some(succ_ln);

        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
            // Copying packed entries avoids decoding/re-encoding each RWU.
            this.rwu_table.copy_packed(idx, succ_idx);
        });
        debug!("init_from_succ(ln={}, succ={})", self.ln_str(ln), self.ln_str(succ_ln));
    }
751
    /// Unions `succ_ln`'s row into `ln`'s (the merge at control-flow joins).
    /// Returns whether anything changed, which drives the fixed-point loops.
    /// `first_merge` is only reported in the debug log.
    fn merge_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode, first_merge: bool) -> bool {
        if ln == succ_ln {
            return false;
        }

        let mut any_changed = false;
        self.indices2(ln, succ_ln, |this, idx, succ_idx| {
            // This is a special case, pulled out from the code below, where we
            // don't have to do anything. It occurs about 60-70% of the time.
            if this.rwu_table.packed_rwus[succ_idx] == INV_INV_FALSE {
                return;
            }

            let mut changed = false;
            let mut rwu = this.rwu_table.get(idx);
            let succ_rwu = this.rwu_table.get(succ_idx);
            // The merge only ever adds facts: a missing reader/writer can be
            // filled in, `used` can go from false to true — never the reverse.
            // That monotonicity is what guarantees the fixed point terminates.
            if succ_rwu.reader.is_some() && rwu.reader.is_none() {
                rwu.reader = succ_rwu.reader;
                changed = true
            }

            if succ_rwu.writer.is_some() && rwu.writer.is_none() {
                rwu.writer = succ_rwu.writer;
                changed = true
            }

            if succ_rwu.used && !rwu.used {
                rwu.used = true;
                changed = true;
            }

            if changed {
                this.rwu_table.assign_unpacked(idx, rwu);
                any_changed = true;
            }
        });

        debug!(
            "merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})",
            ln,
            self.ln_str(succ_ln),
            first_merge,
            any_changed
        );
        any_changed
    }
798
    // Indicates that a local variable was *defined*; we know that no
    // uses of the variable can precede the definition (resolve checks
    // this) so we just clear out all the data.
    fn define(&mut self, writer: LiveNode, var: Variable) {
        let idx = self.idx(writer, var);
        self.rwu_table.assign_inv_inv(idx);

        debug!("{:?} defines {:?} (idx={}): {}", writer, var, idx, self.ln_str(writer));
    }
808
    // Either read, write, or both depending on the acc bitset
    fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
        debug!("{:?} accesses[{:x}] {:?}: {}", ln, acc, var, self.ln_str(ln));

        let idx = self.idx(ln, var);
        let mut rwu = self.rwu_table.get(idx);

        if (acc & ACC_WRITE) != 0 {
            // A write kills any pending read of the old value.
            rwu.reader = None;
            rwu.writer = Some(ln);
        }

        // Important: if we both read/write, must do read second
        // or else the write will override.
        if (acc & ACC_READ) != 0 {
            rwu.reader = Some(ln);
        }

        if (acc & ACC_USE) != 0 {
            rwu.used = true;
        }

        self.rwu_table.assign_unpacked(idx, rwu);
    }
833
    /// Runs the backward propagation over the whole body and returns the
    /// live node at the body's entry. For Fn/FnMut closures this iterates to
    /// a fixed point so that liveness flows through repeated calls.
    fn compute(&mut self, body: &hir::Body<'_>, hir_id: HirId) -> LiveNode {
        debug!("compute: for body {:?}", body.id().hir_id);

        // # Liveness of captured variables
        //
        // When computing the liveness for captured variables we take into
        // account how variable is captured (ByRef vs ByValue) and what is the
        // closure kind (Generator / FnOnce vs Fn / FnMut).
        //
        // Variables captured by reference are assumed to be used on the exit
        // from the closure.
        //
        // In FnOnce closures, variables captured by value are known to be dead
        // on exit since it is impossible to call the closure again.
        //
        // In Fn / FnMut closures, variables captured by value are live on exit
        // if they are live on the entry to the closure, since only the closure
        // itself can access them on subsequent calls.

        if let Some(upvars) = self.upvars {
            // Mark upvars captured by reference as used after closure exits.
            for (&var_hir_id, upvar) in upvars.iter().rev() {
                let upvar_id = ty::UpvarId {
                    var_path: ty::UpvarPath { hir_id: var_hir_id },
                    closure_expr_id: self.body_owner,
                };
                match self.typeck_results.upvar_capture(upvar_id) {
                    ty::UpvarCapture::ByRef(_) => {
                        let var = self.variable(var_hir_id, upvar.span);
                        self.acc(self.exit_ln, var, ACC_READ | ACC_USE);
                    }
                    ty::UpvarCapture::ByValue(_) => {}
                }
            }
        }

        let succ = self.propagate_through_expr(&body.value, self.exit_ln);

        if self.upvars.is_none() {
            // Either not a closure, or closure without any captured variables.
            // No need to determine liveness of captured variables, since there
            // are none.
            return succ;
        }

        let ty = self.typeck_results.node_type(hir_id);
        match ty.kind() {
            ty::Closure(_def_id, substs) => match substs.as_closure().kind() {
                ty::ClosureKind::Fn => {}
                ty::ClosureKind::FnMut => {}
                // FnOnce can't be re-entered; no call-to-call propagation.
                ty::ClosureKind::FnOnce => return succ,
            },
            ty::Generator(..) => return succ,
            _ => {
                span_bug!(
                    body.value.span,
                    "{} has upvars so it should have a closure type: {:?}",
                    hir_id,
                    ty
                );
            }
        };

        // Propagate through calls to the closure.
        let mut first_merge = true;
        loop {
            self.init_from_succ(self.closure_ln, succ);
            // Parameters are defined (hence killed) at each closure entry.
            for param in body.params {
                param.pat.each_binding(|_bm, hir_id, _x, ident| {
                    let var = self.variable(hir_id, ident.span);
                    self.define(self.closure_ln, var);
                })
            }

            if !self.merge_from_succ(self.exit_ln, self.closure_ln, first_merge) {
                break;
            }
            first_merge = false;
            // Re-running the body propagation must not change the entry node.
            assert_eq!(succ, self.propagate_through_expr(&body.value, self.exit_ln));
        }

        succ
    }
917
    /// Propagates backward through a block: tail expression first, then the
    /// statements in reverse order.
    fn propagate_through_block(&mut self, blk: &hir::Block<'_>, succ: LiveNode) -> LiveNode {
        if blk.targeted_by_break {
            // A labeled `break` out of this block jumps straight to `succ`.
            self.break_ln.insert(blk.hir_id, succ);
        }
        let succ = self.propagate_through_opt_expr(blk.expr.as_deref(), succ);
        blk.stmts.iter().rev().fold(succ, |succ, stmt| self.propagate_through_stmt(stmt, succ))
    }
925
    /// Propagates backward through a single statement.
    fn propagate_through_stmt(&mut self, stmt: &hir::Stmt<'_>, succ: LiveNode) -> LiveNode {
        match stmt.kind {
            hir::StmtKind::Local(ref local) => {
                // Note: we mark the variable as defined regardless of whether
                // there is an initializer. Initially I had thought to only mark
                // the live variable as defined if it was initialized, and then we
                // could check for uninit variables just by scanning what is live
                // at the start of the function. But that doesn't work so well for
                // immutable variables defined in a loop:
                //     loop { let x; x = 5; }
                // because the "assignment" loops back around and generates an error.
                //
                // So now we just check that variables defined w/o an
                // initializer are not live at the point of their
                // initialization, which is mildly more complex than checking
                // once at the func header but otherwise equivalent.

                let succ = self.propagate_through_opt_expr(local.init.as_deref(), succ);
                self.define_bindings_in_pat(&local.pat, succ)
            }
            // Nested items have their own bodies; nothing flows through them.
            hir::StmtKind::Item(..) => succ,
            hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
                self.propagate_through_expr(&expr, succ)
            }
        }
    }
952
953 fn propagate_through_exprs(&mut self, exprs: &[Expr<'_>], succ: LiveNode) -> LiveNode {
954 exprs.iter().rev().fold(succ, |succ, expr| self.propagate_through_expr(&expr, succ))
955 }
956
957 fn propagate_through_opt_expr(
958 &mut self,
959 opt_expr: Option<&Expr<'_>>,
960 succ: LiveNode,
961 ) -> LiveNode {
962 opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
963 }
964
    /// Core of the backwards dataflow walk: computes the live node at the
    /// entry of `expr`, given the node `succ` that is live immediately
    /// after `expr` in execution order.
    fn propagate_through_expr(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
        debug!("propagate_through_expr: {:?}", expr);

        match expr.kind {
            // Interesting cases with control flow or which gen/kill

            // A read of a local variable: recorded as READ | USE.
            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
                self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
            }

            hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),

            hir::ExprKind::Closure(..) => {
                debug!("{:?} is an ExprKind::Closure", expr);

                // the construction of a closure itself is not important,
                // but we have to consider the closed over variables.
                let caps = self
                    .ir
                    .capture_info_map
                    .get(&expr.hir_id)
                    .cloned()
                    .unwrap_or_else(|| span_bug!(expr.span, "no registered caps"));

                // Each captured variable counts as a read+use at the point
                // of the closure expression.
                caps.iter().rev().fold(succ, |succ, cap| {
                    self.init_from_succ(cap.ln, succ);
                    let var = self.variable(cap.var_hid, expr.span);
                    self.acc(cap.ln, var, ACC_READ | ACC_USE);
                    cap.ln
                })
            }

            // Note that labels have been resolved, so we don't need to look
            // at the label ident
            hir::ExprKind::Loop(ref blk, _, _) => self.propagate_through_loop(expr, &blk, succ),

            hir::ExprKind::Match(ref e, arms, _) => {
                //
                //      (e)
                //       |
                //       v
                //     (expr)
                //     / | \
                //    |  |  |
                //    v  v  v
                //   (..arms..)
                //    |  |  |
                //    v  v  v
                //   (  succ  )
                //
                //
                let ln = self.live_node(expr.hir_id, expr.span);
                self.init_empty(ln, succ);
                let mut first_merge = true;
                // Merge the entry node of every arm (body, then guard, then
                // pattern bindings, walked in reverse) into `ln`.
                for arm in arms {
                    let body_succ = self.propagate_through_expr(&arm.body, succ);

                    let guard_succ = self.propagate_through_opt_expr(
                        arm.guard.as_ref().map(|hir::Guard::If(e)| *e),
                        body_succ,
                    );
                    let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
                    self.merge_from_succ(ln, arm_succ, first_merge);
                    first_merge = false;
                }
                self.propagate_through_expr(&e, ln)
            }

            hir::ExprKind::Ret(ref o_e) => {
                // Ignore succ and subst exit_ln.
                self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), self.exit_ln)
            }

            hir::ExprKind::Break(label, ref opt_expr) => {
                // Find which label this break jumps to
                let target = match label.target_id {
                    Ok(hir_id) => self.break_ln.get(&hir_id),
                    Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
                }
                .cloned();

                // Now that we know the label we're going to,
                // look it up in the break loop nodes table

                match target {
                    Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
                    None => span_bug!(expr.span, "`break` to unknown label"),
                }
            }

            hir::ExprKind::Continue(label) => {
                // Find which label this expr continues to
                let sc = label
                    .target_id
                    .unwrap_or_else(|err| span_bug!(expr.span, "loop scope error: {}", err));

                // Now that we know the label we're going to,
                // look it up in the continue loop nodes table
                self.cont_ln
                    .get(&sc)
                    .cloned()
                    .unwrap_or_else(|| span_bug!(expr.span, "continue to unknown label"))
            }

            hir::ExprKind::Assign(ref l, ref r, _) => {
                // see comment on places in
                // propagate_through_place_components()
                let succ = self.write_place(&l, succ, ACC_WRITE);
                let succ = self.propagate_through_place_components(&l, succ);
                self.propagate_through_expr(&r, succ)
            }

            hir::ExprKind::AssignOp(_, ref l, ref r) => {
                // an overloaded assign op is like a method call
                if self.typeck_results.is_method_call(expr) {
                    let succ = self.propagate_through_expr(&l, succ);
                    self.propagate_through_expr(&r, succ)
                } else {
                    // see comment on places in
                    // propagate_through_place_components()
                    // Built-in `+=` etc. both reads and writes the LHS.
                    let succ = self.write_place(&l, succ, ACC_WRITE | ACC_READ);
                    let succ = self.propagate_through_expr(&r, succ);
                    self.propagate_through_place_components(&l, succ)
                }
            }

            // Uninteresting cases: just propagate in rev exec order
            hir::ExprKind::Array(ref exprs) => self.propagate_through_exprs(exprs, succ),

            hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
                // The base (`..base`) expression is evaluated last.
                let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
                fields
                    .iter()
                    .rev()
                    .fold(succ, |succ, field| self.propagate_through_expr(&field.expr, succ))
            }

            hir::ExprKind::Call(ref f, ref args) => {
                // A call whose result type is uninhabited never returns:
                // its successor is the exit node, like `return`.
                let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
                let succ = if self.ir.tcx.is_ty_uninhabited_from(
                    m,
                    self.typeck_results.expr_ty(expr),
                    self.param_env,
                ) {
                    self.exit_ln
                } else {
                    succ
                };
                let succ = self.propagate_through_exprs(args, succ);
                self.propagate_through_expr(&f, succ)
            }

            hir::ExprKind::MethodCall(.., ref args, _) => {
                // Same uninhabited-return handling as `Call` above.
                let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
                let succ = if self.ir.tcx.is_ty_uninhabited_from(
                    m,
                    self.typeck_results.expr_ty(expr),
                    self.param_env,
                ) {
                    self.exit_ln
                } else {
                    succ
                };

                self.propagate_through_exprs(args, succ)
            }

            hir::ExprKind::Tup(ref exprs) => self.propagate_through_exprs(exprs, succ),

            hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
                // `&&` / `||`: the RHS may be skipped, so the LHS's
                // successor is the merge of `succ` (short-circuit) and the
                // RHS entry node.
                let r_succ = self.propagate_through_expr(&r, succ);

                let ln = self.live_node(expr.hir_id, expr.span);
                self.init_from_succ(ln, succ);
                self.merge_from_succ(ln, r_succ, false);

                self.propagate_through_expr(&l, ln)
            }

            hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => {
                let r_succ = self.propagate_through_expr(&r, succ);
                self.propagate_through_expr(&l, r_succ)
            }

            // Single-operand wrappers: liveness flows through the operand.
            hir::ExprKind::Box(ref e)
            | hir::ExprKind::AddrOf(_, _, ref e)
            | hir::ExprKind::Cast(ref e, _)
            | hir::ExprKind::Type(ref e, _)
            | hir::ExprKind::DropTemps(ref e)
            | hir::ExprKind::Unary(_, ref e)
            | hir::ExprKind::Yield(ref e, _)
            | hir::ExprKind::Repeat(ref e, _) => self.propagate_through_expr(&e, succ),

            hir::ExprKind::InlineAsm(ref asm) => {
                // Handle non-returning asm
                let mut succ = if asm.options.contains(InlineAsmOptions::NORETURN) {
                    self.exit_ln
                } else {
                    succ
                };

                // Do a first pass for writing outputs only
                for op in asm.operands.iter().rev() {
                    match op {
                        hir::InlineAsmOperand::In { .. }
                        | hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::Sym { .. } => {}
                        hir::InlineAsmOperand::Out { expr, .. } => {
                            if let Some(expr) = expr {
                                succ = self.write_place(expr, succ, ACC_WRITE);
                            }
                        }
                        hir::InlineAsmOperand::InOut { expr, .. } => {
                            succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE | ACC_USE);
                        }
                        hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
                            if let Some(expr) = out_expr {
                                succ = self.write_place(expr, succ, ACC_WRITE);
                            }
                        }
                    }
                }

                // Then do a second pass for inputs
                let mut succ = succ;
                for op in asm.operands.iter().rev() {
                    match op {
                        hir::InlineAsmOperand::In { expr, .. }
                        | hir::InlineAsmOperand::Const { expr, .. }
                        | hir::InlineAsmOperand::Sym { expr, .. } => {
                            succ = self.propagate_through_expr(expr, succ)
                        }
                        hir::InlineAsmOperand::Out { expr, .. } => {
                            if let Some(expr) = expr {
                                succ = self.propagate_through_place_components(expr, succ);
                            }
                        }
                        hir::InlineAsmOperand::InOut { expr, .. } => {
                            succ = self.propagate_through_place_components(expr, succ);
                        }
                        hir::InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
                            if let Some(expr) = out_expr {
                                succ = self.propagate_through_place_components(expr, succ);
                            }
                            succ = self.propagate_through_expr(in_expr, succ);
                        }
                    }
                }
                succ
            }

            hir::ExprKind::LlvmInlineAsm(ref asm) => {
                let ia = &asm.inner;
                let outputs = asm.outputs_exprs;
                let inputs = asm.inputs_exprs;
                let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| {
                    // see comment on places
                    // in propagate_through_place_components()
                    if o.is_indirect {
                        self.propagate_through_expr(output, succ)
                    } else {
                        let acc = if o.is_rw { ACC_WRITE | ACC_READ } else { ACC_WRITE };
                        let succ = self.write_place(output, succ, acc);
                        self.propagate_through_place_components(output, succ)
                    }
                });

                // Inputs are executed first. Propagate last because of rev order
                self.propagate_through_exprs(inputs, succ)
            }

            // Leaf expressions: no variables involved, liveness unchanged.
            hir::ExprKind::Lit(..)
            | hir::ExprKind::ConstBlock(..)
            | hir::ExprKind::Err
            | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
            | hir::ExprKind::Path(hir::QPath::LangItem(..)) => succ,

            // Note that labels have been resolved, so we don't need to look
            // at the label ident
            hir::ExprKind::Block(ref blk, _) => self.propagate_through_block(&blk, succ),
        }
    }
1246
1247 fn propagate_through_place_components(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1248 // # Places
1249 //
1250 // In general, the full flow graph structure for an
1251 // assignment/move/etc can be handled in one of two ways,
1252 // depending on whether what is being assigned is a "tracked
1253 // value" or not. A tracked value is basically a local
1254 // variable or argument.
1255 //
1256 // The two kinds of graphs are:
1257 //
1258 // Tracked place Untracked place
1259 // ----------------------++-----------------------
1260 // ||
1261 // | || |
1262 // v || v
1263 // (rvalue) || (rvalue)
1264 // | || |
1265 // v || v
1266 // (write of place) || (place components)
1267 // | || |
1268 // v || v
1269 // (succ) || (succ)
1270 // ||
1271 // ----------------------++-----------------------
1272 //
1273 // I will cover the two cases in turn:
1274 //
1275 // # Tracked places
1276 //
1277 // A tracked place is a local variable/argument `x`. In
1278 // these cases, the link_node where the write occurs is linked
1279 // to node id of `x`. The `write_place()` routine generates
1280 // the contents of this node. There are no subcomponents to
1281 // consider.
1282 //
1283 // # Non-tracked places
1284 //
1285 // These are places like `x[5]` or `x.f`. In that case, we
1286 // basically ignore the value which is written to but generate
1287 // reads for the components---`x` in these two examples. The
1288 // components reads are generated by
1289 // `propagate_through_place_components()` (this fn).
1290 //
1291 // # Illegal places
1292 //
1293 // It is still possible to observe assignments to non-places;
1294 // these errors are detected in the later pass borrowck. We
1295 // just ignore such cases and treat them as reads.
1296
1297 match expr.kind {
1298 hir::ExprKind::Path(_) => succ,
1299 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
1300 _ => self.propagate_through_expr(expr, succ),
1301 }
1302 }
1303
1304 // see comment on propagate_through_place()
1305 fn write_place(&mut self, expr: &Expr<'_>, succ: LiveNode, acc: u32) -> LiveNode {
1306 match expr.kind {
1307 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1308 self.access_path(expr.hir_id, path, succ, acc)
1309 }
1310
1311 // We do not track other places, so just propagate through
1312 // to their subcomponents. Also, it may happen that
1313 // non-places occur here, because those are detected in the
1314 // later pass borrowck.
1315 _ => succ,
1316 }
1317 }
1318
1319 fn access_var(
1320 &mut self,
1321 hir_id: HirId,
1322 var_hid: HirId,
1323 succ: LiveNode,
1324 acc: u32,
1325 span: Span,
1326 ) -> LiveNode {
1327 let ln = self.live_node(hir_id, span);
1328 if acc != 0 {
1329 self.init_from_succ(ln, succ);
1330 let var = self.variable(var_hid, span);
1331 self.acc(ln, var, acc);
1332 }
1333 ln
1334 }
1335
1336 fn access_path(
1337 &mut self,
1338 hir_id: HirId,
1339 path: &hir::Path<'_>,
1340 succ: LiveNode,
1341 acc: u32,
1342 ) -> LiveNode {
1343 match path.res {
1344 Res::Local(hid) => self.access_var(hir_id, hid, succ, acc, path.span),
1345 _ => succ,
1346 }
1347 }
1348
    /// Computes liveness through a `loop` by iterating the body to a fixed
    /// point: the loop back-edge makes the entry node depend on itself.
    fn propagate_through_loop(
        &mut self,
        expr: &Expr<'_>,
        body: &hir::Block<'_>,
        succ: LiveNode,
    ) -> LiveNode {
        /*
        We model control flow like this:

              (expr) <-+
                |      |
                v      |
              (body) --+

        Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
        Meanwhile, a `break` expression will have a successor of `succ`.
        */

        // first iteration:
        let mut first_merge = true;
        let ln = self.live_node(expr.hir_id, expr.span);
        self.init_empty(ln, succ);
        debug!("propagate_through_loop: using id for loop body {} {:?}", expr.hir_id, body);

        // `break` exits to `succ` ...
        self.break_ln.insert(expr.hir_id, succ);

        // ... while `continue` re-enters at the loop head `ln`.
        self.cont_ln.insert(expr.hir_id, ln);

        let body_ln = self.propagate_through_block(body, ln);

        // repeat until fixed point is reached:
        // (the assert_eq! deliberately re-propagates through the body as a
        // side effect, and checks that re-propagation is stable)
        while self.merge_from_succ(ln, body_ln, first_merge) {
            first_merge = false;
            assert_eq!(body_ln, self.propagate_through_block(body, ln));
        }

        ln
    }
1387 }
1388
1389 // _______________________________________________________________________
1390 // Checking for error conditions
1391
1392 impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
1393 type Map = intravisit::ErasedMap<'tcx>;
1394
1395 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
1396 NestedVisitorMap::None
1397 }
1398
1399 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
1400 self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
1401 if local.init.is_some() {
1402 self.warn_about_dead_assign(spans, hir_id, ln, var);
1403 }
1404 });
1405
1406 intravisit::walk_local(self, local);
1407 }
1408
1409 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
1410 check_expr(self, ex);
1411 }
1412
1413 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
1414 self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
1415 intravisit::walk_arm(self, arm);
1416 }
1417 }
1418
/// Checks a single expression for liveness-related error conditions:
/// assignment targets must be valid places, and dead assignments are
/// reported. Recurses into subexpressions via `walk_expr` at the end.
fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr<'tcx>) {
    match expr.kind {
        hir::ExprKind::Assign(ref l, ..) => {
            this.check_place(&l);
        }

        hir::ExprKind::AssignOp(_, ref l, _) => {
            // An overloaded `+=` etc. is a method call on the LHS, not a
            // write to a place, so no place check applies.
            if !this.typeck_results.is_method_call(expr) {
                this.check_place(&l);
            }
        }

        hir::ExprKind::InlineAsm(ref asm) => {
            // Every output operand of `asm!` must be a valid place.
            for op in asm.operands {
                match op {
                    hir::InlineAsmOperand::Out { expr, .. } => {
                        if let Some(expr) = expr {
                            this.check_place(expr);
                        }
                    }
                    hir::InlineAsmOperand::InOut { expr, .. } => {
                        this.check_place(expr);
                    }
                    hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
                        if let Some(out_expr) = out_expr {
                            this.check_place(out_expr);
                        }
                    }
                    _ => {}
                }
            }
        }

        hir::ExprKind::LlvmInlineAsm(ref asm) => {
            for input in asm.inputs_exprs {
                this.visit_expr(input);
            }

            // Output operands must be places
            for (o, output) in asm.inner.outputs.iter().zip(asm.outputs_exprs) {
                if !o.is_indirect {
                    this.check_place(output);
                }
                this.visit_expr(output);
            }
        }

        // no correctness conditions related to liveness
        hir::ExprKind::Call(..)
        | hir::ExprKind::MethodCall(..)
        | hir::ExprKind::Match(..)
        | hir::ExprKind::Loop(..)
        | hir::ExprKind::Index(..)
        | hir::ExprKind::Field(..)
        | hir::ExprKind::Array(..)
        | hir::ExprKind::Tup(..)
        | hir::ExprKind::Binary(..)
        | hir::ExprKind::Cast(..)
        | hir::ExprKind::DropTemps(..)
        | hir::ExprKind::Unary(..)
        | hir::ExprKind::Ret(..)
        | hir::ExprKind::Break(..)
        | hir::ExprKind::Continue(..)
        | hir::ExprKind::Lit(_)
        | hir::ExprKind::ConstBlock(..)
        | hir::ExprKind::Block(..)
        | hir::ExprKind::AddrOf(..)
        | hir::ExprKind::Struct(..)
        | hir::ExprKind::Repeat(..)
        | hir::ExprKind::Closure(..)
        | hir::ExprKind::Path(_)
        | hir::ExprKind::Yield(..)
        | hir::ExprKind::Box(..)
        | hir::ExprKind::Type(..)
        | hir::ExprKind::Err => {}
    }

    intravisit::walk_expr(this, expr);
}
1498
impl<'tcx> Liveness<'_, 'tcx> {
    /// Validates an assignment target: a write to a local that is dead
    /// afterwards is reported as an unused assignment.
    fn check_place(&mut self, expr: &'tcx Expr<'tcx>) {
        match expr.kind {
            hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
                if let Res::Local(var_hid) = path.res {
                    // Assignment to an immutable variable or argument: only legal
                    // if there is no later assignment. If this local is actually
                    // mutable, then check for a reassignment to flag the mutability
                    // as being used.
                    let ln = self.live_node(expr.hir_id, expr.span);
                    let var = self.variable(var_hid, expr.span);
                    self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
                }
            }
            _ => {
                // For other kinds of places, no checks are required,
                // and any embedded expressions are actually rvalues
                intravisit::walk_expr(self, expr);
            }
        }
    }

    /// Returns the variable's name if it should participate in
    /// unused/dead-assignment lints; `None` suppresses the warning for
    /// invalid names and names starting with an underscore.
    fn should_warn(&self, var: Variable) -> Option<String> {
        let name = self.ir.variable_name(var);
        if name == kw::Invalid {
            return None;
        }
        // Non-empty here: the kw::Invalid check above filtered the empty name.
        let name: &str = &name.as_str();
        if name.as_bytes()[0] == b'_' {
            return None;
        }
        Some(name.to_owned())
    }

    /// Lints by-value captures of a closure that are never read inside it.
    /// By-ref captures are skipped: the reference itself keeps the place
    /// alive for the enclosing body.
    fn warn_about_unused_upvars(&self, entry_ln: LiveNode) {
        let upvars = match self.upvars {
            None => return,
            Some(upvars) => upvars,
        };
        for (&var_hir_id, upvar) in upvars.iter() {
            let var = self.variable(var_hir_id, upvar.span);
            let upvar_id = ty::UpvarId {
                var_path: ty::UpvarPath { hir_id: var_hir_id },
                closure_expr_id: self.body_owner,
            };
            match self.typeck_results.upvar_capture(upvar_id) {
                ty::UpvarCapture::ByValue(_) => {}
                ty::UpvarCapture::ByRef(..) => continue,
            };
            if self.used_on_entry(entry_ln, var) {
                // Captured, used, but never read: the capture is dead.
                if self.live_on_entry(entry_ln, var).is_none() {
                    if let Some(name) = self.should_warn(var) {
                        self.ir.tcx.struct_span_lint_hir(
                            lint::builtin::UNUSED_ASSIGNMENTS,
                            var_hir_id,
                            vec![upvar.span],
                            |lint| {
                                lint.build(&format!("value captured by `{}` is never read", name))
                                    .help("did you mean to capture by reference instead?")
                                    .emit();
                            },
                        );
                    }
                }
            } else {
                // Captured but never used at all.
                if let Some(name) = self.should_warn(var) {
                    self.ir.tcx.struct_span_lint_hir(
                        lint::builtin::UNUSED_VARIABLES,
                        var_hir_id,
                        vec![upvar.span],
                        |lint| {
                            lint.build(&format!("unused variable: `{}`", name))
                                .help("did you mean to capture by reference instead?")
                                .emit();
                        },
                    );
                }
            }
        }
    }

    /// Lints function parameters whose passed-in value is never read.
    fn warn_about_unused_args(&self, body: &hir::Body<'_>, entry_ln: LiveNode) {
        for p in body.params {
            self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
                if self.live_on_entry(ln, var).is_none() {
                    self.report_unsed_assign(hir_id, spans, var, |name| {
                        format!("value passed to `{}` is never read", name)
                    });
                }
            });
        }
    }

    /// Walks the bindings of `pat`, groups same-named bindings (from
    /// or-patterns), and either invokes `on_used_on_entry` for used
    /// variables or reports unused ones.
    fn check_unused_vars_in_pat(
        &self,
        pat: &hir::Pat<'_>,
        entry_ln: Option<LiveNode>,
        on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
    ) {
        // In an or-pattern, only consider the variable; any later patterns must have the same
        // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
        // However, we should take the ids and spans of variables with the same name from the later
        // patterns so the suggestions to prefix with underscores will apply to those too.
        let mut vars: FxIndexMap<Symbol, (LiveNode, Variable, Vec<(HirId, Span)>)> = <_>::default();

        pat.each_binding(|_, hir_id, pat_sp, ident| {
            // Use the caller-supplied entry node (for parameters), or the
            // binding's own node otherwise.
            let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
            let var = self.variable(hir_id, ident.span);
            let id_and_sp = (hir_id, pat_sp);
            vars.entry(self.ir.variable_name(var))
                .and_modify(|(.., hir_ids_and_spans)| hir_ids_and_spans.push(id_and_sp))
                .or_insert_with(|| (ln, var, vec![id_and_sp]));
        });

        for (_, (ln, var, hir_ids_and_spans)) in vars {
            if self.used_on_entry(ln, var) {
                let id = hir_ids_and_spans[0].0;
                let spans = hir_ids_and_spans.into_iter().map(|(_, sp)| sp).collect();
                on_used_on_entry(spans, id, ln, var);
            } else {
                self.report_unused(hir_ids_and_spans, ln, var);
            }
        }
    }

    /// Emits the `unused_variables` lint for a binding that is never used,
    /// with a suggestion: `_`-prefix the name, or `name: _` for struct
    /// field shorthands.
    fn report_unused(&self, hir_ids_and_spans: Vec<(HirId, Span)>, ln: LiveNode, var: Variable) {
        let first_hir_id = hir_ids_and_spans[0].0;

        if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
            // annoying: for parameters in funcs like `fn(x: i32)
            // {ret}`, there is only one node, so asking about
            // assigned_on_exit() is not meaningful.
            let is_assigned =
                if ln == self.exit_ln { false } else { self.assigned_on_exit(ln, var).is_some() };

            if is_assigned {
                // Written to after the binding point, but never read.
                self.ir.tcx.struct_span_lint_hir(
                    lint::builtin::UNUSED_VARIABLES,
                    first_hir_id,
                    hir_ids_and_spans.into_iter().map(|(_, sp)| sp).collect::<Vec<_>>(),
                    |lint| {
                        lint.build(&format!("variable `{}` is assigned to, but never used", name))
                            .note(&format!("consider using `_{}` instead", name))
                            .emit();
                    },
                )
            } else {
                self.ir.tcx.struct_span_lint_hir(
                    lint::builtin::UNUSED_VARIABLES,
                    first_hir_id,
                    hir_ids_and_spans.iter().map(|(_, sp)| *sp).collect::<Vec<_>>(),
                    |lint| {
                        let mut err = lint.build(&format!("unused variable: `{}`", name));

                        let (shorthands, non_shorthands): (Vec<_>, Vec<_>) =
                            hir_ids_and_spans.into_iter().partition(|(hir_id, span)| {
                                let var = self.variable(*hir_id, *span);
                                self.ir.variable_is_shorthand(var)
                            });

                        let mut shorthands = shorthands
                            .into_iter()
                            .map(|(_, span)| (span, format!("{}: _", name)))
                            .collect::<Vec<_>>();

                        // If we have both shorthand and non-shorthand, prefer the "try ignoring
                        // the field" message, and suggest `_` for the non-shorthands. If we only
                        // have non-shorthand, then prefix with an underscore instead.
                        if !shorthands.is_empty() {
                            shorthands.extend(
                                non_shorthands
                                    .into_iter()
                                    .map(|(_, span)| (span, "_".to_string()))
                                    .collect::<Vec<_>>(),
                            );

                            err.multipart_suggestion(
                                "try ignoring the field",
                                shorthands,
                                Applicability::MachineApplicable,
                            );
                        } else {
                            err.multipart_suggestion(
                                "if this is intentional, prefix it with an underscore",
                                non_shorthands
                                    .into_iter()
                                    .map(|(_, span)| (span, format!("_{}", name)))
                                    .collect::<Vec<_>>(),
                                Applicability::MachineApplicable,
                            );
                        }

                        err.emit()
                    },
                );
            }
        }
    }

    /// Reports an unused assignment when the assigned value is dead on
    /// exit from the assignment's node.
    fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
        if self.live_on_exit(ln, var).is_none() {
            self.report_unsed_assign(hir_id, spans, var, |name| {
                format!("value assigned to `{}` is never read", name)
            });
        }
    }

    /// Emits the `unused_assignments` lint with a caller-provided message.
    // NOTE(review): the name has a typo ("unsed" for "unused"); kept as-is
    // because callers may exist outside this chunk of the file.
    fn report_unsed_assign(
        &self,
        hir_id: HirId,
        spans: Vec<Span>,
        var: Variable,
        message: impl Fn(&str) -> String,
    ) {
        if let Some(name) = self.should_warn(var) {
            self.ir.tcx.struct_span_lint_hir(
                lint::builtin::UNUSED_ASSIGNMENTS,
                hir_id,
                spans,
                |lint| {
                    lint.build(&message(&name))
                        .help("maybe it is overwritten before being read?")
                        .emit();
                },
            )
        }
    }
}