1 //! A classic liveness analysis based on dataflow over the AST. Computes,
2 //! for each local variable in a function, whether that variable is live
3 //! at a given point. Program execution points are identified by their
4 //! IDs.
5 //!
6 //! # Basic idea
7 //!
8 //! The basic model is that each local variable is assigned an index. We
9 //! represent sets of local variables using a vector indexed by this
10 //! index. The value in the vector is either 0, indicating the variable
11 //! is dead, or the ID of an expression that uses the variable.
12 //!
13 //! We conceptually walk over the AST in reverse execution order. If we
14 //! find a use of a variable, we add it to the set of live variables. If
15 //! we find an assignment to a variable, we remove it from the set of live
16 //! variables. When we have to merge two flows, we take the union of
17 //! those two flows -- if the variable is live on both paths, we simply
18 //! pick one ID. In the event of loops, we continue doing this until a
19 //! fixed point is reached.
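//!
//! As a small illustration (a sketch, not part of the analysis itself; `f`,
//! `c` and `x` are made-up names):
//!
//! ```rust
//! fn f(c: bool) {
//!     let mut x = 0; // this `0` is never read: the assignment below makes it a dead assignment
//!     x = 1;         // this value may be read below, so this assignment is not dead
//!     if c {
//!         println!("{}", x); // a *last use* of `x`: it is dead afterwards
//!     }
//! }
//! ```
//!
//! Walking backwards, `x` is dead after the `if`, becomes live in the branch
//! that prints it, stays live when the two branches are merged, and is removed
//! from the live set again at the assignment `x = 1`.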
20 //!
21 //! ## Checking initialization
22 //!
23 //! At the function entry point, all variables must be dead. If this is
24 //! not the case, we can report an error using the ID found in the set of
25 //! live variables, which identifies a use of the variable which is not
26 //! dominated by an assignment.
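//!
//! For example, in the following (rejected) sketch the final read of `x` is
//! reachable without passing through an assignment when `c` is false, so `x`
//! is still live at the point where it is declared:
//!
//! ```compile_fail
//! fn g(c: bool) -> i32 {
//!     let x: i32;
//!     if c {
//!         x = 1;
//!     }
//!     x
//! }
//! ```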
27 //!
28 //! ## Checking moves
29 //!
30 //! After each explicit move, the variable must be dead.
31 //!
32 //! ## Computing last uses
33 //!
34 //! Any use of the variable where the variable is dead afterwards is a
35 //! last use.
36 //!
37 //! # Implementation details
38 //!
39 //! The actual implementation contains two (nested) walks over the AST.
40 //! The outer walk has the job of building up the ir_maps instance for the
41 //! enclosing function. On the way down the tree, it identifies those AST
42 //! nodes and variable IDs that will be needed for the liveness analysis
43 //! and assigns them contiguous IDs. The liveness ID for an AST node is
44 //! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
45 //! is called a `variable` (another newtype'd `u32`).
46 //!
47 //! On the way back up the tree, as we are about to exit from a function
48 //! declaration we allocate a `liveness` instance. Now that we know
49 //! precisely how many nodes and variables we need, we can allocate all
50 //! the various arrays that we will need to precisely the right size. We then
51 //! perform the actual propagation on the `liveness` instance.
52 //!
53 //! This propagation is encoded in the various `propagate_through_*()`
54 //! methods. It effectively does a reverse walk of the AST; whenever we
55 //! reach a loop node, we iterate until a fixed point is reached.
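//!
//! For instance (an illustrative sketch), in
//!
//! ```rust
//! fn h() {
//!     let mut x = 0;
//!     loop {
//!         x += 1;
//!         if x > 3 { break; }
//!     }
//! }
//! ```
//!
//! the first pass over the loop body finds `x` live on entry to the body (both
//! statements read it), merging that result into the loop head makes `x` live
//! there as well, and another pass produces no further change, so the fixed
//! point has been reached.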
56 //!
57 //! ## The `RWU` struct
58 //!
59 //! At each live node `N`, we track three pieces of information for each
60 //! variable `V` (these are encapsulated in the `RWU` struct):
61 //!
62 //! - `reader`: whether there is some node which will read the value
63 //! that `V` holds on entry to `N`. Formally: a node `M` such
64 //! that there exists a path `P` from `N` to `M` where `P` does not
65 //! write `V`. If `reader` is not set, then the current
66 //! value will never be read (the variable is dead, essentially).
67 //!
68 //! - `writer`: whether there is some node which will write the
69 //! variable `V` and which is reachable from `N`. Formally: a node `M`
70 //! such that there exists a path `P` from `N` to `M` and `M` writes
71 //! `V`. If `writer` is not set, then there is no writer
72 //! of `V` that follows `N`.
73 //!
74 //! - `used`: a boolean value indicating whether `V` is *used*. We
75 //! distinguish a *read* from a *use* in that a *use* is some read that
76 //! is not just used to generate a new value. For example, `x += 1` is
77 //! a read but not a use. This is used to generate better warnings.
78 //!
79 //! ## Special nodes and variables
80 //!
81 //! We generate various special nodes for various, well, special purposes.
82 //! These are described in the `Liveness` struct.
83
84 use self::LiveNodeKind::*;
85 use self::VarKind::*;
86
87 use rustc_ast::InlineAsmOptions;
88 use rustc_data_structures::fx::FxIndexMap;
89 use rustc_errors::Applicability;
90 use rustc_hir as hir;
91 use rustc_hir::def::*;
92 use rustc_hir::def_id::LocalDefId;
93 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
94 use rustc_hir::{Expr, HirId, HirIdMap, HirIdSet};
95 use rustc_index::vec::IndexVec;
96 use rustc_middle::hir::map::Map;
97 use rustc_middle::ty::query::Providers;
98 use rustc_middle::ty::{self, DefIdTree, RootVariableMinCaptureList, TyCtxt};
99 use rustc_session::lint;
100 use rustc_span::symbol::{kw, sym, Symbol};
101 use rustc_span::Span;
102
103 use std::collections::VecDeque;
104 use std::io;
105 use std::io::prelude::*;
106 use std::iter;
107 use std::rc::Rc;
108
109 mod rwu_table;
110
111 rustc_index::newtype_index! {
112 pub struct Variable {
113 DEBUG_FORMAT = "v({})",
114 }
115 }
116
117 rustc_index::newtype_index! {
118 pub struct LiveNode {
119 DEBUG_FORMAT = "ln({})",
120 }
121 }
122
123 #[derive(Copy, Clone, PartialEq, Debug)]
124 enum LiveNodeKind {
125 UpvarNode(Span),
126 ExprNode(Span),
127 VarDefNode(Span),
128 ClosureNode,
129 ExitNode,
130 }
131
132 fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
133 let sm = tcx.sess.source_map();
134 match lnk {
135 UpvarNode(s) => format!("Upvar node [{}]", sm.span_to_string(s)),
136 ExprNode(s) => format!("Expr node [{}]", sm.span_to_string(s)),
137 VarDefNode(s) => format!("Var def node [{}]", sm.span_to_string(s)),
138 ClosureNode => "Closure node".to_owned(),
139 ExitNode => "Exit node".to_owned(),
140 }
141 }
142
143 fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
144 tcx.hir().visit_item_likes_in_module(module_def_id, &mut IrMaps::new(tcx).as_deep_visitor());
145 }
146
147 pub fn provide(providers: &mut Providers) {
148 *providers = Providers { check_mod_liveness, ..*providers };
149 }
150
151 // ______________________________________________________________________
152 // Creating ir_maps
153 //
154 // This is the first pass and the one that drives the main
155 // computation. It walks up and down the IR once. On the way down,
156 // we count for each function the number of variables as well as
157 // liveness nodes. A liveness node is basically an expression or
158 // capture clause that does something of interest: either it has
159 // interesting control flow or it uses/defines a local variable.
160 //
161 // On the way back up, at each function node we create liveness sets
162 // (we now know precisely how big to make our various vectors and so
163 // forth) and then do the data-flow propagation to compute the set
164 // of live variables at each program point.
165 //
166 // Finally, we run back over the IR one last time and, using the
167 // computed liveness, check various safety conditions. For example,
168 // there must be no live nodes at the definition site for a variable
169 // unless it has an initializer. Similarly, each non-mutable local
170 // variable must not be assigned if there is some successor
171 // assignment. And so forth.
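//
// As an illustrative example of what counts as a liveness node: for
// `if c { x } else { 0 }`, live nodes are created for the `if` expression and
// for the paths `c` and `x` (they resolve to local variables), but not for the
// literal `0`.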
172
173 struct CaptureInfo {
174 ln: LiveNode,
175 var_hid: HirId,
176 }
177
178 #[derive(Copy, Clone, Debug)]
179 struct LocalInfo {
180 id: HirId,
181 name: Symbol,
182 is_shorthand: bool,
183 }
184
185 #[derive(Copy, Clone, Debug)]
186 enum VarKind {
187 Param(HirId, Symbol),
188 Local(LocalInfo),
189 Upvar(HirId, Symbol),
190 }
191
192 struct IrMaps<'tcx> {
193 tcx: TyCtxt<'tcx>,
194 live_node_map: HirIdMap<LiveNode>,
195 variable_map: HirIdMap<Variable>,
196 capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
197 var_kinds: IndexVec<Variable, VarKind>,
198 lnks: IndexVec<LiveNode, LiveNodeKind>,
199 }
200
201 impl IrMaps<'tcx> {
202 fn new(tcx: TyCtxt<'tcx>) -> IrMaps<'tcx> {
203 IrMaps {
204 tcx,
205 live_node_map: HirIdMap::default(),
206 variable_map: HirIdMap::default(),
207 capture_info_map: Default::default(),
208 var_kinds: IndexVec::new(),
209 lnks: IndexVec::new(),
210 }
211 }
212
213 fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
214 let ln = self.lnks.push(lnk);
215
216 debug!("{:?} is of kind {}", ln, live_node_kind_to_string(lnk, self.tcx));
217
218 ln
219 }
220
221 fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
222 let ln = self.add_live_node(lnk);
223 self.live_node_map.insert(hir_id, ln);
224
225 debug!("{:?} is node {:?}", ln, hir_id);
226 }
227
228 fn add_variable(&mut self, vk: VarKind) -> Variable {
229 let v = self.var_kinds.push(vk);
230
231 match vk {
232 Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) | Upvar(node_id, _) => {
233 self.variable_map.insert(node_id, v);
234 }
235 }
236
237 debug!("{:?} is {:?}", v, vk);
238
239 v
240 }
241
242 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
243 match self.variable_map.get(&hir_id) {
244 Some(&var) => var,
245 None => {
246 span_bug!(span, "no variable registered for id {:?}", hir_id);
247 }
248 }
249 }
250
251 fn variable_name(&self, var: Variable) -> Symbol {
252 match self.var_kinds[var] {
253 Local(LocalInfo { name, .. }) | Param(_, name) | Upvar(_, name) => name,
254 }
255 }
256
257 fn variable_is_shorthand(&self, var: Variable) -> bool {
258 match self.var_kinds[var] {
259 Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
260 Param(..) | Upvar(..) => false,
261 }
262 }
263
264 fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
265 self.capture_info_map.insert(hir_id, Rc::new(cs));
266 }
267
268 fn add_from_pat(&mut self, pat: &hir::Pat<'tcx>) {
269 // For struct patterns, take note of which fields used shorthand
270 // (`x` rather than `x: x`).
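// E.g. in `let Foo { x, y: z } = v;` (with a hypothetical struct `Foo`),
// `x` is a shorthand field but `z` is not.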
271 let mut shorthand_field_ids = HirIdSet::default();
272 let mut pats = VecDeque::new();
273 pats.push_back(pat);
274 while let Some(pat) = pats.pop_front() {
275 use rustc_hir::PatKind::*;
276 match &pat.kind {
277 Binding(.., inner_pat) => {
278 pats.extend(inner_pat.iter());
279 }
280 Struct(_, fields, _) => {
281 let ids = fields.iter().filter(|f| f.is_shorthand).map(|f| f.pat.hir_id);
282 shorthand_field_ids.extend(ids);
283 }
284 Ref(inner_pat, _) | Box(inner_pat) => {
285 pats.push_back(inner_pat);
286 }
287 TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
288 pats.extend(inner_pats.iter());
289 }
290 Slice(pre_pats, inner_pat, post_pats) => {
291 pats.extend(pre_pats.iter());
292 pats.extend(inner_pat.iter());
293 pats.extend(post_pats.iter());
294 }
295 _ => {}
296 }
297 }
298
299 pat.each_binding(|_, hir_id, _, ident| {
300 self.add_live_node_for_node(hir_id, VarDefNode(ident.span));
301 self.add_variable(Local(LocalInfo {
302 id: hir_id,
303 name: ident.name,
304 is_shorthand: shorthand_field_ids.contains(&hir_id),
305 }));
306 });
307 }
308 }
309
310 impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
311 type Map = Map<'tcx>;
312
313 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
314 NestedVisitorMap::OnlyBodies(self.tcx.hir())
315 }
316
317 fn visit_body(&mut self, body: &'tcx hir::Body<'tcx>) {
318 debug!("visit_body {:?}", body.id());
319
320 // swap in a new set of IR maps for this body
321 let mut maps = IrMaps::new(self.tcx);
322 let hir_id = maps.tcx.hir().body_owner(body.id());
323 let local_def_id = maps.tcx.hir().local_def_id(hir_id);
324 let def_id = local_def_id.to_def_id();
325
326 // Don't run unused pass for #[derive()]
327 if let Some(parent) = self.tcx.parent(def_id) {
328 if let DefKind::Impl = self.tcx.def_kind(parent.expect_local()) {
329 if self.tcx.has_attr(parent, sym::automatically_derived) {
330 return;
331 }
332 }
333 }
334
335 if let Some(captures) = maps.tcx.typeck(local_def_id).closure_min_captures.get(&def_id) {
336 for &var_hir_id in captures.keys() {
337 let var_name = maps.tcx.hir().name(var_hir_id);
338 maps.add_variable(Upvar(var_hir_id, var_name));
339 }
340 }
341
342 // gather up the various local variables, significant expressions,
343 // and so forth:
344 intravisit::walk_body(&mut maps, body);
345
346 // compute liveness
347 let mut lsets = Liveness::new(&mut maps, local_def_id);
348 let entry_ln = lsets.compute(&body, hir_id);
349 lsets.log_liveness(entry_ln, body.id().hir_id);
350
351 // check for various error conditions
352 lsets.visit_body(body);
353 lsets.warn_about_unused_upvars(entry_ln);
354 lsets.warn_about_unused_args(body, entry_ln);
355 }
356
357 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
358 self.add_from_pat(&local.pat);
359 intravisit::walk_local(self, local);
360 }
361
362 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
363 self.add_from_pat(&arm.pat);
364 if let Some(hir::Guard::IfLet(ref pat, _)) = arm.guard {
365 self.add_from_pat(pat);
366 }
367 intravisit::walk_arm(self, arm);
368 }
369
370 fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) {
371 param.pat.each_binding(|_bm, hir_id, _x, ident| {
372 let var = match param.pat.kind {
373 rustc_hir::PatKind::Struct(_, fields, _) => Local(LocalInfo {
374 id: hir_id,
375 name: ident.name,
376 is_shorthand: fields
377 .iter()
378 .find(|f| f.ident == ident)
379 .map_or(false, |f| f.is_shorthand),
380 }),
381 _ => Param(hir_id, ident.name),
382 };
383 self.add_variable(var);
384 });
385 intravisit::walk_param(self, param);
386 }
387
388 fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
389 match expr.kind {
390 // live nodes required for uses or definitions of variables:
391 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
392 debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
393 if let Res::Local(_var_hir_id) = path.res {
394 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
395 }
396 intravisit::walk_expr(self, expr);
397 }
398 hir::ExprKind::Closure(..) => {
399 // Interesting control flow (for loops can contain labeled
400 // breaks or continues)
401 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
402
403 // Make a live_node for each captured variable, with the span
404 // being the location that the variable is used. This results
405 // in better error messages than just pointing at the closure
406 // construction site.
407 let mut call_caps = Vec::new();
408 let closure_def_id = self.tcx.hir().local_def_id(expr.hir_id);
409 if let Some(captures) = self
410 .tcx
411 .typeck(closure_def_id)
412 .closure_min_captures
413 .get(&closure_def_id.to_def_id())
414 {
415 // If closure_min_captures is Some, upvars_mentioned must also be Some
416 let upvars = self.tcx.upvars_mentioned(closure_def_id).unwrap();
417 call_caps.extend(captures.keys().map(|var_id| {
418 let upvar = upvars[var_id];
419 let upvar_ln = self.add_live_node(UpvarNode(upvar.span));
420 CaptureInfo { ln: upvar_ln, var_hid: *var_id }
421 }));
422 }
423 self.set_captures(expr.hir_id, call_caps);
424 intravisit::walk_expr(self, expr);
425 }
426
427 // live nodes required for interesting control flow:
428 hir::ExprKind::If(..) | hir::ExprKind::Match(..) | hir::ExprKind::Loop(..) => {
429 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
430 intravisit::walk_expr(self, expr);
431 }
432 hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
433 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
434 intravisit::walk_expr(self, expr);
435 }
436
437 // otherwise, live nodes are not required:
438 hir::ExprKind::Index(..)
439 | hir::ExprKind::Field(..)
440 | hir::ExprKind::Array(..)
441 | hir::ExprKind::Call(..)
442 | hir::ExprKind::MethodCall(..)
443 | hir::ExprKind::Tup(..)
444 | hir::ExprKind::Binary(..)
445 | hir::ExprKind::AddrOf(..)
446 | hir::ExprKind::Cast(..)
447 | hir::ExprKind::DropTemps(..)
448 | hir::ExprKind::Unary(..)
449 | hir::ExprKind::Break(..)
450 | hir::ExprKind::Continue(_)
451 | hir::ExprKind::Lit(_)
452 | hir::ExprKind::ConstBlock(..)
453 | hir::ExprKind::Ret(..)
454 | hir::ExprKind::Block(..)
455 | hir::ExprKind::Assign(..)
456 | hir::ExprKind::AssignOp(..)
457 | hir::ExprKind::Struct(..)
458 | hir::ExprKind::Repeat(..)
459 | hir::ExprKind::InlineAsm(..)
460 | hir::ExprKind::LlvmInlineAsm(..)
461 | hir::ExprKind::Box(..)
462 | hir::ExprKind::Yield(..)
463 | hir::ExprKind::Type(..)
464 | hir::ExprKind::Err
465 | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
466 | hir::ExprKind::Path(hir::QPath::LangItem(..)) => {
467 intravisit::walk_expr(self, expr);
468 }
469 }
470 }
471 }
472
473 // ______________________________________________________________________
474 // Computing liveness sets
475 //
476 // Actually we compute just a bit more than just liveness, but we use
477 // the same basic propagation framework in all cases.
478
479 const ACC_READ: u32 = 1;
480 const ACC_WRITE: u32 = 2;
481 const ACC_USE: u32 = 4;
482
483 struct Liveness<'a, 'tcx> {
484 ir: &'a mut IrMaps<'tcx>,
485 typeck_results: &'a ty::TypeckResults<'tcx>,
486 param_env: ty::ParamEnv<'tcx>,
487 upvars: Option<&'tcx FxIndexMap<hir::HirId, hir::Upvar>>,
488 closure_min_captures: Option<&'tcx RootVariableMinCaptureList<'tcx>>,
489 successors: IndexVec<LiveNode, Option<LiveNode>>,
490 rwu_table: rwu_table::RWUTable,
491
492 /// A live node representing a point of execution before closure entry &
493 /// after closure exit. Used to calculate liveness of captured variables
494 /// through calls to the same closure. Used for Fn & FnMut closures only.
495 closure_ln: LiveNode,
496 /// A live node representing every 'exit' from the function, whether it be
497 /// by explicit return, panic, or other means.
498 exit_ln: LiveNode,
499
500 // mappings from loop node ID to LiveNode
501 // ("break" label should map to loop node ID,
502 // it probably doesn't now)
503 break_ln: HirIdMap<LiveNode>,
504 cont_ln: HirIdMap<LiveNode>,
505 }
506
507 impl<'a, 'tcx> Liveness<'a, 'tcx> {
508 fn new(ir: &'a mut IrMaps<'tcx>, body_owner: LocalDefId) -> Liveness<'a, 'tcx> {
509 let typeck_results = ir.tcx.typeck(body_owner);
510 let param_env = ir.tcx.param_env(body_owner);
511 let upvars = ir.tcx.upvars_mentioned(body_owner);
512 let closure_min_captures = typeck_results.closure_min_captures.get(&body_owner.to_def_id());
513 let closure_ln = ir.add_live_node(ClosureNode);
514 let exit_ln = ir.add_live_node(ExitNode);
515
516 let num_live_nodes = ir.lnks.len();
517 let num_vars = ir.var_kinds.len();
518
519 Liveness {
520 ir,
521 typeck_results,
522 param_env,
523 upvars,
524 closure_min_captures,
525 successors: IndexVec::from_elem_n(None, num_live_nodes),
526 rwu_table: rwu_table::RWUTable::new(num_live_nodes, num_vars),
527 closure_ln,
528 exit_ln,
529 break_ln: Default::default(),
530 cont_ln: Default::default(),
531 }
532 }
533
534 fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
535 match self.ir.live_node_map.get(&hir_id) {
536 Some(&ln) => ln,
537 None => {
538 // This must be a mismatch between the ir_map construction
539 // above and the propagation code below; the two sets of
540 // code have to agree about which AST nodes are worth
541 // creating liveness nodes for.
542 span_bug!(span, "no live node registered for node {:?}", hir_id);
543 }
544 }
545 }
546
547 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
548 self.ir.variable(hir_id, span)
549 }
550
551 fn define_bindings_in_pat(&mut self, pat: &hir::Pat<'_>, mut succ: LiveNode) -> LiveNode {
552 // In an or-pattern, only consider the first pattern; any later patterns
553 // must have the same bindings, and we also consider the first pattern
554 // to be the "authoritative" set of ids.
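// E.g. for a match arm `Ok(n) | Err(n) => ...`, only the `n` binding in the
// first alternative is defined here.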
555 pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
556 let ln = self.live_node(hir_id, pat_sp);
557 let var = self.variable(hir_id, ident.span);
558 self.init_from_succ(ln, succ);
559 self.define(ln, var);
560 succ = ln;
561 });
562 succ
563 }
564
565 fn live_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
566 self.rwu_table.get_reader(ln, var)
567 }
568
569 // Is this variable live on entry to any of its successor nodes?
570 fn live_on_exit(&self, ln: LiveNode, var: Variable) -> bool {
571 let successor = self.successors[ln].unwrap();
572 self.live_on_entry(successor, var)
573 }
574
575 fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
576 self.rwu_table.get_used(ln, var)
577 }
578
579 fn assigned_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
580 self.rwu_table.get_writer(ln, var)
581 }
582
583 fn assigned_on_exit(&self, ln: LiveNode, var: Variable) -> bool {
584 let successor = self.successors[ln].unwrap();
585 self.assigned_on_entry(successor, var)
586 }
587
588 fn write_vars<F>(&self, wr: &mut dyn Write, mut test: F) -> io::Result<()>
589 where
590 F: FnMut(Variable) -> bool,
591 {
592 for var_idx in 0..self.ir.var_kinds.len() {
593 let var = Variable::from(var_idx);
594 if test(var) {
595 write!(wr, " {:?}", var)?;
596 }
597 }
598 Ok(())
599 }
600
601 #[allow(unused_must_use)]
602 fn ln_str(&self, ln: LiveNode) -> String {
603 let mut wr = Vec::new();
604 {
605 let wr = &mut wr as &mut dyn Write;
606 write!(wr, "[{:?} of kind {:?} reads", ln, self.ir.lnks[ln]);
607 self.write_vars(wr, |var| self.rwu_table.get_reader(ln, var));
608 write!(wr, " writes");
609 self.write_vars(wr, |var| self.rwu_table.get_writer(ln, var));
610 write!(wr, " uses");
611 self.write_vars(wr, |var| self.rwu_table.get_used(ln, var));
612
613 write!(wr, " precedes {:?}]", self.successors[ln]);
614 }
615 String::from_utf8(wr).unwrap()
616 }
617
618 fn log_liveness(&self, entry_ln: LiveNode, hir_id: hir::HirId) {
619 // hack to skip the loop unless debug! is enabled:
620 debug!(
621 "^^ liveness computation results for body {} (entry={:?})",
622 {
623 for ln_idx in 0..self.ir.lnks.len() {
624 debug!("{:?}", self.ln_str(LiveNode::from(ln_idx)));
625 }
626 hir_id
627 },
628 entry_ln
629 );
630 }
631
632 fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
633 self.successors[ln] = Some(succ_ln);
634
635 // It is not necessary to initialize the RWUs here because they are all
636 // empty when created, and the sets only grow during iterations.
637 }
638
639 fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
640 // more efficient version of init_empty() / merge_from_succ()
641 self.successors[ln] = Some(succ_ln);
642 self.rwu_table.copy(ln, succ_ln);
643 debug!("init_from_succ(ln={}, succ={})", self.ln_str(ln), self.ln_str(succ_ln));
644 }
645
646 fn merge_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) -> bool {
647 if ln == succ_ln {
648 return false;
649 }
650
651 let changed = self.rwu_table.union(ln, succ_ln);
652 debug!("merge_from_succ(ln={:?}, succ={}, changed={})", ln, self.ln_str(succ_ln), changed);
653 changed
654 }
655
656 // Indicates that a local variable was *defined*; we know that no
657 // uses of the variable can precede the definition (resolve checks
658 // this) so we just clear out all the data.
659 fn define(&mut self, writer: LiveNode, var: Variable) {
660 let used = self.rwu_table.get_used(writer, var);
661 self.rwu_table.set(writer, var, rwu_table::RWU { reader: false, writer: false, used });
662 debug!("{:?} defines {:?}: {}", writer, var, self.ln_str(writer));
663 }
664
665 // Either read, write, or both depending on the acc bitset
666 fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
667 debug!("{:?} accesses[{:x}] {:?}: {}", ln, acc, var, self.ln_str(ln));
668
669 let mut rwu = self.rwu_table.get(ln, var);
670
671 if (acc & ACC_WRITE) != 0 {
672 rwu.reader = false;
673 rwu.writer = true;
674 }
675
676 // Important: if we both read/write, must do read second
677 // or else the write will override.
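// E.g. `x += 1` passes ACC_READ | ACC_WRITE: the write clears the reader
// bit and the read below sets it again, so `x` is still live on entry to
// this node.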
678 if (acc & ACC_READ) != 0 {
679 rwu.reader = true;
680 }
681
682 if (acc & ACC_USE) != 0 {
683 rwu.used = true;
684 }
685
686 self.rwu_table.set(ln, var, rwu);
687 }
688
689 fn compute(&mut self, body: &hir::Body<'_>, hir_id: HirId) -> LiveNode {
690 debug!("compute: for body {:?}", body.id().hir_id);
691
692 // # Liveness of captured variables
693 //
694 // When computing the liveness for captured variables we take into
695 // account how variable is captured (ByRef vs ByValue) and what is the
696 // closure kind (Generator / FnOnce vs Fn / FnMut).
697 //
698 // Variables captured by reference are assumed to be used on the exit
699 // from the closure.
700 //
701 // In FnOnce closures, variables captured by value are known to be dead
702 // on exit since it is impossible to call the closure again.
703 //
704 // In Fn / FnMut closures, variables captured by value are live on exit
705 // if they are live on the entry to the closure, since only the closure
706 // itself can access them on subsequent calls.
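//
// As an illustrative sketch: for
//
//     let mut s = String::new();
//     let mut push_a = || s.push('a'); // `s` is captured by mutable reference
//     push_a();
//
// the by-ref capture of `s` is treated as read on the exit node, while for
// `move || drop(s)` (an `FnOnce` closure) the by-value capture of `s` is
// dead on exit, since the closure cannot be called again.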
707
708 if let Some(closure_min_captures) = self.closure_min_captures {
709 // Mark upvars captured by reference as used after closure exits.
710 for (&var_hir_id, min_capture_list) in closure_min_captures {
711 for captured_place in min_capture_list {
712 match captured_place.info.capture_kind {
713 ty::UpvarCapture::ByRef(_) => {
714 let var = self.variable(
715 var_hir_id,
716 captured_place.get_capture_kind_span(self.ir.tcx),
717 );
718 self.acc(self.exit_ln, var, ACC_READ | ACC_USE);
719 }
720 ty::UpvarCapture::ByValue(_) => {}
721 }
722 }
723 }
724 }
725
726 let succ = self.propagate_through_expr(&body.value, self.exit_ln);
727
728 if self.closure_min_captures.is_none() {
729 // Either not a closure, or closure without any captured variables.
730 // No need to determine liveness of captured variables, since there
731 // are none.
732 return succ;
733 }
734
735 let ty = self.typeck_results.node_type(hir_id);
736 match ty.kind() {
737 ty::Closure(_def_id, substs) => match substs.as_closure().kind() {
738 ty::ClosureKind::Fn => {}
739 ty::ClosureKind::FnMut => {}
740 ty::ClosureKind::FnOnce => return succ,
741 },
742 ty::Generator(..) => return succ,
743 _ => {
744 span_bug!(
745 body.value.span,
746 "{} has upvars so it should have a closure type: {:?}",
747 hir_id,
748 ty
749 );
750 }
751 };
752
753 // Propagate through calls to the closure.
754 loop {
755 self.init_from_succ(self.closure_ln, succ);
756 for param in body.params {
757 param.pat.each_binding(|_bm, hir_id, _x, ident| {
758 let var = self.variable(hir_id, ident.span);
759 self.define(self.closure_ln, var);
760 })
761 }
762
763 if !self.merge_from_succ(self.exit_ln, self.closure_ln) {
764 break;
765 }
766 assert_eq!(succ, self.propagate_through_expr(&body.value, self.exit_ln));
767 }
768
769 succ
770 }
771
772 fn propagate_through_block(&mut self, blk: &hir::Block<'_>, succ: LiveNode) -> LiveNode {
773 if blk.targeted_by_break {
774 self.break_ln.insert(blk.hir_id, succ);
775 }
776 let succ = self.propagate_through_opt_expr(blk.expr.as_deref(), succ);
777 blk.stmts.iter().rev().fold(succ, |succ, stmt| self.propagate_through_stmt(stmt, succ))
778 }
779
780 fn propagate_through_stmt(&mut self, stmt: &hir::Stmt<'_>, succ: LiveNode) -> LiveNode {
781 match stmt.kind {
782 hir::StmtKind::Local(ref local) => {
783 // Note: we mark the variable as defined regardless of whether
784 // there is an initializer. Initially I had thought to only mark
785 // the live variable as defined if it was initialized, and then we
786 // could check for uninit variables just by scanning what is live
787 // at the start of the function. But that doesn't work so well for
788 // immutable variables defined in a loop:
789 // loop { let x; x = 5; }
790 // because the "assignment" loops back around and generates an error.
791 //
792 // So now we just check that variables defined w/o an
793 // initializer are not live at the point of their
794 // initialization, which is mildly more complex than checking
795 // once at the func header but otherwise equivalent.
796
797 let succ = self.propagate_through_opt_expr(local.init.as_deref(), succ);
798 self.define_bindings_in_pat(&local.pat, succ)
799 }
800 hir::StmtKind::Item(..) => succ,
801 hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
802 self.propagate_through_expr(&expr, succ)
803 }
804 }
805 }
806
807 fn propagate_through_exprs(&mut self, exprs: &[Expr<'_>], succ: LiveNode) -> LiveNode {
808 exprs.iter().rev().fold(succ, |succ, expr| self.propagate_through_expr(&expr, succ))
809 }
810
811 fn propagate_through_opt_expr(
812 &mut self,
813 opt_expr: Option<&Expr<'_>>,
814 succ: LiveNode,
815 ) -> LiveNode {
816 opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
817 }
818
819 fn propagate_through_expr(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
820 debug!("propagate_through_expr: {:?}", expr);
821
822 match expr.kind {
823 // Interesting cases with control flow or which gen/kill
824 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
825 self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
826 }
827
828 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
829
830 hir::ExprKind::Closure(..) => {
831 debug!("{:?} is an ExprKind::Closure", expr);
832
833 // the construction of a closure itself is not important,
834 // but we have to consider the closed over variables.
835 let caps = self
836 .ir
837 .capture_info_map
838 .get(&expr.hir_id)
839 .cloned()
840 .unwrap_or_else(|| span_bug!(expr.span, "no registered caps"));
841
842 caps.iter().rev().fold(succ, |succ, cap| {
843 self.init_from_succ(cap.ln, succ);
844 let var = self.variable(cap.var_hid, expr.span);
845 self.acc(cap.ln, var, ACC_READ | ACC_USE);
846 cap.ln
847 })
848 }
849
850 // Note that labels have been resolved, so we don't need to look
851 // at the label ident
852 hir::ExprKind::Loop(ref blk, ..) => self.propagate_through_loop(expr, &blk, succ),
853
854 hir::ExprKind::If(ref cond, ref then, ref else_opt) => {
855 //
856 //     (cond)
857 //       |
858 //       v
859 //     (expr)
860 //      /   \
861 //     |     |
862 //     v     v
863 //   (then)(els)
864 //     |     |
865 //     v     v
866 //   (   succ   )
867 //
868 let else_ln =
869 self.propagate_through_opt_expr(else_opt.as_ref().map(|e| &**e), succ);
870 let then_ln = self.propagate_through_expr(&then, succ);
871 let ln = self.live_node(expr.hir_id, expr.span);
872 self.init_from_succ(ln, else_ln);
873 self.merge_from_succ(ln, then_ln);
874 self.propagate_through_expr(&cond, ln)
875 }
876
877 hir::ExprKind::Match(ref e, arms, _) => {
878 //
879 //      (e)
880 //       |
881 //       v
882 //     (expr)
883 //     / | \
884 //    |  |  |
885 //    v  v  v
886 //   (..arms..)
887 //    |  |  |
888 //    v  v  v
889 //   (  succ  )
890 //
891 //
892 let ln = self.live_node(expr.hir_id, expr.span);
893 self.init_empty(ln, succ);
894 for arm in arms {
895 let body_succ = self.propagate_through_expr(&arm.body, succ);
896
897 let guard_succ = arm.guard.as_ref().map_or(body_succ, |g| match g {
898 hir::Guard::If(e) => self.propagate_through_expr(e, body_succ),
899 hir::Guard::IfLet(pat, e) => {
900 let let_bind = self.define_bindings_in_pat(pat, body_succ);
901 self.propagate_through_expr(e, let_bind)
902 }
903 });
904 let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
905 self.merge_from_succ(ln, arm_succ);
906 }
907 self.propagate_through_expr(&e, ln)
908 }
909
910 hir::ExprKind::Ret(ref o_e) => {
911 // Ignore succ and subst exit_ln.
912 self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), self.exit_ln)
913 }
914
915 hir::ExprKind::Break(label, ref opt_expr) => {
916 // Find which label this break jumps to
917 let target = match label.target_id {
918 Ok(hir_id) => self.break_ln.get(&hir_id),
919 Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
920 }
921 .cloned();
922
923 // Now that we know the label we're going to,
924 // look it up in the break loop nodes table
925
926 match target {
927 Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
928 None => span_bug!(expr.span, "`break` to unknown label"),
929 }
930 }
931
932 hir::ExprKind::Continue(label) => {
933 // Find which label this expr continues to
934 let sc = label
935 .target_id
936 .unwrap_or_else(|err| span_bug!(expr.span, "loop scope error: {}", err));
937
938 // Now that we know the label we're going to,
939 // look it up in the continue loop nodes table
940 self.cont_ln
941 .get(&sc)
942 .cloned()
943 .unwrap_or_else(|| span_bug!(expr.span, "continue to unknown label"))
944 }
945
946 hir::ExprKind::Assign(ref l, ref r, _) => {
947 // see comment on places in
948 // propagate_through_place_components()
949 let succ = self.write_place(&l, succ, ACC_WRITE);
950 let succ = self.propagate_through_place_components(&l, succ);
951 self.propagate_through_expr(&r, succ)
952 }
953
954 hir::ExprKind::AssignOp(_, ref l, ref r) => {
955 // an overloaded assign op is like a method call
956 if self.typeck_results.is_method_call(expr) {
957 let succ = self.propagate_through_expr(&l, succ);
958 self.propagate_through_expr(&r, succ)
959 } else {
960 // see comment on places in
961 // propagate_through_place_components()
962 let succ = self.write_place(&l, succ, ACC_WRITE | ACC_READ);
963 let succ = self.propagate_through_expr(&r, succ);
964 self.propagate_through_place_components(&l, succ)
965 }
966 }
967
968 // Uninteresting cases: just propagate in rev exec order
969 hir::ExprKind::Array(ref exprs) => self.propagate_through_exprs(exprs, succ),
970
971 hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
972 let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
973 fields
974 .iter()
975 .rev()
976 .fold(succ, |succ, field| self.propagate_through_expr(&field.expr, succ))
977 }
978
979 hir::ExprKind::Call(ref f, ref args) => {
980 let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
981 let succ = if self.ir.tcx.is_ty_uninhabited_from(
982 m,
983 self.typeck_results.expr_ty(expr),
984 self.param_env,
985 ) {
986 self.exit_ln
987 } else {
988 succ
989 };
990 let succ = self.propagate_through_exprs(args, succ);
991 self.propagate_through_expr(&f, succ)
992 }
993
994 hir::ExprKind::MethodCall(.., ref args, _) => {
995 let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
996 let succ = if self.ir.tcx.is_ty_uninhabited_from(
997 m,
998 self.typeck_results.expr_ty(expr),
999 self.param_env,
1000 ) {
1001 self.exit_ln
1002 } else {
1003 succ
1004 };
1005
1006 self.propagate_through_exprs(args, succ)
1007 }
1008
1009 hir::ExprKind::Tup(ref exprs) => self.propagate_through_exprs(exprs, succ),
1010
1011 hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
1012 let r_succ = self.propagate_through_expr(&r, succ);
1013
1014 let ln = self.live_node(expr.hir_id, expr.span);
1015 self.init_from_succ(ln, succ);
1016 self.merge_from_succ(ln, r_succ);
1017
1018 self.propagate_through_expr(&l, ln)
1019 }
1020
1021 hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => {
1022 let r_succ = self.propagate_through_expr(&r, succ);
1023 self.propagate_through_expr(&l, r_succ)
1024 }
1025
1026 hir::ExprKind::Box(ref e)
1027 | hir::ExprKind::AddrOf(_, _, ref e)
1028 | hir::ExprKind::Cast(ref e, _)
1029 | hir::ExprKind::Type(ref e, _)
1030 | hir::ExprKind::DropTemps(ref e)
1031 | hir::ExprKind::Unary(_, ref e)
1032 | hir::ExprKind::Yield(ref e, _)
1033 | hir::ExprKind::Repeat(ref e, _) => self.propagate_through_expr(&e, succ),
1034
1035 hir::ExprKind::InlineAsm(ref asm) => {
1036 // Handle non-returning asm
1037 let mut succ = if asm.options.contains(InlineAsmOptions::NORETURN) {
1038 self.exit_ln
1039 } else {
1040 succ
1041 };
1042
1043 // Do a first pass for writing outputs only
1044 for (op, _op_sp) in asm.operands.iter().rev() {
1045 match op {
1046 hir::InlineAsmOperand::In { .. }
1047 | hir::InlineAsmOperand::Const { .. }
1048 | hir::InlineAsmOperand::Sym { .. } => {}
1049 hir::InlineAsmOperand::Out { expr, .. } => {
1050 if let Some(expr) = expr {
1051 succ = self.write_place(expr, succ, ACC_WRITE);
1052 }
1053 }
1054 hir::InlineAsmOperand::InOut { expr, .. } => {
1055 succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE | ACC_USE);
1056 }
1057 hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
1058 if let Some(expr) = out_expr {
1059 succ = self.write_place(expr, succ, ACC_WRITE);
1060 }
1061 }
1062 }
1063 }
1064
1065 // Then do a second pass for inputs
1066 let mut succ = succ;
1067 for (op, _op_sp) in asm.operands.iter().rev() {
1068 match op {
1069 hir::InlineAsmOperand::In { expr, .. }
1070 | hir::InlineAsmOperand::Sym { expr, .. } => {
1071 succ = self.propagate_through_expr(expr, succ)
1072 }
1073 hir::InlineAsmOperand::Out { expr, .. } => {
1074 if let Some(expr) = expr {
1075 succ = self.propagate_through_place_components(expr, succ);
1076 }
1077 }
1078 hir::InlineAsmOperand::InOut { expr, .. } => {
1079 succ = self.propagate_through_place_components(expr, succ);
1080 }
1081 hir::InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
1082 if let Some(expr) = out_expr {
1083 succ = self.propagate_through_place_components(expr, succ);
1084 }
1085 succ = self.propagate_through_expr(in_expr, succ);
1086 }
1087 hir::InlineAsmOperand::Const { .. } => {}
1088 }
1089 }
1090 succ
1091 }
1092
1093 hir::ExprKind::LlvmInlineAsm(ref asm) => {
1094 let ia = &asm.inner;
1095 let outputs = asm.outputs_exprs;
1096 let inputs = asm.inputs_exprs;
1097 let succ = iter::zip(&ia.outputs, outputs).rev().fold(succ, |succ, (o, output)| {
1098 // see comment on places
1099 // in propagate_through_place_components()
1100 if o.is_indirect {
1101 self.propagate_through_expr(output, succ)
1102 } else {
1103 let acc = if o.is_rw { ACC_WRITE | ACC_READ } else { ACC_WRITE };
1104 let succ = self.write_place(output, succ, acc);
1105 self.propagate_through_place_components(output, succ)
1106 }
1107 });
1108
1109 // Inputs are executed first. Propagate last because of rev order
1110 self.propagate_through_exprs(inputs, succ)
1111 }
1112
1113 hir::ExprKind::Lit(..)
1114 | hir::ExprKind::ConstBlock(..)
1115 | hir::ExprKind::Err
1116 | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
1117 | hir::ExprKind::Path(hir::QPath::LangItem(..)) => succ,
1118
1119 // Note that labels have been resolved, so we don't need to look
1120 // at the label ident
1121 hir::ExprKind::Block(ref blk, _) => self.propagate_through_block(&blk, succ),
1122 }
1123 }
1124
1125 fn propagate_through_place_components(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1126 // # Places
1127 //
1128 // In general, the full flow graph structure for an
1129 // assignment/move/etc can be handled in one of two ways,
1130 // depending on whether what is being assigned is a "tracked
1131 // value" or not. A tracked value is basically a local
1132 // variable or argument.
1133 //
1134 // The two kinds of graphs are:
1135 //
1136 //         Tracked place          Untracked place
1137 // ----------------------++-----------------------
1138 //                       ||
1139 //           |           ||           |
1140 //           v           ||           v
1141 //      (rvalue)         ||      (rvalue)
1142 //           |           ||           |
1143 //           v           ||           v
1144 // (write of place)      ||   (place components)
1145 //           |           ||           |
1146 //           v           ||           v
1147 //          (succ)       ||          (succ)
1148 //                       ||
1149 // ----------------------++-----------------------
1150 //
1151 // I will cover the two cases in turn:
1152 //
1153 // # Tracked places
1154 //
1155 // A tracked place is a local variable/argument `x`. In
1156 // these cases, the link_node where the write occurs is linked
1157 // to node id of `x`. The `write_place()` routine generates
1158 // the contents of this node. There are no subcomponents to
1159 // consider.
1160 //
1161 // # Non-tracked places
1162 //
1163 // These are places like `x[5]` or `x.f`. In that case, we
1164 // basically ignore the value which is written to but generate
1165 // reads for the components---`x` in these two examples. The
1166 // components reads are generated by
1167 // `propagate_through_place_components()` (this fn).
1168 //
1169 // # Illegal places
1170 //
1171 // It is still possible to observe assignments to non-places;
1172 // these errors are detected in the later pass borrowck. We
1173 // just ignore such cases and treat them as reads.
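//
// For example (illustrative): in `x = f()` the place `x` is tracked and
// `write_place()` records the write, while in `v[i] = f()` or `p.f = f()`
// only reads of the components (`v`, `i`, `p`) are generated here and the
// write itself is not tracked.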
1174
1175 match expr.kind {
1176 hir::ExprKind::Path(_) => succ,
1177 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
1178 _ => self.propagate_through_expr(expr, succ),
1179 }
1180 }
1181
1182 // see comment on places in propagate_through_place_components()
1183 fn write_place(&mut self, expr: &Expr<'_>, succ: LiveNode, acc: u32) -> LiveNode {
1184 match expr.kind {
1185 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1186 self.access_path(expr.hir_id, path, succ, acc)
1187 }
1188
1189 // We do not track other places, so just propagate through
1190 // to their subcomponents. Also, it may happen that
1191 // non-places occur here, because those are detected in the
1192 // later pass borrowck.
1193 _ => succ,
1194 }
1195 }
1196
1197 fn access_var(
1198 &mut self,
1199 hir_id: HirId,
1200 var_hid: HirId,
1201 succ: LiveNode,
1202 acc: u32,
1203 span: Span,
1204 ) -> LiveNode {
1205 let ln = self.live_node(hir_id, span);
1206 if acc != 0 {
1207 self.init_from_succ(ln, succ);
1208 let var = self.variable(var_hid, span);
1209 self.acc(ln, var, acc);
1210 }
1211 ln
1212 }
1213
1214 fn access_path(
1215 &mut self,
1216 hir_id: HirId,
1217 path: &hir::Path<'_>,
1218 succ: LiveNode,
1219 acc: u32,
1220 ) -> LiveNode {
1221 match path.res {
1222 Res::Local(hid) => {
1223 let in_upvars = self.upvars.map_or(false, |u| u.contains_key(&hid));
1224 let in_captures = self.closure_min_captures.map_or(false, |c| c.contains_key(&hid));
1225
1226 match (in_upvars, in_captures) {
1227 (false, _) | (true, true) => self.access_var(hir_id, hid, succ, acc, path.span),
1228 (true, false) => {
1229 // This case is possible when, with RFC 2229 (precise closure
1230 // captures), a wildcard pattern is used within a closure,
1231 // e.g. `let _ = x`. The closure doesn't capture `x` here,
1232 // even though it's mentioned in the closure body.
1233 succ
1234 }
1235 }
1236 }
1237 _ => succ,
1238 }
1239 }
1240
1241 fn propagate_through_loop(
1242 &mut self,
1243 expr: &Expr<'_>,
1244 body: &hir::Block<'_>,
1245 succ: LiveNode,
1246 ) -> LiveNode {
1247 /*
1248 We model control flow like this:
1249
1250     (expr) <-+
1251       |      |
1252       v      |
1253     (body) --+
1254
1255 Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
1256 Meanwhile, a `break` expression will have a successor of `succ`.
1257 */
1258
1259 // first iteration:
1260 let ln = self.live_node(expr.hir_id, expr.span);
1261 self.init_empty(ln, succ);
1262 debug!("propagate_through_loop: using id for loop body {} {:?}", expr.hir_id, body);
1263
1264 self.break_ln.insert(expr.hir_id, succ);
1265
1266 self.cont_ln.insert(expr.hir_id, ln);
1267
1268 let body_ln = self.propagate_through_block(body, ln);
1269
1270 // repeat until fixed point is reached:
1271 while self.merge_from_succ(ln, body_ln) {
1272 assert_eq!(body_ln, self.propagate_through_block(body, ln));
1273 }
1274
1275 ln
1276 }
1277 }
1278
1279 // _______________________________________________________________________
1280 // Checking for error conditions
1281
1282 impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
1283 type Map = intravisit::ErasedMap<'tcx>;
1284
1285 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
1286 NestedVisitorMap::None
1287 }
1288
1289 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
1290 self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
1291 if local.init.is_some() {
1292 self.warn_about_dead_assign(spans, hir_id, ln, var);
1293 }
1294 });
1295
1296 intravisit::walk_local(self, local);
1297 }
1298
1299 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
1300 check_expr(self, ex);
1301 }
1302
1303 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
1304 self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
1305 intravisit::walk_arm(self, arm);
1306 }
1307 }
1308
1309 fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr<'tcx>) {
1310 match expr.kind {
1311 hir::ExprKind::Assign(ref l, ..) => {
1312 this.check_place(&l);
1313 }
1314
1315 hir::ExprKind::AssignOp(_, ref l, _) => {
1316 if !this.typeck_results.is_method_call(expr) {
1317 this.check_place(&l);
1318 }
1319 }
1320
1321 hir::ExprKind::InlineAsm(ref asm) => {
1322 for (op, _op_sp) in asm.operands {
1323 match op {
1324 hir::InlineAsmOperand::Out { expr, .. } => {
1325 if let Some(expr) = expr {
1326 this.check_place(expr);
1327 }
1328 }
1329 hir::InlineAsmOperand::InOut { expr, .. } => {
1330 this.check_place(expr);
1331 }
1332 hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
1333 if let Some(out_expr) = out_expr {
1334 this.check_place(out_expr);
1335 }
1336 }
1337 _ => {}
1338 }
1339 }
1340 }
1341
1342 hir::ExprKind::LlvmInlineAsm(ref asm) => {
1343 for input in asm.inputs_exprs {
1344 this.visit_expr(input);
1345 }
1346
1347 // Output operands must be places
1348 for (o, output) in iter::zip(&asm.inner.outputs, asm.outputs_exprs) {
1349 if !o.is_indirect {
1350 this.check_place(output);
1351 }
1352 this.visit_expr(output);
1353 }
1354 }
1355
1356 // no correctness conditions related to liveness
1357 hir::ExprKind::Call(..)
1358 | hir::ExprKind::MethodCall(..)
1359 | hir::ExprKind::Match(..)
1360 | hir::ExprKind::Loop(..)
1361 | hir::ExprKind::Index(..)
1362 | hir::ExprKind::Field(..)
1363 | hir::ExprKind::Array(..)
1364 | hir::ExprKind::Tup(..)
1365 | hir::ExprKind::Binary(..)
1366 | hir::ExprKind::Cast(..)
1367 | hir::ExprKind::If(..)
1368 | hir::ExprKind::DropTemps(..)
1369 | hir::ExprKind::Unary(..)
1370 | hir::ExprKind::Ret(..)
1371 | hir::ExprKind::Break(..)
1372 | hir::ExprKind::Continue(..)
1373 | hir::ExprKind::Lit(_)
1374 | hir::ExprKind::ConstBlock(..)
1375 | hir::ExprKind::Block(..)
1376 | hir::ExprKind::AddrOf(..)
1377 | hir::ExprKind::Struct(..)
1378 | hir::ExprKind::Repeat(..)
1379 | hir::ExprKind::Closure(..)
1380 | hir::ExprKind::Path(_)
1381 | hir::ExprKind::Yield(..)
1382 | hir::ExprKind::Box(..)
1383 | hir::ExprKind::Type(..)
1384 | hir::ExprKind::Err => {}
1385 }
1386
1387 intravisit::walk_expr(this, expr);
1388 }
1389
1390 impl<'tcx> Liveness<'_, 'tcx> {
1391 fn check_place(&mut self, expr: &'tcx Expr<'tcx>) {
1392 match expr.kind {
1393 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1394 if let Res::Local(var_hid) = path.res {
1395 // Assignment to an immutable variable or argument: only legal
1396 // if there is no later assignment. If this local is actually
1397 // mutable, then check for a reassignment to flag the mutability
1398 // as being used.
1399 let ln = self.live_node(expr.hir_id, expr.span);
1400 let var = self.variable(var_hid, expr.span);
1401 self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
1402 }
1403 }
1404 _ => {
1405 // For other kinds of places, no checks are required,
1406 // and any embedded expressions are actually rvalues
1407 intravisit::walk_expr(self, expr);
1408 }
1409 }
1410 }
1411
1412 fn should_warn(&self, var: Variable) -> Option<String> {
1413 let name = self.ir.variable_name(var);
1414 if name == kw::Empty {
1415 return None;
1416 }
1417 let name: &str = &name.as_str();
1418 if name.as_bytes()[0] == b'_' {
1419 return None;
1420 }
1421 Some(name.to_owned())
1422 }
1423
1424 fn warn_about_unused_upvars(&self, entry_ln: LiveNode) {
1425 let closure_min_captures = match self.closure_min_captures {
1426 None => return,
1427 Some(closure_min_captures) => closure_min_captures,
1428 };
1429
1430 // If closure_min_captures is Some(), upvars must be Some() too.
1431 for (&var_hir_id, min_capture_list) in closure_min_captures {
1432 for captured_place in min_capture_list {
1433 match captured_place.info.capture_kind {
1434 ty::UpvarCapture::ByValue(_) => {}
1435 ty::UpvarCapture::ByRef(..) => continue,
1436 };
1437 let span = captured_place.get_capture_kind_span(self.ir.tcx);
1438 let var = self.variable(var_hir_id, span);
1439 if self.used_on_entry(entry_ln, var) {
1440 if !self.live_on_entry(entry_ln, var) {
1441 if let Some(name) = self.should_warn(var) {
1442 self.ir.tcx.struct_span_lint_hir(
1443 lint::builtin::UNUSED_ASSIGNMENTS,
1444 var_hir_id,
1445 vec![span],
1446 |lint| {
1447 lint.build(&format!(
1448 "value captured by `{}` is never read",
1449 name
1450 ))
1451 .help("did you mean to capture by reference instead?")
1452 .emit();
1453 },
1454 );
1455 }
1456 }
1457 } else {
1458 if let Some(name) = self.should_warn(var) {
1459 self.ir.tcx.struct_span_lint_hir(
1460 lint::builtin::UNUSED_VARIABLES,
1461 var_hir_id,
1462 vec![span],
1463 |lint| {
1464 lint.build(&format!("unused variable: `{}`", name))
1465 .help("did you mean to capture by reference instead?")
1466 .emit();
1467 },
1468 );
1469 }
1470 }
1471 }
1472 }
1473 }
1474
1475 fn warn_about_unused_args(&self, body: &hir::Body<'_>, entry_ln: LiveNode) {
1476 for p in body.params {
1477 self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
1478 if !self.live_on_entry(ln, var) {
1479 self.report_unused_assign(hir_id, spans, var, |name| {
1480 format!("value passed to `{}` is never read", name)
1481 });
1482 }
1483 });
1484 }
1485 }
1486
1487 fn check_unused_vars_in_pat(
1488 &self,
1489 pat: &hir::Pat<'_>,
1490 entry_ln: Option<LiveNode>,
1491 on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
1492 ) {
1493 // In an or-pattern, only consider the first pattern; any later patterns must have the same
1494 // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
1495 // However, we should take the ids and spans of variables with the same name from the later
1496 // patterns so the suggestions to prefix with underscores will apply to those too.
1497 let mut vars: FxIndexMap<Symbol, (LiveNode, Variable, Vec<(HirId, Span, Span)>)> =
1498 <_>::default();
1499
1500 pat.each_binding(|_, hir_id, pat_sp, ident| {
1501 let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
1502 let var = self.variable(hir_id, ident.span);
1503 let id_and_sp = (hir_id, pat_sp, ident.span);
1504 vars.entry(self.ir.variable_name(var))
1505 .and_modify(|(.., hir_ids_and_spans)| hir_ids_and_spans.push(id_and_sp))
1506 .or_insert_with(|| (ln, var, vec![id_and_sp]));
1507 });
1508
1509 for (_, (ln, var, hir_ids_and_spans)) in vars {
1510 if self.used_on_entry(ln, var) {
1511 let id = hir_ids_and_spans[0].0;
1512 let spans =
1513 hir_ids_and_spans.into_iter().map(|(_, _, ident_span)| ident_span).collect();
1514 on_used_on_entry(spans, id, ln, var);
1515 } else {
1516 self.report_unused(hir_ids_and_spans, ln, var);
1517 }
1518 }
1519 }
1520
1521 fn report_unused(
1522 &self,
1523 hir_ids_and_spans: Vec<(HirId, Span, Span)>,
1524 ln: LiveNode,
1525 var: Variable,
1526 ) {
1527 let first_hir_id = hir_ids_and_spans[0].0;
1528
1529 if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
1530 // annoying: for parameters in funcs like `fn(x: i32)
1531 // {ret}`, there is only one node, so asking about
1532 // assigned_on_exit() is not meaningful.
1533 let is_assigned =
1534 if ln == self.exit_ln { false } else { self.assigned_on_exit(ln, var) };
1535
1536 if is_assigned {
1537 self.ir.tcx.struct_span_lint_hir(
1538 lint::builtin::UNUSED_VARIABLES,
1539 first_hir_id,
1540 hir_ids_and_spans
1541 .into_iter()
1542 .map(|(_, _, ident_span)| ident_span)
1543 .collect::<Vec<_>>(),
1544 |lint| {
1545 lint.build(&format!("variable `{}` is assigned to, but never used", name))
1546 .note(&format!("consider using `_{}` instead", name))
1547 .emit();
1548 },
1549 )
1550 } else {
1551 let (shorthands, non_shorthands): (Vec<_>, Vec<_>) =
1552 hir_ids_and_spans.iter().copied().partition(|(hir_id, _, ident_span)| {
1553 let var = self.variable(*hir_id, *ident_span);
1554 self.ir.variable_is_shorthand(var)
1555 });
1556
1557 // If we have both shorthand and non-shorthand, prefer the "try ignoring
1558 // the field" message, and suggest `_` for the non-shorthands. If we only
1559 // have non-shorthand, then prefix with an underscore instead.
1560 if !shorthands.is_empty() {
1561 let shorthands = shorthands
1562 .into_iter()
1563 .map(|(_, pat_span, _)| (pat_span, format!("{}: _", name)))
1564 .chain(
1565 non_shorthands
1566 .into_iter()
1567 .map(|(_, pat_span, _)| (pat_span, "_".to_string())),
1568 )
1569 .collect::<Vec<_>>();
1570
1571 self.ir.tcx.struct_span_lint_hir(
1572 lint::builtin::UNUSED_VARIABLES,
1573 first_hir_id,
1574 hir_ids_and_spans
1575 .iter()
1576 .map(|(_, pat_span, _)| *pat_span)
1577 .collect::<Vec<_>>(),
1578 |lint| {
1579 let mut err = lint.build(&format!("unused variable: `{}`", name));
1580 err.multipart_suggestion(
1581 "try ignoring the field",
1582 shorthands,
1583 Applicability::MachineApplicable,
1584 );
1585 err.emit()
1586 },
1587 );
1588 } else {
1589 let non_shorthands = non_shorthands
1590 .into_iter()
1591 .map(|(_, _, ident_span)| (ident_span, format!("_{}", name)))
1592 .collect::<Vec<_>>();
1593
1594 self.ir.tcx.struct_span_lint_hir(
1595 lint::builtin::UNUSED_VARIABLES,
1596 first_hir_id,
1597 hir_ids_and_spans
1598 .iter()
1599 .map(|(_, _, ident_span)| *ident_span)
1600 .collect::<Vec<_>>(),
1601 |lint| {
1602 let mut err = lint.build(&format!("unused variable: `{}`", name));
1603 err.multipart_suggestion(
1604 "if this is intentional, prefix it with an underscore",
1605 non_shorthands,
1606 Applicability::MachineApplicable,
1607 );
1608 err.emit()
1609 },
1610 );
1611 }
1612 }
1613 }
1614 }
1615
1616 fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
1617 if !self.live_on_exit(ln, var) {
1618 self.report_unused_assign(hir_id, spans, var, |name| {
1619 format!("value assigned to `{}` is never read", name)
1620 });
1621 }
1622 }
1623
1624 fn report_unused_assign(
1625 &self,
1626 hir_id: HirId,
1627 spans: Vec<Span>,
1628 var: Variable,
1629 message: impl Fn(&str) -> String,
1630 ) {
1631 if let Some(name) = self.should_warn(var) {
1632 self.ir.tcx.struct_span_lint_hir(
1633 lint::builtin::UNUSED_ASSIGNMENTS,
1634 hir_id,
1635 spans,
1636 |lint| {
1637 lint.build(&message(&name))
1638 .help("maybe it is overwritten before being read?")
1639 .emit();
1640 },
1641 )
1642 }
1643 }
1644 }