compiler/rustc_passes/src/liveness.rs (upstream rustc 1.61.0)
1 //! A classic liveness analysis based on dataflow over the AST. Computes,
2 //! for each local variable in a function, whether that variable is live
3 //! at a given point. Program execution points are identified by their
4 //! IDs.
5 //!
6 //! # Basic idea
7 //!
8 //! The basic model is that each local variable is assigned an index. We
9 //! represent sets of local variables using a vector indexed by this
10 //! index. The value in the vector is either 0, indicating the variable
11 //! is dead, or the ID of an expression that uses the variable.
12 //!
13 //! We conceptually walk over the AST in reverse execution order. If we
14 //! find a use of a variable, we add it to the set of live variables. If
15 //! we find an assignment to a variable, we remove it from the set of live
16 //! variables. When we have to merge two flows, we take the union of
17 //! those two flows -- if the variable is live on both paths, we simply
18 //! pick one ID. In the event of loops, we continue doing this until a
19 //! fixed point is reached.
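//!
//! As a minimal sketch (not part of this module), walking this hypothetical
//! function in reverse execution order gives:
//!
//! ```rust,ignore
//! fn demo() {
//!     let x;             // `x` is dead here: the assignment below removed it
//!     x = 1;             // the assignment removes `x` from the live set
//!     println!("{}", x); // the use adds `x` to the live set (recording this ID)
//! }                      // start of the walk: the live set is empty
//! ```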
20 //!
21 //! ## Checking initialization
22 //!
23 //! At the function entry point, all variables must be dead. If this is
24 //! not the case, we can report an error using the ID found in the set of
25 //! live variables, which identifies a use of the variable which is not
26 //! dominated by an assignment.
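//!
//! For example (hypothetical), in the following the use of `x` is not dominated
//! by any assignment, so `x` is still live at the entry point and the stored ID
//! points at the offending use:
//!
//! ```rust,ignore
//! fn broken() {
//!     let x: i32;
//!     println!("{}", x); // this use keeps `x` live all the way back to the entry
//! }
//! ```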
27 //!
28 //! ## Checking moves
29 //!
30 //! After each explicit move, the variable must be dead.
31 //!
32 //! ## Computing last uses
33 //!
34 //! Any use of the variable where the variable is dead afterwards is a
35 //! last use.
36 //!
37 //! # Implementation details
38 //!
39 //! The actual implementation contains two (nested) walks over the AST.
40 //! The outer walk has the job of building up the ir_maps instance for the
41 //! enclosing function. On the way down the tree, it identifies those AST
42 //! nodes and variable IDs that will be needed for the liveness analysis
43 //! and assigns them contiguous IDs. The liveness ID for an AST node is
44 //! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
45 //! is called a `variable` (another newtype'd `u32`).
46 //!
47 //! On the way back up the tree, as we are about to exit from a function
48 //! declaration we allocate a `liveness` instance. Now that we know
49 //! precisely how many nodes and variables we need, we can allocate all
50 //! the various arrays that we will need to precisely the right size. We then
51 //! perform the actual propagation on the `liveness` instance.
52 //!
53 //! This propagation is encoded in the various `propagate_through_*()`
54 //! methods. It effectively does a reverse walk of the AST; whenever we
55 //! reach a loop node, we iterate until a fixed point is reached.
56 //!
57 //! ## The `RWU` struct
58 //!
59 //! At each live node `N`, we track three pieces of information for each
60 //! variable `V` (these are encapsulated in the `RWU` struct):
61 //!
62 //! - `reader`: the `LiveNode` ID of some node which will read the value
63 //! that `V` holds on entry to `N`. Formally: a node `M` such
64 //! that there exists a path `P` from `N` to `M` where `P` does not
65 //! write `V`. If the `reader` is `None`, then the current
66 //! value will never be read (the variable is dead, essentially).
67 //!
68 //! - `writer`: the `LiveNode` ID of some node which will write the
69 //! variable `V` and which is reachable from `N`. Formally: a node `M`
70 //! such that there exists a path `P` from `N` to `M` and `M` writes
71 //! `V`. If the `writer` is `None`, then there is no writer
72 //! of `V` that follows `N`.
73 //!
74 //! - `used`: a boolean value indicating whether `V` is *used*. We
75 //! distinguish a *read* from a *use* in that a *use* is some read that
76 //! is not just used to generate a new value. For example, `x += 1` is
77 //! a read but not a use. This is used to generate better warnings.
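//!
//! A small, illustrative example of why the distinction matters for warnings:
//!
//! ```rust,ignore
//! fn demo() {
//!     let mut x = 0;
//!     x += 1; // reads and writes `x`, but the read is not a *use*
//! }           // `x` is never used, so a warning about it is still justified
//! ```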
78 //!
79 //! ## Special nodes and variables
80 //!
81 //! We generate various special nodes for various, well, special purposes.
82 //! These are described in the `Liveness` struct.
83
84 use self::LiveNodeKind::*;
85 use self::VarKind::*;
86
87 use rustc_ast::InlineAsmOptions;
88 use rustc_data_structures::fx::FxIndexMap;
89 use rustc_errors::Applicability;
90 use rustc_hir as hir;
91 use rustc_hir::def::*;
92 use rustc_hir::def_id::LocalDefId;
93 use rustc_hir::intravisit::{self, Visitor};
94 use rustc_hir::{Expr, HirId, HirIdMap, HirIdSet};
95 use rustc_index::vec::IndexVec;
96 use rustc_middle::hir::nested_filter;
97 use rustc_middle::ty::query::Providers;
98 use rustc_middle::ty::{self, DefIdTree, RootVariableMinCaptureList, Ty, TyCtxt};
99 use rustc_session::lint;
100 use rustc_span::symbol::{kw, sym, Symbol};
101 use rustc_span::Span;
102
103 use std::collections::VecDeque;
104 use std::io;
105 use std::io::prelude::*;
106 use std::rc::Rc;
107
108 mod rwu_table;
109
110 rustc_index::newtype_index! {
111 pub struct Variable {
112 DEBUG_FORMAT = "v({})",
113 }
114 }
115
116 rustc_index::newtype_index! {
117 pub struct LiveNode {
118 DEBUG_FORMAT = "ln({})",
119 }
120 }
121
122 #[derive(Copy, Clone, PartialEq, Debug)]
123 enum LiveNodeKind {
124 UpvarNode(Span),
125 ExprNode(Span, HirId),
126 VarDefNode(Span, HirId),
127 ClosureNode,
128 ExitNode,
129 }
130
131 fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
132 let sm = tcx.sess.source_map();
133 match lnk {
134 UpvarNode(s) => format!("Upvar node [{}]", sm.span_to_diagnostic_string(s)),
135 ExprNode(s, _) => format!("Expr node [{}]", sm.span_to_diagnostic_string(s)),
136 VarDefNode(s, _) => format!("Var def node [{}]", sm.span_to_diagnostic_string(s)),
137 ClosureNode => "Closure node".to_owned(),
138 ExitNode => "Exit node".to_owned(),
139 }
140 }
141
142 fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
143 tcx.hir().visit_item_likes_in_module(module_def_id, &mut IrMaps::new(tcx).as_deep_visitor());
144 }
145
146 pub fn provide(providers: &mut Providers) {
147 *providers = Providers { check_mod_liveness, ..*providers };
148 }
149
150 // ______________________________________________________________________
151 // Creating ir_maps
152 //
153 // This is the first pass and the one that drives the main
154 // computation. It walks up and down the IR once. On the way down,
155 // we count for each function the number of variables as well as
156 // liveness nodes. A liveness node is basically an expression or
157 // capture clause that does something of interest: either it has
158 // interesting control flow or it uses/defines a local variable.
159 //
160 // On the way back up, at each function node we create liveness sets
161 // (we now know precisely how big to make our various vectors and so
162 // forth) and then do the data-flow propagation to compute the set
163 // of live variables at each program point.
164 //
165 // Finally, we run back over the IR one last time and, using the
166 // computed liveness, check various safety conditions. For example,
167 // there must be no live nodes at the definition site for a variable
168 // unless it has an initializer. Similarly, each non-mutable local
169 // variable must not be assigned if there is some successor
170 // assignment. And so forth.
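//
// As a rough, illustrative sketch (hypothetical function), the first pass over
//
//     fn example(p: u32) -> u32 {
//         let q = p + 1;
//         if q > 0 { q } else { 0 }
//     }
//
// registers `p` as a `Param` variable and `q` as a `Local` variable, and creates
// live nodes for the variable definition of `q`, for each read of `p` and `q`,
// and for the `if` expression (interesting control flow).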
171
172 struct CaptureInfo {
173 ln: LiveNode,
174 var_hid: HirId,
175 }
176
177 #[derive(Copy, Clone, Debug)]
178 struct LocalInfo {
179 id: HirId,
180 name: Symbol,
181 is_shorthand: bool,
182 }
183
184 #[derive(Copy, Clone, Debug)]
185 enum VarKind {
186 Param(HirId, Symbol),
187 Local(LocalInfo),
188 Upvar(HirId, Symbol),
189 }
190
191 struct IrMaps<'tcx> {
192 tcx: TyCtxt<'tcx>,
193 live_node_map: HirIdMap<LiveNode>,
194 variable_map: HirIdMap<Variable>,
195 capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
196 var_kinds: IndexVec<Variable, VarKind>,
197 lnks: IndexVec<LiveNode, LiveNodeKind>,
198 }
199
200 impl<'tcx> IrMaps<'tcx> {
201 fn new(tcx: TyCtxt<'tcx>) -> IrMaps<'tcx> {
202 IrMaps {
203 tcx,
204 live_node_map: HirIdMap::default(),
205 variable_map: HirIdMap::default(),
206 capture_info_map: Default::default(),
207 var_kinds: IndexVec::new(),
208 lnks: IndexVec::new(),
209 }
210 }
211
212 fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
213 let ln = self.lnks.push(lnk);
214
215 debug!("{:?} is of kind {}", ln, live_node_kind_to_string(lnk, self.tcx));
216
217 ln
218 }
219
220 fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
221 let ln = self.add_live_node(lnk);
222 self.live_node_map.insert(hir_id, ln);
223
224 debug!("{:?} is node {:?}", ln, hir_id);
225 }
226
227 fn add_variable(&mut self, vk: VarKind) -> Variable {
228 let v = self.var_kinds.push(vk);
229
230 match vk {
231 Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) | Upvar(node_id, _) => {
232 self.variable_map.insert(node_id, v);
233 }
234 }
235
236 debug!("{:?} is {:?}", v, vk);
237
238 v
239 }
240
241 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
242 match self.variable_map.get(&hir_id) {
243 Some(&var) => var,
244 None => {
245 span_bug!(span, "no variable registered for id {:?}", hir_id);
246 }
247 }
248 }
249
250 fn variable_name(&self, var: Variable) -> Symbol {
251 match self.var_kinds[var] {
252 Local(LocalInfo { name, .. }) | Param(_, name) | Upvar(_, name) => name,
253 }
254 }
255
256 fn variable_is_shorthand(&self, var: Variable) -> bool {
257 match self.var_kinds[var] {
258 Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
259 Param(..) | Upvar(..) => false,
260 }
261 }
262
263 fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
264 self.capture_info_map.insert(hir_id, Rc::new(cs));
265 }
266
267 fn collect_shorthand_field_ids(&self, pat: &hir::Pat<'tcx>) -> HirIdSet {
268 // For struct patterns, take note of which fields used shorthand
269 // (`x` rather than `x: x`).
270 let mut shorthand_field_ids = HirIdSet::default();
271 let mut pats = VecDeque::new();
272 pats.push_back(pat);
273
274 while let Some(pat) = pats.pop_front() {
275 use rustc_hir::PatKind::*;
276 match &pat.kind {
277 Binding(.., inner_pat) => {
278 pats.extend(inner_pat.iter());
279 }
280 Struct(_, fields, _) => {
281 let (short, not_short): (Vec<&_>, Vec<&_>) =
282 fields.iter().partition(|f| f.is_shorthand);
283 shorthand_field_ids.extend(short.iter().map(|f| f.pat.hir_id));
284 pats.extend(not_short.iter().map(|f| f.pat));
285 }
286 Ref(inner_pat, _) | Box(inner_pat) => {
287 pats.push_back(inner_pat);
288 }
289 TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
290 pats.extend(inner_pats.iter());
291 }
292 Slice(pre_pats, inner_pat, post_pats) => {
293 pats.extend(pre_pats.iter());
294 pats.extend(inner_pat.iter());
295 pats.extend(post_pats.iter());
296 }
297 _ => {}
298 }
299 }
300
301 shorthand_field_ids
302 }
303
304 fn add_from_pat(&mut self, pat: &hir::Pat<'tcx>) {
305 let shorthand_field_ids = self.collect_shorthand_field_ids(pat);
306
307 pat.each_binding(|_, hir_id, _, ident| {
308 self.add_live_node_for_node(hir_id, VarDefNode(ident.span, hir_id));
309 self.add_variable(Local(LocalInfo {
310 id: hir_id,
311 name: ident.name,
312 is_shorthand: shorthand_field_ids.contains(&hir_id),
313 }));
314 });
315 }
316 }
317
318 impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
319 type NestedFilter = nested_filter::OnlyBodies;
320
321 fn nested_visit_map(&mut self) -> Self::Map {
322 self.tcx.hir()
323 }
324
325 fn visit_body(&mut self, body: &'tcx hir::Body<'tcx>) {
326 debug!("visit_body {:?}", body.id());
327
328 // swap in a new set of IR maps for this body
329 let mut maps = IrMaps::new(self.tcx);
330 let hir_id = maps.tcx.hir().body_owner(body.id());
331 let local_def_id = maps.tcx.hir().local_def_id(hir_id);
332 let def_id = local_def_id.to_def_id();
333
334 // Don't run unused pass for #[derive()]
335 if let Some(parent) = self.tcx.parent(def_id)
336 && let DefKind::Impl = self.tcx.def_kind(parent.expect_local())
337 && self.tcx.has_attr(parent, sym::automatically_derived)
338 {
339 return;
340 }
341
342 // Don't run unused pass for #[naked]
343 if self.tcx.has_attr(def_id, sym::naked) {
344 return;
345 }
346
347 if let Some(upvars) = maps.tcx.upvars_mentioned(def_id) {
348 for &var_hir_id in upvars.keys() {
349 let var_name = maps.tcx.hir().name(var_hir_id);
350 maps.add_variable(Upvar(var_hir_id, var_name));
351 }
352 }
353
354 // gather up the various local variables, significant expressions,
355 // and so forth:
356 intravisit::walk_body(&mut maps, body);
357
358 // compute liveness
359 let mut lsets = Liveness::new(&mut maps, local_def_id);
360 let entry_ln = lsets.compute(&body, hir_id);
361 lsets.log_liveness(entry_ln, body.id().hir_id);
362
363 // check for various error conditions
364 lsets.visit_body(body);
365 lsets.warn_about_unused_upvars(entry_ln);
366 lsets.warn_about_unused_args(body, entry_ln);
367 }
368
369 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
370 self.add_from_pat(&local.pat);
371 intravisit::walk_local(self, local);
372 }
373
374 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
375 self.add_from_pat(&arm.pat);
376 if let Some(hir::Guard::IfLet(ref pat, _)) = arm.guard {
377 self.add_from_pat(pat);
378 }
379 intravisit::walk_arm(self, arm);
380 }
381
382 fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) {
383 let shorthand_field_ids = self.collect_shorthand_field_ids(param.pat);
384 param.pat.each_binding(|_bm, hir_id, _x, ident| {
385 let var = match param.pat.kind {
386 rustc_hir::PatKind::Struct(..) => Local(LocalInfo {
387 id: hir_id,
388 name: ident.name,
389 is_shorthand: shorthand_field_ids.contains(&hir_id),
390 }),
391 _ => Param(hir_id, ident.name),
392 };
393 self.add_variable(var);
394 });
395 intravisit::walk_param(self, param);
396 }
397
398 fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
399 match expr.kind {
400 // live nodes required for uses or definitions of variables:
401 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
402 debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
403 if let Res::Local(_var_hir_id) = path.res {
404 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
405 }
406 intravisit::walk_expr(self, expr);
407 }
408 hir::ExprKind::Closure(..) => {
409 // Interesting control flow (for loops can contain labeled
410 // breaks or continues)
411 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
412
413 // Make a live_node for each mentioned variable, with the span
414 // being the location that the variable is used. This results
415 // in better error messages than just pointing at the closure
416 // construction site.
417 let mut call_caps = Vec::new();
418 let closure_def_id = self.tcx.hir().local_def_id(expr.hir_id);
419 if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
420 call_caps.extend(upvars.keys().map(|var_id| {
421 let upvar = upvars[var_id];
422 let upvar_ln = self.add_live_node(UpvarNode(upvar.span));
423 CaptureInfo { ln: upvar_ln, var_hid: *var_id }
424 }));
425 }
426 self.set_captures(expr.hir_id, call_caps);
427 intravisit::walk_expr(self, expr);
428 }
429
430 hir::ExprKind::Let(let_expr) => {
431 self.add_from_pat(let_expr.pat);
432 intravisit::walk_expr(self, expr);
433 }
434
435 // live nodes required for interesting control flow:
436 hir::ExprKind::If(..)
437 | hir::ExprKind::Match(..)
438 | hir::ExprKind::Loop(..)
439 | hir::ExprKind::Yield(..) => {
440 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
441 intravisit::walk_expr(self, expr);
442 }
443 hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
444 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
445 intravisit::walk_expr(self, expr);
446 }
447
448 // otherwise, live nodes are not required:
449 hir::ExprKind::Index(..)
450 | hir::ExprKind::Field(..)
451 | hir::ExprKind::Array(..)
452 | hir::ExprKind::Call(..)
453 | hir::ExprKind::MethodCall(..)
454 | hir::ExprKind::Tup(..)
455 | hir::ExprKind::Binary(..)
456 | hir::ExprKind::AddrOf(..)
457 | hir::ExprKind::Cast(..)
458 | hir::ExprKind::DropTemps(..)
459 | hir::ExprKind::Unary(..)
460 | hir::ExprKind::Break(..)
461 | hir::ExprKind::Continue(_)
462 | hir::ExprKind::Lit(_)
463 | hir::ExprKind::ConstBlock(..)
464 | hir::ExprKind::Ret(..)
465 | hir::ExprKind::Block(..)
466 | hir::ExprKind::Assign(..)
467 | hir::ExprKind::AssignOp(..)
468 | hir::ExprKind::Struct(..)
469 | hir::ExprKind::Repeat(..)
470 | hir::ExprKind::InlineAsm(..)
471 | hir::ExprKind::Box(..)
472 | hir::ExprKind::Type(..)
473 | hir::ExprKind::Err
474 | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
475 | hir::ExprKind::Path(hir::QPath::LangItem(..)) => {
476 intravisit::walk_expr(self, expr);
477 }
478 }
479 }
480 }
481
482 // ______________________________________________________________________
483 // Computing liveness sets
484 //
485 // Actually we compute just a bit more than just liveness, but we use
486 // the same basic propagation framework in all cases.
487
488 const ACC_READ: u32 = 1;
489 const ACC_WRITE: u32 = 2;
490 const ACC_USE: u32 = 4;
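// An illustrative summary of how these bits are combined further down in this file:
//
//     access_path(.., ACC_READ | ACC_USE)          // a plain read of a local
//     write_place(&l, succ, ACC_WRITE)             // the target of `l = r`
//     write_place(&l, succ, ACC_WRITE | ACC_READ)  // the target of `l += r`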
491
492 struct Liveness<'a, 'tcx> {
493 ir: &'a mut IrMaps<'tcx>,
494 typeck_results: &'a ty::TypeckResults<'tcx>,
495 param_env: ty::ParamEnv<'tcx>,
496 closure_min_captures: Option<&'tcx RootVariableMinCaptureList<'tcx>>,
497 successors: IndexVec<LiveNode, Option<LiveNode>>,
498 rwu_table: rwu_table::RWUTable,
499
500 /// A live node representing a point of execution before closure entry &
501 /// after closure exit. Used to calculate liveness of captured variables
502 /// through calls to the same closure. Used for Fn & FnMut closures only.
503 closure_ln: LiveNode,
504 /// A live node representing every 'exit' from the function, whether it be
505 /// by explicit return, panic, or other means.
506 exit_ln: LiveNode,
507
508 // mappings from loop node ID to LiveNode
509 // ("break" label should map to loop node ID,
510 // it probably doesn't now)
511 break_ln: HirIdMap<LiveNode>,
512 cont_ln: HirIdMap<LiveNode>,
513 }
514
515 impl<'a, 'tcx> Liveness<'a, 'tcx> {
516 fn new(ir: &'a mut IrMaps<'tcx>, body_owner: LocalDefId) -> Liveness<'a, 'tcx> {
517 let typeck_results = ir.tcx.typeck(body_owner);
518 let param_env = ir.tcx.param_env(body_owner);
519 let closure_min_captures = typeck_results.closure_min_captures.get(&body_owner.to_def_id());
520 let closure_ln = ir.add_live_node(ClosureNode);
521 let exit_ln = ir.add_live_node(ExitNode);
522
523 let num_live_nodes = ir.lnks.len();
524 let num_vars = ir.var_kinds.len();
525
526 Liveness {
527 ir,
528 typeck_results,
529 param_env,
530 closure_min_captures,
531 successors: IndexVec::from_elem_n(None, num_live_nodes),
532 rwu_table: rwu_table::RWUTable::new(num_live_nodes, num_vars),
533 closure_ln,
534 exit_ln,
535 break_ln: Default::default(),
536 cont_ln: Default::default(),
537 }
538 }
539
540 fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
541 match self.ir.live_node_map.get(&hir_id) {
542 Some(&ln) => ln,
543 None => {
544 // This must be a mismatch between the ir_map construction
545 // above and the propagation code below; the two sets of
546 // code have to agree about which AST nodes are worth
547 // creating liveness nodes for.
548 span_bug!(span, "no live node registered for node {:?}", hir_id);
549 }
550 }
551 }
552
553 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
554 self.ir.variable(hir_id, span)
555 }
556
557 fn define_bindings_in_pat(&mut self, pat: &hir::Pat<'_>, mut succ: LiveNode) -> LiveNode {
558 // In an or-pattern, only consider the first pattern; any later patterns
559 // must have the same bindings, and we also consider the first pattern
560 // to be the "authoritative" set of ids.
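// (Illustrative example: in `let (Ok(v) | Err(v)) = res;` the ids come from the
// `v` in `Ok(v)`.)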
561 pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
562 let ln = self.live_node(hir_id, pat_sp);
563 let var = self.variable(hir_id, ident.span);
564 self.init_from_succ(ln, succ);
565 self.define(ln, var);
566 succ = ln;
567 });
568 succ
569 }
570
571 fn live_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
572 self.rwu_table.get_reader(ln, var)
573 }
574
575 // Is this variable live on entry to any of its successor nodes?
576 fn live_on_exit(&self, ln: LiveNode, var: Variable) -> bool {
577 let successor = self.successors[ln].unwrap();
578 self.live_on_entry(successor, var)
579 }
580
581 fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
582 self.rwu_table.get_used(ln, var)
583 }
584
585 fn assigned_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
586 self.rwu_table.get_writer(ln, var)
587 }
588
589 fn assigned_on_exit(&self, ln: LiveNode, var: Variable) -> bool {
590 let successor = self.successors[ln].unwrap();
591 self.assigned_on_entry(successor, var)
592 }
593
594 fn write_vars<F>(&self, wr: &mut dyn Write, mut test: F) -> io::Result<()>
595 where
596 F: FnMut(Variable) -> bool,
597 {
598 for var_idx in 0..self.ir.var_kinds.len() {
599 let var = Variable::from(var_idx);
600 if test(var) {
601 write!(wr, " {:?}", var)?;
602 }
603 }
604 Ok(())
605 }
606
607 #[allow(unused_must_use)]
608 fn ln_str(&self, ln: LiveNode) -> String {
609 let mut wr = Vec::new();
610 {
611 let wr = &mut wr as &mut dyn Write;
612 write!(wr, "[{:?} of kind {:?} reads", ln, self.ir.lnks[ln]);
613 self.write_vars(wr, |var| self.rwu_table.get_reader(ln, var));
614 write!(wr, " writes");
615 self.write_vars(wr, |var| self.rwu_table.get_writer(ln, var));
616 write!(wr, " uses");
617 self.write_vars(wr, |var| self.rwu_table.get_used(ln, var));
618
619 write!(wr, " precedes {:?}]", self.successors[ln]);
620 }
621 String::from_utf8(wr).unwrap()
622 }
623
624 fn log_liveness(&self, entry_ln: LiveNode, hir_id: hir::HirId) {
625 // hack to skip the loop unless debug! is enabled:
626 debug!(
627 "^^ liveness computation results for body {} (entry={:?})",
628 {
629 for ln_idx in 0..self.ir.lnks.len() {
630 debug!("{:?}", self.ln_str(LiveNode::from(ln_idx)));
631 }
632 hir_id
633 },
634 entry_ln
635 );
636 }
637
638 fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
639 self.successors[ln] = Some(succ_ln);
640
641 // It is not necessary to initialize the RWUs here because they are all
642 // empty when created, and the sets only grow during iterations.
643 }
644
645 fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
646 // more efficient version of init_empty() / merge_from_succ()
647 self.successors[ln] = Some(succ_ln);
648 self.rwu_table.copy(ln, succ_ln);
649 debug!("init_from_succ(ln={}, succ={})", self.ln_str(ln), self.ln_str(succ_ln));
650 }
651
652 fn merge_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) -> bool {
653 if ln == succ_ln {
654 return false;
655 }
656
657 let changed = self.rwu_table.union(ln, succ_ln);
658 debug!("merge_from_succ(ln={:?}, succ={}, changed={})", ln, self.ln_str(succ_ln), changed);
659 changed
660 }
661
662 // Indicates that a local variable was *defined*; we know that no
663 // uses of the variable can precede the definition (resolve checks
664 // this) so we just clear out all the data.
665 fn define(&mut self, writer: LiveNode, var: Variable) {
666 let used = self.rwu_table.get_used(writer, var);
667 self.rwu_table.set(writer, var, rwu_table::RWU { reader: false, writer: false, used });
668 debug!("{:?} defines {:?}: {}", writer, var, self.ln_str(writer));
669 }
670
671 // Either read, write, or both depending on the acc bitset
672 fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
673 debug!("{:?} accesses[{:x}] {:?}: {}", ln, acc, var, self.ln_str(ln));
674
675 let mut rwu = self.rwu_table.get(ln, var);
676
677 if (acc & ACC_WRITE) != 0 {
678 rwu.reader = false;
679 rwu.writer = true;
680 }
681
682 // Important: if we both read/write, must do read second
683 // or else the write will override.
684 if (acc & ACC_READ) != 0 {
685 rwu.reader = true;
686 }
687
688 if (acc & ACC_USE) != 0 {
689 rwu.used = true;
690 }
691
692 self.rwu_table.set(ln, var, rwu);
693 }
694
695 fn compute(&mut self, body: &hir::Body<'_>, hir_id: HirId) -> LiveNode {
696 debug!("compute: for body {:?}", body.id().hir_id);
697
698 // # Liveness of captured variables
699 //
700 // When computing the liveness for captured variables we take into
701 // account how the variable is captured (ByRef vs ByValue) and what the
702 // closure kind is (Generator / FnOnce vs Fn / FnMut).
703 //
704 // Variables captured by reference are assumed to be used on the exit
705 // from the closure.
706 //
707 // In FnOnce closures, variables captured by value are known to be dead
708 // on exit since it is impossible to call the closure again.
709 //
710 // In Fn / FnMut closures, variables captured by value are live on exit
711 // if they are live on the entry to the closure, since only the closure
712 // itself can access them on subsequent calls.
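//
// A rough, hypothetical illustration of the three cases above:
//
//     let s = String::new();
//     let once = move || drop(s);    // FnOnce: `s` (by value) is dead on exit
//
//     let v = vec![1, 2, 3];
//     let by_ref = || v.len();       // by reference: `v` counts as used on exit
//
//     let mut n = 0;
//     let mut bump = move || n += 1; // FnMut, `n` by value: live on exit iff
//                                    // live on entry (later calls reuse `n`)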
713
714 if let Some(closure_min_captures) = self.closure_min_captures {
715 // Mark upvars captured by reference as used after closure exits.
716 for (&var_hir_id, min_capture_list) in closure_min_captures {
717 for captured_place in min_capture_list {
718 match captured_place.info.capture_kind {
719 ty::UpvarCapture::ByRef(_) => {
720 let var = self.variable(
721 var_hir_id,
722 captured_place.get_capture_kind_span(self.ir.tcx),
723 );
724 self.acc(self.exit_ln, var, ACC_READ | ACC_USE);
725 }
726 ty::UpvarCapture::ByValue => {}
727 }
728 }
729 }
730 }
731
732 let succ = self.propagate_through_expr(&body.value, self.exit_ln);
733
734 if self.closure_min_captures.is_none() {
735 // Either not a closure, or closure without any captured variables.
736 // No need to determine liveness of captured variables, since there
737 // are none.
738 return succ;
739 }
740
741 let ty = self.typeck_results.node_type(hir_id);
742 match ty.kind() {
743 ty::Closure(_def_id, substs) => match substs.as_closure().kind() {
744 ty::ClosureKind::Fn => {}
745 ty::ClosureKind::FnMut => {}
746 ty::ClosureKind::FnOnce => return succ,
747 },
748 ty::Generator(..) => return succ,
749 _ => {
750 span_bug!(
751 body.value.span,
752 "{} has upvars so it should have a closure type: {:?}",
753 hir_id,
754 ty
755 );
756 }
757 };
758
759 // Propagate through calls to the closure.
760 loop {
761 self.init_from_succ(self.closure_ln, succ);
762 for param in body.params {
763 param.pat.each_binding(|_bm, hir_id, _x, ident| {
764 let var = self.variable(hir_id, ident.span);
765 self.define(self.closure_ln, var);
766 })
767 }
768
769 if !self.merge_from_succ(self.exit_ln, self.closure_ln) {
770 break;
771 }
772 assert_eq!(succ, self.propagate_through_expr(&body.value, self.exit_ln));
773 }
774
775 succ
776 }
777
778 fn propagate_through_block(&mut self, blk: &hir::Block<'_>, succ: LiveNode) -> LiveNode {
779 if blk.targeted_by_break {
780 self.break_ln.insert(blk.hir_id, succ);
781 }
782 let succ = self.propagate_through_opt_expr(blk.expr, succ);
783 blk.stmts.iter().rev().fold(succ, |succ, stmt| self.propagate_through_stmt(stmt, succ))
784 }
785
786 fn propagate_through_stmt(&mut self, stmt: &hir::Stmt<'_>, succ: LiveNode) -> LiveNode {
787 match stmt.kind {
788 hir::StmtKind::Local(ref local) => {
789 // Note: we mark the variable as defined regardless of whether
790 // there is an initializer. Initially I had thought to only mark
791 // the live variable as defined if it was initialized, and then we
792 // could check for uninit variables just by scanning what is live
793 // at the start of the function. But that doesn't work so well for
794 // immutable variables defined in a loop:
795 // loop { let x; x = 5; }
796 // because the "assignment" loops back around and generates an error.
797 //
798 // So now we just check that variables defined w/o an
799 // initializer are not live at the point of their
800 // initialization, which is mildly more complex than checking
801 // once at the func header but otherwise equivalent.
802
803 let succ = self.propagate_through_opt_expr(local.init, succ);
804 self.define_bindings_in_pat(&local.pat, succ)
805 }
806 hir::StmtKind::Item(..) => succ,
807 hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
808 self.propagate_through_expr(&expr, succ)
809 }
810 }
811 }
812
813 fn propagate_through_exprs(&mut self, exprs: &[Expr<'_>], succ: LiveNode) -> LiveNode {
814 exprs.iter().rev().fold(succ, |succ, expr| self.propagate_through_expr(&expr, succ))
815 }
816
817 fn propagate_through_opt_expr(
818 &mut self,
819 opt_expr: Option<&Expr<'_>>,
820 succ: LiveNode,
821 ) -> LiveNode {
822 opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
823 }
824
825 fn propagate_through_expr(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
826 debug!("propagate_through_expr: {:?}", expr);
827
828 match expr.kind {
829 // Interesting cases with control flow or which gen/kill
830 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
831 self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
832 }
833
834 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
835
836 hir::ExprKind::Closure(..) => {
837 debug!("{:?} is an ExprKind::Closure", expr);
838
839 // the construction of a closure itself is not important,
840 // but we have to consider the closed over variables.
841 let caps = self
842 .ir
843 .capture_info_map
844 .get(&expr.hir_id)
845 .cloned()
846 .unwrap_or_else(|| span_bug!(expr.span, "no registered caps"));
847
848 caps.iter().rev().fold(succ, |succ, cap| {
849 self.init_from_succ(cap.ln, succ);
850 let var = self.variable(cap.var_hid, expr.span);
851 self.acc(cap.ln, var, ACC_READ | ACC_USE);
852 cap.ln
853 })
854 }
855
856 hir::ExprKind::Let(let_expr) => {
857 let succ = self.propagate_through_expr(let_expr.init, succ);
858 self.define_bindings_in_pat(let_expr.pat, succ)
859 }
860
861 // Note that labels have been resolved, so we don't need to look
862 // at the label ident
863 hir::ExprKind::Loop(ref blk, ..) => self.propagate_through_loop(expr, &blk, succ),
864
865 hir::ExprKind::Yield(ref e, ..) => {
866 let yield_ln = self.live_node(expr.hir_id, expr.span);
867 self.init_from_succ(yield_ln, succ);
868 self.merge_from_succ(yield_ln, self.exit_ln);
869 self.propagate_through_expr(e, yield_ln)
870 }
871
872 hir::ExprKind::If(ref cond, ref then, ref else_opt) => {
873 //
874 // (cond)
875 // |
876 // v
877 // (expr)
878 // / \
879 // | |
880 // v v
881 // (then)(els)
882 // | |
883 // v v
884 // ( succ )
885 //
886 let else_ln =
887 self.propagate_through_opt_expr(else_opt.as_ref().map(|e| &**e), succ);
888 let then_ln = self.propagate_through_expr(&then, succ);
889 let ln = self.live_node(expr.hir_id, expr.span);
890 self.init_from_succ(ln, else_ln);
891 self.merge_from_succ(ln, then_ln);
892 self.propagate_through_expr(&cond, ln)
893 }
894
895 hir::ExprKind::Match(ref e, arms, _) => {
896 //
897 // (e)
898 // |
899 // v
900 // (expr)
901 // / | \
902 // | | |
903 // v v v
904 // (..arms..)
905 // | | |
906 // v v v
907 // ( succ )
908 //
909 //
910 let ln = self.live_node(expr.hir_id, expr.span);
911 self.init_empty(ln, succ);
912 for arm in arms {
913 let body_succ = self.propagate_through_expr(&arm.body, succ);
914
915 let guard_succ = arm.guard.as_ref().map_or(body_succ, |g| match g {
916 hir::Guard::If(e) => self.propagate_through_expr(e, body_succ),
917 hir::Guard::IfLet(pat, e) => {
918 let let_bind = self.define_bindings_in_pat(pat, body_succ);
919 self.propagate_through_expr(e, let_bind)
920 }
921 });
922 let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
923 self.merge_from_succ(ln, arm_succ);
924 }
925 self.propagate_through_expr(&e, ln)
926 }
927
928 hir::ExprKind::Ret(ref o_e) => {
929 // Ignore succ and use exit_ln as the successor instead.
930 self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), self.exit_ln)
931 }
932
933 hir::ExprKind::Break(label, ref opt_expr) => {
934 // Find which label this break jumps to
935 let target = match label.target_id {
936 Ok(hir_id) => self.break_ln.get(&hir_id),
937 Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
938 }
939 .cloned();
940
941 // Now that we know the label we're going to,
942 // look it up in the break loop nodes table
943
944 match target {
945 Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
946 None => span_bug!(expr.span, "`break` to unknown label"),
947 }
948 }
949
950 hir::ExprKind::Continue(label) => {
951 // Find which label this expr continues to
952 let sc = label
953 .target_id
954 .unwrap_or_else(|err| span_bug!(expr.span, "loop scope error: {}", err));
955
956 // Now that we know the label we're going to,
957 // look it up in the continue loop nodes table
958 self.cont_ln
959 .get(&sc)
960 .cloned()
961 .unwrap_or_else(|| span_bug!(expr.span, "continue to unknown label"))
962 }
963
964 hir::ExprKind::Assign(ref l, ref r, _) => {
965 // see comment on places in
966 // propagate_through_place_components()
967 let succ = self.write_place(&l, succ, ACC_WRITE);
968 let succ = self.propagate_through_place_components(&l, succ);
969 self.propagate_through_expr(&r, succ)
970 }
971
972 hir::ExprKind::AssignOp(_, ref l, ref r) => {
973 // an overloaded assign op is like a method call
974 if self.typeck_results.is_method_call(expr) {
975 let succ = self.propagate_through_expr(&l, succ);
976 self.propagate_through_expr(&r, succ)
977 } else {
978 // see comment on places in
979 // propagate_through_place_components()
980 let succ = self.write_place(&l, succ, ACC_WRITE | ACC_READ);
981 let succ = self.propagate_through_expr(&r, succ);
982 self.propagate_through_place_components(&l, succ)
983 }
984 }
985
986 // Uninteresting cases: just propagate in rev exec order
987 hir::ExprKind::Array(ref exprs) => self.propagate_through_exprs(exprs, succ),
988
989 hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
990 let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
991 fields
992 .iter()
993 .rev()
994 .fold(succ, |succ, field| self.propagate_through_expr(&field.expr, succ))
995 }
996
997 hir::ExprKind::Call(ref f, ref args) => {
998 let succ = self.check_is_ty_uninhabited(expr, succ);
999 let succ = self.propagate_through_exprs(args, succ);
1000 self.propagate_through_expr(&f, succ)
1001 }
1002
1003 hir::ExprKind::MethodCall(.., ref args, _) => {
1004 let succ = self.check_is_ty_uninhabited(expr, succ);
1005 self.propagate_through_exprs(args, succ)
1006 }
1007
1008 hir::ExprKind::Tup(ref exprs) => self.propagate_through_exprs(exprs, succ),
1009
1010 hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
1011 let r_succ = self.propagate_through_expr(&r, succ);
1012
1013 let ln = self.live_node(expr.hir_id, expr.span);
1014 self.init_from_succ(ln, succ);
1015 self.merge_from_succ(ln, r_succ);
1016
1017 self.propagate_through_expr(&l, ln)
1018 }
1019
1020 hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => {
1021 let r_succ = self.propagate_through_expr(&r, succ);
1022 self.propagate_through_expr(&l, r_succ)
1023 }
1024
1025 hir::ExprKind::Box(ref e)
1026 | hir::ExprKind::AddrOf(_, _, ref e)
1027 | hir::ExprKind::Cast(ref e, _)
1028 | hir::ExprKind::Type(ref e, _)
1029 | hir::ExprKind::DropTemps(ref e)
1030 | hir::ExprKind::Unary(_, ref e)
1031 | hir::ExprKind::Repeat(ref e, _) => self.propagate_through_expr(&e, succ),
1032
1033 hir::ExprKind::InlineAsm(ref asm) => {
1034 // Handle non-returning asm
1035 let mut succ = if asm.options.contains(InlineAsmOptions::NORETURN) {
1036 self.exit_ln
1037 } else {
1038 succ
1039 };
1040
1041 // Do a first pass for writing outputs only
1042 for (op, _op_sp) in asm.operands.iter().rev() {
1043 match op {
1044 hir::InlineAsmOperand::In { .. }
1045 | hir::InlineAsmOperand::Const { .. }
1046 | hir::InlineAsmOperand::Sym { .. } => {}
1047 hir::InlineAsmOperand::Out { expr, .. } => {
1048 if let Some(expr) = expr {
1049 succ = self.write_place(expr, succ, ACC_WRITE);
1050 }
1051 }
1052 hir::InlineAsmOperand::InOut { expr, .. } => {
1053 succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE | ACC_USE);
1054 }
1055 hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
1056 if let Some(expr) = out_expr {
1057 succ = self.write_place(expr, succ, ACC_WRITE);
1058 }
1059 }
1060 }
1061 }
1062
1063 // Then do a second pass for inputs
1064 let mut succ = succ;
1065 for (op, _op_sp) in asm.operands.iter().rev() {
1066 match op {
1067 hir::InlineAsmOperand::In { expr, .. }
1068 | hir::InlineAsmOperand::Sym { expr, .. } => {
1069 succ = self.propagate_through_expr(expr, succ)
1070 }
1071 hir::InlineAsmOperand::Out { expr, .. } => {
1072 if let Some(expr) = expr {
1073 succ = self.propagate_through_place_components(expr, succ);
1074 }
1075 }
1076 hir::InlineAsmOperand::InOut { expr, .. } => {
1077 succ = self.propagate_through_place_components(expr, succ);
1078 }
1079 hir::InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
1080 if let Some(expr) = out_expr {
1081 succ = self.propagate_through_place_components(expr, succ);
1082 }
1083 succ = self.propagate_through_expr(in_expr, succ);
1084 }
1085 hir::InlineAsmOperand::Const { .. } => {}
1086 }
1087 }
1088 succ
1089 }
1090
1091 hir::ExprKind::Lit(..)
1092 | hir::ExprKind::ConstBlock(..)
1093 | hir::ExprKind::Err
1094 | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
1095 | hir::ExprKind::Path(hir::QPath::LangItem(..)) => succ,
1096
1097 // Note that labels have been resolved, so we don't need to look
1098 // at the label ident
1099 hir::ExprKind::Block(ref blk, _) => self.propagate_through_block(&blk, succ),
1100 }
1101 }
1102
1103 fn propagate_through_place_components(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1104 // # Places
1105 //
1106 // In general, the full flow graph structure for an
1107 // assignment/move/etc can be handled in one of two ways,
1108 // depending on whether what is being assigned is a "tracked
1109 // value" or not. A tracked value is basically a local
1110 // variable or argument.
1111 //
1112 // The two kinds of graphs are:
1113 //
1114 // Tracked place Untracked place
1115 // ----------------------++-----------------------
1116 // ||
1117 // | || |
1118 // v || v
1119 // (rvalue) || (rvalue)
1120 // | || |
1121 // v || v
1122 // (write of place) || (place components)
1123 // | || |
1124 // v || v
1125 // (succ) || (succ)
1126 // ||
1127 // ----------------------++-----------------------
1128 //
1129 // I will cover the two cases in turn:
1130 //
1131 // # Tracked places
1132 //
1133 // A tracked place is a local variable/argument `x`. In
1134 // these cases, the link_node where the write occurs is linked
1135 // to the node id of `x`. The `write_place()` routine generates
1136 // the contents of this node. There are no subcomponents to
1137 // consider.
1138 //
1139 // # Non-tracked places
1140 //
1141 // These are places like `x[5]` or `x.f`. In that case, we
1142 // basically ignore the value which is written to but generate
1143 // reads for the components---`x` in these two examples. The
1144 // component reads are generated by
1145 // `propagate_through_place_components()` (this fn).
1146 //
1147 // # Illegal places
1148 //
1149 // It is still possible to observe assignments to non-places;
1150 // these errors are detected in the later pass borrowck. We
1151 // just ignore such cases and treat them as reads.
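//
// For example (illustrative only):
//
//     x = f();       // tracked place: `write_place` records a write of `x`
//     x.field = f(); // untracked: the write itself is ignored, but a read of
//                    // the base `x` is generated
//     v[i] = f();    // untracked: reads of `v` and `i` are generated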
1152
1153 match expr.kind {
1154 hir::ExprKind::Path(_) => succ,
1155 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
1156 _ => self.propagate_through_expr(expr, succ),
1157 }
1158 }
1159
1160 // see comment on propagate_through_place_components()
1161 fn write_place(&mut self, expr: &Expr<'_>, succ: LiveNode, acc: u32) -> LiveNode {
1162 match expr.kind {
1163 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1164 self.access_path(expr.hir_id, path, succ, acc)
1165 }
1166
1167 // We do not track other places, so just propagate through
1168 // to their subcomponents. Also, it may happen that
1169 // non-places occur here, because those are detected in the
1170 // later pass borrowck.
1171 _ => succ,
1172 }
1173 }
1174
1175 fn access_var(
1176 &mut self,
1177 hir_id: HirId,
1178 var_hid: HirId,
1179 succ: LiveNode,
1180 acc: u32,
1181 span: Span,
1182 ) -> LiveNode {
1183 let ln = self.live_node(hir_id, span);
1184 if acc != 0 {
1185 self.init_from_succ(ln, succ);
1186 let var = self.variable(var_hid, span);
1187 self.acc(ln, var, acc);
1188 }
1189 ln
1190 }
1191
1192 fn access_path(
1193 &mut self,
1194 hir_id: HirId,
1195 path: &hir::Path<'_>,
1196 succ: LiveNode,
1197 acc: u32,
1198 ) -> LiveNode {
1199 match path.res {
1200 Res::Local(hid) => self.access_var(hir_id, hid, succ, acc, path.span),
1201 _ => succ,
1202 }
1203 }
1204
1205 fn propagate_through_loop(
1206 &mut self,
1207 expr: &Expr<'_>,
1208 body: &hir::Block<'_>,
1209 succ: LiveNode,
1210 ) -> LiveNode {
1211 /*
1212 We model control flow like this:
1213
1214 (expr) <-+
1215 | |
1216 v |
1217 (body) --+
1218
1219 Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
1220 Meanwhile, a `break` expression will have a successor of `succ`.
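
For example (illustrative only, assuming a flag `done`):

    'outer: loop {
        if done { break; }    // successor: the node after the loop (`succ`)
        continue 'outer;      // successor: the loop node itself
    }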
1221 */
1222
1223 // first iteration:
1224 let ln = self.live_node(expr.hir_id, expr.span);
1225 self.init_empty(ln, succ);
1226 debug!("propagate_through_loop: using id for loop body {} {:?}", expr.hir_id, body);
1227
1228 self.break_ln.insert(expr.hir_id, succ);
1229
1230 self.cont_ln.insert(expr.hir_id, ln);
1231
1232 let body_ln = self.propagate_through_block(body, ln);
1233
1234 // repeat until fixed point is reached:
1235 while self.merge_from_succ(ln, body_ln) {
1236 assert_eq!(body_ln, self.propagate_through_block(body, ln));
1237 }
1238
1239 ln
1240 }
1241
1242 fn check_is_ty_uninhabited(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1243 let ty = self.typeck_results.expr_ty(expr);
1244 let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
1245 if self.ir.tcx.is_ty_uninhabited_from(m, ty, self.param_env) {
1246 match self.ir.lnks[succ] {
1247 LiveNodeKind::ExprNode(succ_span, succ_id) => {
1248 self.warn_about_unreachable(expr.span, ty, succ_span, succ_id, "expression");
1249 }
1250 LiveNodeKind::VarDefNode(succ_span, succ_id) => {
1251 self.warn_about_unreachable(expr.span, ty, succ_span, succ_id, "definition");
1252 }
1253 _ => {}
1254 };
1255 self.exit_ln
1256 } else {
1257 succ
1258 }
1259 }
1260
1261 fn warn_about_unreachable(
1262 &mut self,
1263 orig_span: Span,
1264 orig_ty: Ty<'tcx>,
1265 expr_span: Span,
1266 expr_id: HirId,
1267 descr: &str,
1268 ) {
1269 if !orig_ty.is_never() {
1270 // Unreachable code warnings are already emitted during type checking.
1271 // However, during type checking, full type information is being
1272 // calculated but not yet available, so the check for diverging
1273 // expressions due to uninhabited result types is pretty crude and
1274 // only checks whether ty.is_never(). Here, we have full type
1275 // information available and can issue warnings for less obviously
1276 // uninhabited types (e.g. empty enums). The check above is used so
1277 // that we do not emit the same warning twice if the uninhabited type
1278 // is indeed `!`.
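//
// Illustrative example: given `enum Void {}` and `fn never() -> Void`, code
// following a call to `never()` is unreachable even though the return type is
// not literally `!`.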
1279
1280 self.ir.tcx.struct_span_lint_hir(
1281 lint::builtin::UNREACHABLE_CODE,
1282 expr_id,
1283 expr_span,
1284 |lint| {
1285 let msg = format!("unreachable {}", descr);
1286 lint.build(&msg)
1287 .span_label(expr_span, &msg)
1288 .span_label(orig_span, "any code following this expression is unreachable")
1289 .span_note(
1290 orig_span,
1291 &format!(
1292 "this expression has type `{}`, which is uninhabited",
1293 orig_ty
1294 ),
1295 )
1296 .emit();
1297 },
1298 );
1299 }
1300 }
1301 }
1302
1303 // _______________________________________________________________________
1304 // Checking for error conditions
1305
1306 impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
1307 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
1308 self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
1309 if local.init.is_some() {
1310 self.warn_about_dead_assign(spans, hir_id, ln, var);
1311 }
1312 });
1313
1314 intravisit::walk_local(self, local);
1315 }
1316
1317 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
1318 check_expr(self, ex);
1319 intravisit::walk_expr(self, ex);
1320 }
1321
1322 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
1323 self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
1324 intravisit::walk_arm(self, arm);
1325 }
1326 }
1327
1328 fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr<'tcx>) {
1329 match expr.kind {
1330 hir::ExprKind::Assign(ref l, ..) => {
1331 this.check_place(&l);
1332 }
1333
1334 hir::ExprKind::AssignOp(_, ref l, _) => {
1335 if !this.typeck_results.is_method_call(expr) {
1336 this.check_place(&l);
1337 }
1338 }
1339
1340 hir::ExprKind::InlineAsm(ref asm) => {
1341 for (op, _op_sp) in asm.operands {
1342 match op {
1343 hir::InlineAsmOperand::Out { expr, .. } => {
1344 if let Some(expr) = expr {
1345 this.check_place(expr);
1346 }
1347 }
1348 hir::InlineAsmOperand::InOut { expr, .. } => {
1349 this.check_place(expr);
1350 }
1351 hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
1352 if let Some(out_expr) = out_expr {
1353 this.check_place(out_expr);
1354 }
1355 }
1356 _ => {}
1357 }
1358 }
1359 }
1360
1361 hir::ExprKind::Let(let_expr) => {
1362 this.check_unused_vars_in_pat(let_expr.pat, None, |_, _, _, _| {});
1363 }
1364
1365 // no correctness conditions related to liveness
1366 hir::ExprKind::Call(..)
1367 | hir::ExprKind::MethodCall(..)
1368 | hir::ExprKind::Match(..)
1369 | hir::ExprKind::Loop(..)
1370 | hir::ExprKind::Index(..)
1371 | hir::ExprKind::Field(..)
1372 | hir::ExprKind::Array(..)
1373 | hir::ExprKind::Tup(..)
1374 | hir::ExprKind::Binary(..)
1375 | hir::ExprKind::Cast(..)
1376 | hir::ExprKind::If(..)
1377 | hir::ExprKind::DropTemps(..)
1378 | hir::ExprKind::Unary(..)
1379 | hir::ExprKind::Ret(..)
1380 | hir::ExprKind::Break(..)
1381 | hir::ExprKind::Continue(..)
1382 | hir::ExprKind::Lit(_)
1383 | hir::ExprKind::ConstBlock(..)
1384 | hir::ExprKind::Block(..)
1385 | hir::ExprKind::AddrOf(..)
1386 | hir::ExprKind::Struct(..)
1387 | hir::ExprKind::Repeat(..)
1388 | hir::ExprKind::Closure(..)
1389 | hir::ExprKind::Path(_)
1390 | hir::ExprKind::Yield(..)
1391 | hir::ExprKind::Box(..)
1392 | hir::ExprKind::Type(..)
1393 | hir::ExprKind::Err => {}
1394 }
1395 }
1396
1397 impl<'tcx> Liveness<'_, 'tcx> {
1398 fn check_place(&mut self, expr: &'tcx Expr<'tcx>) {
1399 match expr.kind {
1400 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1401 if let Res::Local(var_hid) = path.res {
1402 // Assignment to an immutable variable or argument: only legal
1403 // if there is no later assignment. If this local is actually
1404 // mutable, then check for a reassignment to flag the mutability
1405 // as being used.
1406 let ln = self.live_node(expr.hir_id, expr.span);
1407 let var = self.variable(var_hid, expr.span);
1408 self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
1409 }
1410 }
1411 _ => {
1412 // For other kinds of places, no checks are required,
1413 // and any embedded expressions are actually rvalues
1414 intravisit::walk_expr(self, expr);
1415 }
1416 }
1417 }
1418
1419 fn should_warn(&self, var: Variable) -> Option<String> {
1420 let name = self.ir.variable_name(var);
1421 if name == kw::Empty {
1422 return None;
1423 }
1424 let name = name.as_str();
1425 if name.as_bytes()[0] == b'_' {
1426 return None;
1427 }
1428 Some(name.to_owned())
1429 }
1430
1431 fn warn_about_unused_upvars(&self, entry_ln: LiveNode) {
1432 let Some(closure_min_captures) = self.closure_min_captures else {
1433 return;
1434 };
1435
1436 // If closure_min_captures is Some(), upvars must be Some() too.
1437 for (&var_hir_id, min_capture_list) in closure_min_captures {
1438 for captured_place in min_capture_list {
1439 match captured_place.info.capture_kind {
1440 ty::UpvarCapture::ByValue => {}
1441 ty::UpvarCapture::ByRef(..) => continue,
1442 };
1443 let span = captured_place.get_capture_kind_span(self.ir.tcx);
1444 let var = self.variable(var_hir_id, span);
1445 if self.used_on_entry(entry_ln, var) {
1446 if !self.live_on_entry(entry_ln, var) {
1447 if let Some(name) = self.should_warn(var) {
1448 self.ir.tcx.struct_span_lint_hir(
1449 lint::builtin::UNUSED_ASSIGNMENTS,
1450 var_hir_id,
1451 vec![span],
1452 |lint| {
1453 lint.build(&format!(
1454 "value captured by `{}` is never read",
1455 name
1456 ))
1457 .help("did you mean to capture by reference instead?")
1458 .emit();
1459 },
1460 );
1461 }
1462 }
1463 } else {
1464 if let Some(name) = self.should_warn(var) {
1465 self.ir.tcx.struct_span_lint_hir(
1466 lint::builtin::UNUSED_VARIABLES,
1467 var_hir_id,
1468 vec![span],
1469 |lint| {
1470 lint.build(&format!("unused variable: `{}`", name))
1471 .help("did you mean to capture by reference instead?")
1472 .emit();
1473 },
1474 );
1475 }
1476 }
1477 }
1478 }
1479 }
1480
1481 fn warn_about_unused_args(&self, body: &hir::Body<'_>, entry_ln: LiveNode) {
1482 for p in body.params {
1483 self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
1484 if !self.live_on_entry(ln, var) {
1485 self.report_unused_assign(hir_id, spans, var, |name| {
1486 format!("value passed to `{}` is never read", name)
1487 });
1488 }
1489 });
1490 }
1491 }
1492
1493 fn check_unused_vars_in_pat(
1494 &self,
1495 pat: &hir::Pat<'_>,
1496 entry_ln: Option<LiveNode>,
1497 on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
1498 ) {
1499 // In an or-pattern, only consider the first pattern; any later patterns must have the same
1500 // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
1501 // However, we should take the ids and spans of variables with the same name from the later
1502 // patterns so the suggestions to prefix with underscores will apply to those too.
1503 let mut vars: FxIndexMap<Symbol, (LiveNode, Variable, Vec<(HirId, Span, Span)>)> =
1504 <_>::default();
1505
1506 pat.each_binding(|_, hir_id, pat_sp, ident| {
1507 let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
1508 let var = self.variable(hir_id, ident.span);
1509 let id_and_sp = (hir_id, pat_sp, ident.span);
1510 vars.entry(self.ir.variable_name(var))
1511 .and_modify(|(.., hir_ids_and_spans)| hir_ids_and_spans.push(id_and_sp))
1512 .or_insert_with(|| (ln, var, vec![id_and_sp]));
1513 });
1514
1515 for (_, (ln, var, hir_ids_and_spans)) in vars {
1516 if self.used_on_entry(ln, var) {
1517 let id = hir_ids_and_spans[0].0;
1518 let spans =
1519 hir_ids_and_spans.into_iter().map(|(_, _, ident_span)| ident_span).collect();
1520 on_used_on_entry(spans, id, ln, var);
1521 } else {
1522 self.report_unused(hir_ids_and_spans, ln, var);
1523 }
1524 }
1525 }
1526
1527 fn report_unused(
1528 &self,
1529 hir_ids_and_spans: Vec<(HirId, Span, Span)>,
1530 ln: LiveNode,
1531 var: Variable,
1532 ) {
1533 let first_hir_id = hir_ids_and_spans[0].0;
1534
1535 if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
1536 // annoying: for parameters in funcs like `fn(x: i32)
1537 // {ret}`, there is only one node, so asking about
1538 // assigned_on_exit() is not meaningful.
1539 let is_assigned =
1540 if ln == self.exit_ln { false } else { self.assigned_on_exit(ln, var) };
1541
1542 if is_assigned {
1543 self.ir.tcx.struct_span_lint_hir(
1544 lint::builtin::UNUSED_VARIABLES,
1545 first_hir_id,
1546 hir_ids_and_spans
1547 .into_iter()
1548 .map(|(_, _, ident_span)| ident_span)
1549 .collect::<Vec<_>>(),
1550 |lint| {
1551 lint.build(&format!("variable `{}` is assigned to, but never used", name))
1552 .note(&format!("consider using `_{}` instead", name))
1553 .emit();
1554 },
1555 )
1556 } else {
1557 let (shorthands, non_shorthands): (Vec<_>, Vec<_>) =
1558 hir_ids_and_spans.iter().copied().partition(|(hir_id, _, ident_span)| {
1559 let var = self.variable(*hir_id, *ident_span);
1560 self.ir.variable_is_shorthand(var)
1561 });
1562
1563 // If we have both shorthand and non-shorthand, prefer the "try ignoring
1564 // the field" message, and suggest `_` for the non-shorthands. If we only
1565 // have non-shorthand, then prefix with an underscore instead.
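// (Illustrative example: for `let Foo { x } = it;` the unused shorthand field is
// suggested as `Foo { x: _ }`; when there is no shorthand, the binding is
// prefixed as `_x` instead.)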
1566 if !shorthands.is_empty() {
1567 let shorthands = shorthands
1568 .into_iter()
1569 .map(|(_, pat_span, _)| (pat_span, format!("{}: _", name)))
1570 .chain(
1571 non_shorthands
1572 .into_iter()
1573 .map(|(_, pat_span, _)| (pat_span, "_".to_string())),
1574 )
1575 .collect::<Vec<_>>();
1576
1577 self.ir.tcx.struct_span_lint_hir(
1578 lint::builtin::UNUSED_VARIABLES,
1579 first_hir_id,
1580 hir_ids_and_spans
1581 .iter()
1582 .map(|(_, pat_span, _)| *pat_span)
1583 .collect::<Vec<_>>(),
1584 |lint| {
1585 let mut err = lint.build(&format!("unused variable: `{}`", name));
1586 err.multipart_suggestion(
1587 "try ignoring the field",
1588 shorthands,
1589 Applicability::MachineApplicable,
1590 );
1591 err.emit();
1592 },
1593 );
1594 } else {
1595 let non_shorthands = non_shorthands
1596 .into_iter()
1597 .map(|(_, _, ident_span)| (ident_span, format!("_{}", name)))
1598 .collect::<Vec<_>>();
1599
1600 self.ir.tcx.struct_span_lint_hir(
1601 lint::builtin::UNUSED_VARIABLES,
1602 first_hir_id,
1603 hir_ids_and_spans
1604 .iter()
1605 .map(|(_, _, ident_span)| *ident_span)
1606 .collect::<Vec<_>>(),
1607 |lint| {
1608 let mut err = lint.build(&format!("unused variable: `{}`", name));
1609 err.multipart_suggestion(
1610 "if this is intentional, prefix it with an underscore",
1611 non_shorthands,
1612 Applicability::MachineApplicable,
1613 );
1614 err.emit();
1615 },
1616 );
1617 }
1618 }
1619 }
1620 }
1621
1622 fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
1623 if !self.live_on_exit(ln, var) {
1624 self.report_unused_assign(hir_id, spans, var, |name| {
1625 format!("value assigned to `{}` is never read", name)
1626 });
1627 }
1628 }
1629
1630 fn report_unused_assign(
1631 &self,
1632 hir_id: HirId,
1633 spans: Vec<Span>,
1634 var: Variable,
1635 message: impl Fn(&str) -> String,
1636 ) {
1637 if let Some(name) = self.should_warn(var) {
1638 self.ir.tcx.struct_span_lint_hir(
1639 lint::builtin::UNUSED_ASSIGNMENTS,
1640 hir_id,
1641 spans,
1642 |lint| {
1643 lint.build(&message(&name))
1644 .help("maybe it is overwritten before being read?")
1645 .emit();
1646 },
1647 )
1648 }
1649 }
1650 }