1 //! A classic liveness analysis based on dataflow over the AST. Computes,
2 //! for each local variable in a function, whether that variable is live
3 //! at a given point. Program execution points are identified by their IDs.
8 //! The basic model is that each local variable is assigned an index. We
9 //! represent sets of local variables using a vector indexed by this
10 //! index. The value in the vector is either 0, indicating the variable
11 //! is dead, or the ID of an expression that uses the variable.
13 //! We conceptually walk over the AST in reverse execution order. If we
14 //! find a use of a variable, we add it to the set of live variables. If
15 //! we find an assignment to a variable, we remove it from the set of live
16 //! variables. When we have to merge two flows, we take the union of
17 //! those two flows -- if the variable is live on both paths, we simply
18 //! pick one ID. In the event of loops, we continue doing this until a
19 //! fixed point is reached.
21 //! ## Checking initialization
23 //! At the function entry point, all variables must be dead. If this is
24 //! not the case, we can report an error using the ID found in the set of
25 //! live variables, which identifies a use of the variable which is not
26 //! dominated by an assignment.
30 //! After each explicit move, the variable must be dead.
32 //! ## Computing last uses
34 //! Any use of the variable where the variable is dead afterwards is a *last use* of the variable.
37 //! # Implementation details
39 //! The actual implementation contains two (nested) walks over the AST.
40 //! The outer walk has the job of building up the ir_maps instance for the
41 //! enclosing function. On the way down the tree, it identifies those AST
42 //! nodes and variable IDs that will be needed for the liveness analysis
43 //! and assigns them contiguous IDs. The liveness ID for an AST node is
44 //! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
45 //! is called a `variable` (another newtype'd `u32`).
47 //! On the way back up the tree, as we are about to exit from a function
48 //! declaration we allocate a `liveness` instance. Now that we know
49 //! precisely how many nodes and variables we need, we can allocate all
50 //! the various arrays that we will need to precisely the right size. We then
51 //! perform the actual propagation on the `liveness` instance.
53 //! This propagation is encoded in the various `propagate_through_*()`
54 //! methods. It effectively does a reverse walk of the AST; whenever we
55 //! reach a loop node, we iterate until a fixed point is reached.
57 //! ## The `RWU` struct
59 //! At each live node `N`, we track three pieces of information for each
60 //! variable `V` (these are encapsulated in the `RWU` struct):
62 //! - `reader`: the `LiveNode` ID of some node which will read the value
63 //! that `V` holds on entry to `N`. Formally: a node `M` such
64 //! that there exists a path `P` from `N` to `M` where `P` does not
65 //! write `V`. If the `reader` is `None`, then the current
66 //! value will never be read (the variable is dead, essentially).
68 //! - `writer`: the `LiveNode` ID of some node which will write the
69 //! variable `V` and which is reachable from `N`. Formally: a node `M`
70 //! such that there exists a path `P` from `N` to `M` and `M` writes
71 //! `V`. If the `writer` is `None`, then there is no writer
72 //! of `V` that follows `N`.
74 //! - `used`: a boolean value indicating whether `V` is *used*. We
75 //! distinguish a *read* from a *use* in that a *use* is some read that
76 //! is not just used to generate a new value. For example, `x += 1` is
77 //! a read but not a use. This is used to generate better warnings.
79 //! ## Special nodes and variables
81 //! We generate various special nodes for various, well, special purposes.
82 //! These are described in the `Liveness` struct.
84 use self::LiveNodeKind
::*;
87 use rustc_ast
::InlineAsmOptions
;
88 use rustc_data_structures
::fx
::FxIndexMap
;
89 use rustc_errors
::Applicability
;
91 use rustc_hir
::def
::*;
92 use rustc_hir
::def_id
::LocalDefId
;
93 use rustc_hir
::intravisit
::{self, NestedVisitorMap, Visitor}
;
94 use rustc_hir
::{Expr, HirId, HirIdMap, HirIdSet}
;
95 use rustc_index
::vec
::IndexVec
;
96 use rustc_middle
::hir
::map
::Map
;
97 use rustc_middle
::ty
::query
::Providers
;
98 use rustc_middle
::ty
::{self, DefIdTree, RootVariableMinCaptureList, TyCtxt}
;
99 use rustc_session
::lint
;
100 use rustc_span
::symbol
::{kw, sym, Symbol}
;
101 use rustc_span
::Span
;
103 use std
::collections
::VecDeque
;
105 use std
::io
::prelude
::*;
111 rustc_index
::newtype_index
! {
112 pub struct Variable
{
113 DEBUG_FORMAT
= "v({})",
117 rustc_index
::newtype_index
! {
118 pub struct LiveNode
{
119 DEBUG_FORMAT
= "ln({})",
123 #[derive(Copy, Clone, PartialEq, Debug)]
132 fn live_node_kind_to_string(lnk
: LiveNodeKind
, tcx
: TyCtxt
<'_
>) -> String
{
133 let sm
= tcx
.sess
.source_map();
135 UpvarNode(s
) => format
!("Upvar node [{}]", sm
.span_to_string(s
)),
136 ExprNode(s
) => format
!("Expr node [{}]", sm
.span_to_string(s
)),
137 VarDefNode(s
) => format
!("Var def node [{}]", sm
.span_to_string(s
)),
138 ClosureNode
=> "Closure node".to_owned(),
139 ExitNode
=> "Exit node".to_owned(),
143 fn check_mod_liveness(tcx
: TyCtxt
<'_
>, module_def_id
: LocalDefId
) {
144 tcx
.hir().visit_item_likes_in_module(module_def_id
, &mut IrMaps
::new(tcx
).as_deep_visitor());
147 pub fn provide(providers
: &mut Providers
) {
148 *providers
= Providers { check_mod_liveness, ..*providers }
;
151 // ______________________________________________________________________
154 // This is the first pass and the one that drives the main
155 // computation. It walks up and down the IR once. On the way down,
156 // we count for each function the number of variables as well as
157 // liveness nodes. A liveness node is basically an expression or
158 // capture clause that does something of interest: either it has
159 // interesting control flow or it uses/defines a local variable.
161 // On the way back up, at each function node we create liveness sets
162 // (we now know precisely how big to make our various vectors and so
163 // forth) and then do the data-flow propagation to compute the set
164 // of live variables at each program point.
166 // Finally, we run back over the IR one last time and, using the
167 // computed liveness, check various safety conditions. For example,
168 // there must be no live nodes at the definition site for a variable
169 // unless it has an initializer. Similarly, each non-mutable local
170 // variable must not be assigned if there is some successor
171 // assignment. And so forth.
178 #[derive(Copy, Clone, Debug)]
185 #[derive(Copy, Clone, Debug)]
187 Param(HirId
, Symbol
),
189 Upvar(HirId
, Symbol
),
192 struct IrMaps
<'tcx
> {
194 live_node_map
: HirIdMap
<LiveNode
>,
195 variable_map
: HirIdMap
<Variable
>,
196 capture_info_map
: HirIdMap
<Rc
<Vec
<CaptureInfo
>>>,
197 var_kinds
: IndexVec
<Variable
, VarKind
>,
198 lnks
: IndexVec
<LiveNode
, LiveNodeKind
>,
202 fn new(tcx
: TyCtxt
<'tcx
>) -> IrMaps
<'tcx
> {
205 live_node_map
: HirIdMap
::default(),
206 variable_map
: HirIdMap
::default(),
207 capture_info_map
: Default
::default(),
208 var_kinds
: IndexVec
::new(),
209 lnks
: IndexVec
::new(),
213 fn add_live_node(&mut self, lnk
: LiveNodeKind
) -> LiveNode
{
214 let ln
= self.lnks
.push(lnk
);
216 debug
!("{:?} is of kind {}", ln
, live_node_kind_to_string(lnk
, self.tcx
));
221 fn add_live_node_for_node(&mut self, hir_id
: HirId
, lnk
: LiveNodeKind
) {
222 let ln
= self.add_live_node(lnk
);
223 self.live_node_map
.insert(hir_id
, ln
);
225 debug
!("{:?} is node {:?}", ln
, hir_id
);
228 fn add_variable(&mut self, vk
: VarKind
) -> Variable
{
229 let v
= self.var_kinds
.push(vk
);
232 Local(LocalInfo { id: node_id, .. }
) | Param(node_id
, _
) | Upvar(node_id
, _
) => {
233 self.variable_map
.insert(node_id
, v
);
237 debug
!("{:?} is {:?}", v
, vk
);
242 fn variable(&self, hir_id
: HirId
, span
: Span
) -> Variable
{
243 match self.variable_map
.get(&hir_id
) {
246 span_bug
!(span
, "no variable registered for id {:?}", hir_id
);
251 fn variable_name(&self, var
: Variable
) -> Symbol
{
252 match self.var_kinds
[var
] {
253 Local(LocalInfo { name, .. }
) | Param(_
, name
) | Upvar(_
, name
) => name
,
257 fn variable_is_shorthand(&self, var
: Variable
) -> bool
{
258 match self.var_kinds
[var
] {
259 Local(LocalInfo { is_shorthand, .. }
) => is_shorthand
,
260 Param(..) | Upvar(..) => false,
264 fn set_captures(&mut self, hir_id
: HirId
, cs
: Vec
<CaptureInfo
>) {
265 self.capture_info_map
.insert(hir_id
, Rc
::new(cs
));
268 fn add_from_pat(&mut self, pat
: &hir
::Pat
<'tcx
>) {
269 // For struct patterns, take note of which fields used shorthand
270 // (`x` rather than `x: x`).
271 let mut shorthand_field_ids
= HirIdSet
::default();
272 let mut pats
= VecDeque
::new();
274 while let Some(pat
) = pats
.pop_front() {
275 use rustc_hir
::PatKind
::*;
277 Binding(.., inner_pat
) => {
278 pats
.extend(inner_pat
.iter());
280 Struct(_
, fields
, _
) => {
281 let ids
= fields
.iter().filter(|f
| f
.is_shorthand
).map(|f
| f
.pat
.hir_id
);
282 shorthand_field_ids
.extend(ids
);
284 Ref(inner_pat
, _
) | Box(inner_pat
) => {
285 pats
.push_back(inner_pat
);
287 TupleStruct(_
, inner_pats
, _
) | Tuple(inner_pats
, _
) | Or(inner_pats
) => {
288 pats
.extend(inner_pats
.iter());
290 Slice(pre_pats
, inner_pat
, post_pats
) => {
291 pats
.extend(pre_pats
.iter());
292 pats
.extend(inner_pat
.iter());
293 pats
.extend(post_pats
.iter());
299 pat
.each_binding(|_
, hir_id
, _
, ident
| {
300 self.add_live_node_for_node(hir_id
, VarDefNode(ident
.span
));
301 self.add_variable(Local(LocalInfo
{
304 is_shorthand
: shorthand_field_ids
.contains(&hir_id
),
310 impl<'tcx
> Visitor
<'tcx
> for IrMaps
<'tcx
> {
311 type Map
= Map
<'tcx
>;
313 fn nested_visit_map(&mut self) -> NestedVisitorMap
<Self::Map
> {
314 NestedVisitorMap
::OnlyBodies(self.tcx
.hir())
317 fn visit_body(&mut self, body
: &'tcx hir
::Body
<'tcx
>) {
318 debug
!("visit_body {:?}", body
.id());
320 // swap in a new set of IR maps for this body
321 let mut maps
= IrMaps
::new(self.tcx
);
322 let hir_id
= maps
.tcx
.hir().body_owner(body
.id());
323 let local_def_id
= maps
.tcx
.hir().local_def_id(hir_id
);
324 let def_id
= local_def_id
.to_def_id();
326 // Don't run unused pass for #[derive()]
327 if let Some(parent
) = self.tcx
.parent(def_id
) {
328 if let DefKind
::Impl
= self.tcx
.def_kind(parent
.expect_local()) {
329 if self.tcx
.has_attr(parent
, sym
::automatically_derived
) {
335 if let Some(captures
) = maps
.tcx
.typeck(local_def_id
).closure_min_captures
.get(&def_id
) {
336 for &var_hir_id
in captures
.keys() {
337 let var_name
= maps
.tcx
.hir().name(var_hir_id
);
338 maps
.add_variable(Upvar(var_hir_id
, var_name
));
342 // gather up the various local variables, significant expressions,
344 intravisit
::walk_body(&mut maps
, body
);
347 let mut lsets
= Liveness
::new(&mut maps
, local_def_id
);
348 let entry_ln
= lsets
.compute(&body
, hir_id
);
349 lsets
.log_liveness(entry_ln
, body
.id().hir_id
);
351 // check for various error conditions
352 lsets
.visit_body(body
);
353 lsets
.warn_about_unused_upvars(entry_ln
);
354 lsets
.warn_about_unused_args(body
, entry_ln
);
357 fn visit_local(&mut self, local
: &'tcx hir
::Local
<'tcx
>) {
358 self.add_from_pat(&local
.pat
);
359 intravisit
::walk_local(self, local
);
362 fn visit_arm(&mut self, arm
: &'tcx hir
::Arm
<'tcx
>) {
363 self.add_from_pat(&arm
.pat
);
364 if let Some(hir
::Guard
::IfLet(ref pat
, _
)) = arm
.guard
{
365 self.add_from_pat(pat
);
367 intravisit
::walk_arm(self, arm
);
370 fn visit_param(&mut self, param
: &'tcx hir
::Param
<'tcx
>) {
371 param
.pat
.each_binding(|_bm
, hir_id
, _x
, ident
| {
372 let var
= match param
.pat
.kind
{
373 rustc_hir
::PatKind
::Struct(_
, fields
, _
) => Local(LocalInfo
{
378 .find(|f
| f
.ident
== ident
)
379 .map_or(false, |f
| f
.is_shorthand
),
381 _
=> Param(hir_id
, ident
.name
),
383 self.add_variable(var
);
385 intravisit
::walk_param(self, param
);
388 fn visit_expr(&mut self, expr
: &'tcx Expr
<'tcx
>) {
390 // live nodes required for uses or definitions of variables:
391 hir
::ExprKind
::Path(hir
::QPath
::Resolved(_
, ref path
)) => {
392 debug
!("expr {}: path that leads to {:?}", expr
.hir_id
, path
.res
);
393 if let Res
::Local(_var_hir_id
) = path
.res
{
394 self.add_live_node_for_node(expr
.hir_id
, ExprNode(expr
.span
));
396 intravisit
::walk_expr(self, expr
);
398 hir
::ExprKind
::Closure(..) => {
399 // Interesting control flow (for loops can contain labeled
400 // breaks or continues)
401 self.add_live_node_for_node(expr
.hir_id
, ExprNode(expr
.span
));
403 // Make a live_node for each captured variable, with the span
404 // being the location that the variable is used. This results
405 // in better error messages than just pointing at the closure
406 // construction site.
407 let mut call_caps
= Vec
::new();
408 let closure_def_id
= self.tcx
.hir().local_def_id(expr
.hir_id
);
409 if let Some(captures
) = self
411 .typeck(closure_def_id
)
412 .closure_min_captures
413 .get(&closure_def_id
.to_def_id())
415 // If closure_min_captures is Some, upvars_mentioned must also be Some
416 let upvars
= self.tcx
.upvars_mentioned(closure_def_id
).unwrap();
417 call_caps
.extend(captures
.keys().map(|var_id
| {
418 let upvar
= upvars
[var_id
];
419 let upvar_ln
= self.add_live_node(UpvarNode(upvar
.span
));
420 CaptureInfo { ln: upvar_ln, var_hid: *var_id }
423 self.set_captures(expr
.hir_id
, call_caps
);
424 intravisit
::walk_expr(self, expr
);
427 // live nodes required for interesting control flow:
428 hir
::ExprKind
::If(..) | hir
::ExprKind
::Match(..) | hir
::ExprKind
::Loop(..) => {
429 self.add_live_node_for_node(expr
.hir_id
, ExprNode(expr
.span
));
430 intravisit
::walk_expr(self, expr
);
432 hir
::ExprKind
::Binary(op
, ..) if op
.node
.is_lazy() => {
433 self.add_live_node_for_node(expr
.hir_id
, ExprNode(expr
.span
));
434 intravisit
::walk_expr(self, expr
);
437 // otherwise, live nodes are not required:
438 hir
::ExprKind
::Index(..)
439 | hir
::ExprKind
::Field(..)
440 | hir
::ExprKind
::Array(..)
441 | hir
::ExprKind
::Call(..)
442 | hir
::ExprKind
::MethodCall(..)
443 | hir
::ExprKind
::Tup(..)
444 | hir
::ExprKind
::Binary(..)
445 | hir
::ExprKind
::AddrOf(..)
446 | hir
::ExprKind
::Cast(..)
447 | hir
::ExprKind
::DropTemps(..)
448 | hir
::ExprKind
::Unary(..)
449 | hir
::ExprKind
::Break(..)
450 | hir
::ExprKind
::Continue(_
)
451 | hir
::ExprKind
::Lit(_
)
452 | hir
::ExprKind
::ConstBlock(..)
453 | hir
::ExprKind
::Ret(..)
454 | hir
::ExprKind
::Block(..)
455 | hir
::ExprKind
::Assign(..)
456 | hir
::ExprKind
::AssignOp(..)
457 | hir
::ExprKind
::Struct(..)
458 | hir
::ExprKind
::Repeat(..)
459 | hir
::ExprKind
::InlineAsm(..)
460 | hir
::ExprKind
::LlvmInlineAsm(..)
461 | hir
::ExprKind
::Box(..)
462 | hir
::ExprKind
::Yield(..)
463 | hir
::ExprKind
::Type(..)
465 | hir
::ExprKind
::Path(hir
::QPath
::TypeRelative(..))
466 | hir
::ExprKind
::Path(hir
::QPath
::LangItem(..)) => {
467 intravisit
::walk_expr(self, expr
);
473 // ______________________________________________________________________
474 // Computing liveness sets
476 // Actually we compute just a bit more than just liveness, but we use
477 // the same basic propagation framework in all cases.
// Access-kind bit flags passed to `Liveness::acc`; they may be OR'd together.
const ACC_READ: u32 = 1; // the value of the variable is read
const ACC_WRITE: u32 = 2; // the variable is written
const ACC_USE: u32 = 4; // the read counts as a "use" for unused-variable purposes
483 struct Liveness
<'a
, 'tcx
> {
484 ir
: &'a
mut IrMaps
<'tcx
>,
485 typeck_results
: &'a ty
::TypeckResults
<'tcx
>,
486 param_env
: ty
::ParamEnv
<'tcx
>,
487 upvars
: Option
<&'tcx FxIndexMap
<hir
::HirId
, hir
::Upvar
>>,
488 closure_min_captures
: Option
<&'tcx RootVariableMinCaptureList
<'tcx
>>,
489 successors
: IndexVec
<LiveNode
, Option
<LiveNode
>>,
490 rwu_table
: rwu_table
::RWUTable
,
492 /// A live node representing a point of execution before closure entry &
493 /// after closure exit. Used to calculate liveness of captured variables
494 /// through calls to the same closure. Used for Fn & FnMut closures only.
495 closure_ln
: LiveNode
,
496 /// A live node representing every 'exit' from the function, whether it be
497 /// by explicit return, panic, or other means.
500 // mappings from loop node ID to LiveNode
501 // ("break" label should map to loop node ID,
502 // it probably doesn't now)
503 break_ln
: HirIdMap
<LiveNode
>,
504 cont_ln
: HirIdMap
<LiveNode
>,
507 impl<'a
, 'tcx
> Liveness
<'a
, 'tcx
> {
508 fn new(ir
: &'a
mut IrMaps
<'tcx
>, body_owner
: LocalDefId
) -> Liveness
<'a
, 'tcx
> {
509 let typeck_results
= ir
.tcx
.typeck(body_owner
);
510 let param_env
= ir
.tcx
.param_env(body_owner
);
511 let upvars
= ir
.tcx
.upvars_mentioned(body_owner
);
512 let closure_min_captures
= typeck_results
.closure_min_captures
.get(&body_owner
.to_def_id());
513 let closure_ln
= ir
.add_live_node(ClosureNode
);
514 let exit_ln
= ir
.add_live_node(ExitNode
);
516 let num_live_nodes
= ir
.lnks
.len();
517 let num_vars
= ir
.var_kinds
.len();
524 closure_min_captures
,
525 successors
: IndexVec
::from_elem_n(None
, num_live_nodes
),
526 rwu_table
: rwu_table
::RWUTable
::new(num_live_nodes
, num_vars
),
529 break_ln
: Default
::default(),
530 cont_ln
: Default
::default(),
534 fn live_node(&self, hir_id
: HirId
, span
: Span
) -> LiveNode
{
535 match self.ir
.live_node_map
.get(&hir_id
) {
538 // This must be a mismatch between the ir_map construction
539 // above and the propagation code below; the two sets of
540 // code have to agree about which AST nodes are worth
541 // creating liveness nodes for.
542 span_bug
!(span
, "no live node registered for node {:?}", hir_id
);
547 fn variable(&self, hir_id
: HirId
, span
: Span
) -> Variable
{
548 self.ir
.variable(hir_id
, span
)
551 fn define_bindings_in_pat(&mut self, pat
: &hir
::Pat
<'_
>, mut succ
: LiveNode
) -> LiveNode
{
552 // In an or-pattern, only consider the first pattern; any later patterns
553 // must have the same bindings, and we also consider the first pattern
554 // to be the "authoritative" set of ids.
555 pat
.each_binding_or_first(&mut |_
, hir_id
, pat_sp
, ident
| {
556 let ln
= self.live_node(hir_id
, pat_sp
);
557 let var
= self.variable(hir_id
, ident
.span
);
558 self.init_from_succ(ln
, succ
);
559 self.define(ln
, var
);
565 fn live_on_entry(&self, ln
: LiveNode
, var
: Variable
) -> bool
{
566 self.rwu_table
.get_reader(ln
, var
)
569 // Is this variable live on entry to any of its successor nodes?
570 fn live_on_exit(&self, ln
: LiveNode
, var
: Variable
) -> bool
{
571 let successor
= self.successors
[ln
].unwrap();
572 self.live_on_entry(successor
, var
)
575 fn used_on_entry(&self, ln
: LiveNode
, var
: Variable
) -> bool
{
576 self.rwu_table
.get_used(ln
, var
)
579 fn assigned_on_entry(&self, ln
: LiveNode
, var
: Variable
) -> bool
{
580 self.rwu_table
.get_writer(ln
, var
)
583 fn assigned_on_exit(&self, ln
: LiveNode
, var
: Variable
) -> bool
{
584 let successor
= self.successors
[ln
].unwrap();
585 self.assigned_on_entry(successor
, var
)
588 fn write_vars
<F
>(&self, wr
: &mut dyn Write
, mut test
: F
) -> io
::Result
<()>
590 F
: FnMut(Variable
) -> bool
,
592 for var_idx
in 0..self.ir
.var_kinds
.len() {
593 let var
= Variable
::from(var_idx
);
595 write
!(wr
, " {:?}", var
)?
;
601 #[allow(unused_must_use)]
602 fn ln_str(&self, ln
: LiveNode
) -> String
{
603 let mut wr
= Vec
::new();
605 let wr
= &mut wr
as &mut dyn Write
;
606 write
!(wr
, "[{:?} of kind {:?} reads", ln
, self.ir
.lnks
[ln
]);
607 self.write_vars(wr
, |var
| self.rwu_table
.get_reader(ln
, var
));
608 write
!(wr
, " writes");
609 self.write_vars(wr
, |var
| self.rwu_table
.get_writer(ln
, var
));
611 self.write_vars(wr
, |var
| self.rwu_table
.get_used(ln
, var
));
613 write
!(wr
, " precedes {:?}]", self.successors
[ln
]);
615 String
::from_utf8(wr
).unwrap()
618 fn log_liveness(&self, entry_ln
: LiveNode
, hir_id
: hir
::HirId
) {
619 // hack to skip the loop unless debug! is enabled:
621 "^^ liveness computation results for body {} (entry={:?})",
623 for ln_idx
in 0..self.ir
.lnks
.len() {
624 debug
!("{:?}", self.ln_str(LiveNode
::from(ln_idx
)));
632 fn init_empty(&mut self, ln
: LiveNode
, succ_ln
: LiveNode
) {
633 self.successors
[ln
] = Some(succ_ln
);
635 // It is not necessary to initialize the RWUs here because they are all
636 // empty when created, and the sets only grow during iterations.
639 fn init_from_succ(&mut self, ln
: LiveNode
, succ_ln
: LiveNode
) {
640 // more efficient version of init_empty() / merge_from_succ()
641 self.successors
[ln
] = Some(succ_ln
);
642 self.rwu_table
.copy(ln
, succ_ln
);
643 debug
!("init_from_succ(ln={}, succ={})", self.ln_str(ln
), self.ln_str(succ_ln
));
646 fn merge_from_succ(&mut self, ln
: LiveNode
, succ_ln
: LiveNode
) -> bool
{
651 let changed
= self.rwu_table
.union(ln
, succ_ln
);
652 debug
!("merge_from_succ(ln={:?}, succ={}, changed={})", ln
, self.ln_str(succ_ln
), changed
);
656 // Indicates that a local variable was *defined*; we know that no
657 // uses of the variable can precede the definition (resolve checks
658 // this) so we just clear out all the data.
659 fn define(&mut self, writer
: LiveNode
, var
: Variable
) {
660 let used
= self.rwu_table
.get_used(writer
, var
);
661 self.rwu_table
.set(writer
, var
, rwu_table
::RWU { reader: false, writer: false, used }
);
662 debug
!("{:?} defines {:?}: {}", writer
, var
, self.ln_str(writer
));
665 // Either read, write, or both depending on the acc bitset
666 fn acc(&mut self, ln
: LiveNode
, var
: Variable
, acc
: u32) {
667 debug
!("{:?} accesses[{:x}] {:?}: {}", ln
, acc
, var
, self.ln_str(ln
));
669 let mut rwu
= self.rwu_table
.get(ln
, var
);
671 if (acc
& ACC_WRITE
) != 0 {
676 // Important: if we both read/write, must do read second
677 // or else the write will override.
678 if (acc
& ACC_READ
) != 0 {
682 if (acc
& ACC_USE
) != 0 {
686 self.rwu_table
.set(ln
, var
, rwu
);
689 fn compute(&mut self, body
: &hir
::Body
<'_
>, hir_id
: HirId
) -> LiveNode
{
690 debug
!("compute: for body {:?}", body
.id().hir_id
);
692 // # Liveness of captured variables
694 // When computing the liveness for captured variables we take into
695 // account how variable is captured (ByRef vs ByValue) and what is the
696 // closure kind (Generator / FnOnce vs Fn / FnMut).
698 // Variables captured by reference are assumed to be used on the exit
701 // In FnOnce closures, variables captured by value are known to be dead
702 // on exit since it is impossible to call the closure again.
704 // In Fn / FnMut closures, variables captured by value are live on exit
705 // if they are live on the entry to the closure, since only the closure
706 // itself can access them on subsequent calls.
708 if let Some(closure_min_captures
) = self.closure_min_captures
{
709 // Mark upvars captured by reference as used after closure exits.
710 for (&var_hir_id
, min_capture_list
) in closure_min_captures
{
711 for captured_place
in min_capture_list
{
712 match captured_place
.info
.capture_kind
{
713 ty
::UpvarCapture
::ByRef(_
) => {
714 let var
= self.variable(
716 captured_place
.get_capture_kind_span(self.ir
.tcx
),
718 self.acc(self.exit_ln
, var
, ACC_READ
| ACC_USE
);
720 ty
::UpvarCapture
::ByValue(_
) => {}
726 let succ
= self.propagate_through_expr(&body
.value
, self.exit_ln
);
728 if self.closure_min_captures
.is_none() {
729 // Either not a closure, or closure without any captured variables.
730 // No need to determine liveness of captured variables, since there
735 let ty
= self.typeck_results
.node_type(hir_id
);
737 ty
::Closure(_def_id
, substs
) => match substs
.as_closure().kind() {
738 ty
::ClosureKind
::Fn
=> {}
739 ty
::ClosureKind
::FnMut
=> {}
740 ty
::ClosureKind
::FnOnce
=> return succ
,
742 ty
::Generator(..) => return succ
,
746 "{} has upvars so it should have a closure type: {:?}",
753 // Propagate through calls to the closure.
755 self.init_from_succ(self.closure_ln
, succ
);
756 for param
in body
.params
{
757 param
.pat
.each_binding(|_bm
, hir_id
, _x
, ident
| {
758 let var
= self.variable(hir_id
, ident
.span
);
759 self.define(self.closure_ln
, var
);
763 if !self.merge_from_succ(self.exit_ln
, self.closure_ln
) {
766 assert_eq
!(succ
, self.propagate_through_expr(&body
.value
, self.exit_ln
));
772 fn propagate_through_block(&mut self, blk
: &hir
::Block
<'_
>, succ
: LiveNode
) -> LiveNode
{
773 if blk
.targeted_by_break
{
774 self.break_ln
.insert(blk
.hir_id
, succ
);
776 let succ
= self.propagate_through_opt_expr(blk
.expr
.as_deref(), succ
);
777 blk
.stmts
.iter().rev().fold(succ
, |succ
, stmt
| self.propagate_through_stmt(stmt
, succ
))
780 fn propagate_through_stmt(&mut self, stmt
: &hir
::Stmt
<'_
>, succ
: LiveNode
) -> LiveNode
{
782 hir
::StmtKind
::Local(ref local
) => {
783 // Note: we mark the variable as defined regardless of whether
784 // there is an initializer. Initially I had thought to only mark
785 // the live variable as defined if it was initialized, and then we
786 // could check for uninit variables just by scanning what is live
787 // at the start of the function. But that doesn't work so well for
788 // immutable variables defined in a loop:
789 // loop { let x; x = 5; }
790 // because the "assignment" loops back around and generates an error.
792 // So now we just check that variables defined w/o an
793 // initializer are not live at the point of their
794 // initialization, which is mildly more complex than checking
795 // once at the func header but otherwise equivalent.
797 let succ
= self.propagate_through_opt_expr(local
.init
.as_deref(), succ
);
798 self.define_bindings_in_pat(&local
.pat
, succ
)
800 hir
::StmtKind
::Item(..) => succ
,
801 hir
::StmtKind
::Expr(ref expr
) | hir
::StmtKind
::Semi(ref expr
) => {
802 self.propagate_through_expr(&expr
, succ
)
807 fn propagate_through_exprs(&mut self, exprs
: &[Expr
<'_
>], succ
: LiveNode
) -> LiveNode
{
808 exprs
.iter().rev().fold(succ
, |succ
, expr
| self.propagate_through_expr(&expr
, succ
))
811 fn propagate_through_opt_expr(
813 opt_expr
: Option
<&Expr
<'_
>>,
816 opt_expr
.map_or(succ
, |expr
| self.propagate_through_expr(expr
, succ
))
819 fn propagate_through_expr(&mut self, expr
: &Expr
<'_
>, succ
: LiveNode
) -> LiveNode
{
820 debug
!("propagate_through_expr: {:?}", expr
);
823 // Interesting cases with control flow or which gen/kill
824 hir
::ExprKind
::Path(hir
::QPath
::Resolved(_
, ref path
)) => {
825 self.access_path(expr
.hir_id
, path
, succ
, ACC_READ
| ACC_USE
)
828 hir
::ExprKind
::Field(ref e
, _
) => self.propagate_through_expr(&e
, succ
),
830 hir
::ExprKind
::Closure(..) => {
831 debug
!("{:?} is an ExprKind::Closure", expr
);
833 // the construction of a closure itself is not important,
834 // but we have to consider the closed over variables.
840 .unwrap_or_else(|| span_bug
!(expr
.span
, "no registered caps"));
842 caps
.iter().rev().fold(succ
, |succ
, cap
| {
843 self.init_from_succ(cap
.ln
, succ
);
844 let var
= self.variable(cap
.var_hid
, expr
.span
);
845 self.acc(cap
.ln
, var
, ACC_READ
| ACC_USE
);
850 // Note that labels have been resolved, so we don't need to look
851 // at the label ident
852 hir
::ExprKind
::Loop(ref blk
, ..) => self.propagate_through_loop(expr
, &blk
, succ
),
854 hir
::ExprKind
::If(ref cond
, ref then
, ref else_opt
) => {
869 self.propagate_through_opt_expr(else_opt
.as_ref().map(|e
| &**e
), succ
);
870 let then_ln
= self.propagate_through_expr(&then
, succ
);
871 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
872 self.init_from_succ(ln
, else_ln
);
873 self.merge_from_succ(ln
, then_ln
);
874 self.propagate_through_expr(&cond
, ln
)
877 hir
::ExprKind
::Match(ref e
, arms
, _
) => {
892 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
893 self.init_empty(ln
, succ
);
895 let body_succ
= self.propagate_through_expr(&arm
.body
, succ
);
897 let guard_succ
= arm
.guard
.as_ref().map_or(body_succ
, |g
| match g
{
898 hir
::Guard
::If(e
) => self.propagate_through_expr(e
, body_succ
),
899 hir
::Guard
::IfLet(pat
, e
) => {
900 let let_bind
= self.define_bindings_in_pat(pat
, body_succ
);
901 self.propagate_through_expr(e
, let_bind
)
904 let arm_succ
= self.define_bindings_in_pat(&arm
.pat
, guard_succ
);
905 self.merge_from_succ(ln
, arm_succ
);
907 self.propagate_through_expr(&e
, ln
)
910 hir
::ExprKind
::Ret(ref o_e
) => {
911 // Ignore succ and subst exit_ln.
912 self.propagate_through_opt_expr(o_e
.as_ref().map(|e
| &**e
), self.exit_ln
)
915 hir
::ExprKind
::Break(label
, ref opt_expr
) => {
916 // Find which label this break jumps to
917 let target
= match label
.target_id
{
918 Ok(hir_id
) => self.break_ln
.get(&hir_id
),
919 Err(err
) => span_bug
!(expr
.span
, "loop scope error: {}", err
),
923 // Now that we know the label we're going to,
924 // look it up in the break loop nodes table
927 Some(b
) => self.propagate_through_opt_expr(opt_expr
.as_ref().map(|e
| &**e
), b
),
928 None
=> span_bug
!(expr
.span
, "`break` to unknown label"),
932 hir
::ExprKind
::Continue(label
) => {
933 // Find which label this expr continues to
936 .unwrap_or_else(|err
| span_bug
!(expr
.span
, "loop scope error: {}", err
));
938 // Now that we know the label we're going to,
939 // look it up in the continue loop nodes table
943 .unwrap_or_else(|| span_bug
!(expr
.span
, "continue to unknown label"))
946 hir
::ExprKind
::Assign(ref l
, ref r
, _
) => {
947 // see comment on places in
948 // propagate_through_place_components()
949 let succ
= self.write_place(&l
, succ
, ACC_WRITE
);
950 let succ
= self.propagate_through_place_components(&l
, succ
);
951 self.propagate_through_expr(&r
, succ
)
954 hir
::ExprKind
::AssignOp(_
, ref l
, ref r
) => {
955 // an overloaded assign op is like a method call
956 if self.typeck_results
.is_method_call(expr
) {
957 let succ
= self.propagate_through_expr(&l
, succ
);
958 self.propagate_through_expr(&r
, succ
)
960 // see comment on places in
961 // propagate_through_place_components()
962 let succ
= self.write_place(&l
, succ
, ACC_WRITE
| ACC_READ
);
963 let succ
= self.propagate_through_expr(&r
, succ
);
964 self.propagate_through_place_components(&l
, succ
)
968 // Uninteresting cases: just propagate in rev exec order
969 hir
::ExprKind
::Array(ref exprs
) => self.propagate_through_exprs(exprs
, succ
),
971 hir
::ExprKind
::Struct(_
, ref fields
, ref with_expr
) => {
972 let succ
= self.propagate_through_opt_expr(with_expr
.as_ref().map(|e
| &**e
), succ
);
976 .fold(succ
, |succ
, field
| self.propagate_through_expr(&field
.expr
, succ
))
979 hir
::ExprKind
::Call(ref f
, ref args
) => {
980 let m
= self.ir
.tcx
.parent_module(expr
.hir_id
).to_def_id();
981 let succ
= if self.ir
.tcx
.is_ty_uninhabited_from(
983 self.typeck_results
.expr_ty(expr
),
990 let succ
= self.propagate_through_exprs(args
, succ
);
991 self.propagate_through_expr(&f
, succ
)
994 hir
::ExprKind
::MethodCall(.., ref args
, _
) => {
995 let m
= self.ir
.tcx
.parent_module(expr
.hir_id
).to_def_id();
996 let succ
= if self.ir
.tcx
.is_ty_uninhabited_from(
998 self.typeck_results
.expr_ty(expr
),
1006 self.propagate_through_exprs(args
, succ
)
1009 hir
::ExprKind
::Tup(ref exprs
) => self.propagate_through_exprs(exprs
, succ
),
1011 hir
::ExprKind
::Binary(op
, ref l
, ref r
) if op
.node
.is_lazy() => {
1012 let r_succ
= self.propagate_through_expr(&r
, succ
);
1014 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
1015 self.init_from_succ(ln
, succ
);
1016 self.merge_from_succ(ln
, r_succ
);
1018 self.propagate_through_expr(&l
, ln
)
1021 hir
::ExprKind
::Index(ref l
, ref r
) | hir
::ExprKind
::Binary(_
, ref l
, ref r
) => {
1022 let r_succ
= self.propagate_through_expr(&r
, succ
);
1023 self.propagate_through_expr(&l
, r_succ
)
1026 hir
::ExprKind
::Box(ref e
)
1027 | hir
::ExprKind
::AddrOf(_
, _
, ref e
)
1028 | hir
::ExprKind
::Cast(ref e
, _
)
1029 | hir
::ExprKind
::Type(ref e
, _
)
1030 | hir
::ExprKind
::DropTemps(ref e
)
1031 | hir
::ExprKind
::Unary(_
, ref e
)
1032 | hir
::ExprKind
::Yield(ref e
, _
)
1033 | hir
::ExprKind
::Repeat(ref e
, _
) => self.propagate_through_expr(&e
, succ
),
1035 hir
::ExprKind
::InlineAsm(ref asm
) => {
1036 // Handle non-returning asm
1037 let mut succ
= if asm
.options
.contains(InlineAsmOptions
::NORETURN
) {
1043 // Do a first pass for writing outputs only
1044 for (op
, _op_sp
) in asm
.operands
.iter().rev() {
1046 hir
::InlineAsmOperand
::In { .. }
1047 | hir
::InlineAsmOperand
::Const { .. }
1048 | hir
::InlineAsmOperand
::Sym { .. }
=> {}
1049 hir
::InlineAsmOperand
::Out { expr, .. }
=> {
1050 if let Some(expr
) = expr
{
1051 succ
= self.write_place(expr
, succ
, ACC_WRITE
);
1054 hir
::InlineAsmOperand
::InOut { expr, .. }
=> {
1055 succ
= self.write_place(expr
, succ
, ACC_READ
| ACC_WRITE
| ACC_USE
);
1057 hir
::InlineAsmOperand
::SplitInOut { out_expr, .. }
=> {
1058 if let Some(expr
) = out_expr
{
1059 succ
= self.write_place(expr
, succ
, ACC_WRITE
);
1065 // Then do a second pass for inputs
1066 let mut succ
= succ
;
1067 for (op
, _op_sp
) in asm
.operands
.iter().rev() {
1069 hir
::InlineAsmOperand
::In { expr, .. }
1070 | hir
::InlineAsmOperand
::Sym { expr, .. }
=> {
1071 succ
= self.propagate_through_expr(expr
, succ
)
1073 hir
::InlineAsmOperand
::Out { expr, .. }
=> {
1074 if let Some(expr
) = expr
{
1075 succ
= self.propagate_through_place_components(expr
, succ
);
1078 hir
::InlineAsmOperand
::InOut { expr, .. }
=> {
1079 succ
= self.propagate_through_place_components(expr
, succ
);
1081 hir
::InlineAsmOperand
::SplitInOut { in_expr, out_expr, .. }
=> {
1082 if let Some(expr
) = out_expr
{
1083 succ
= self.propagate_through_place_components(expr
, succ
);
1085 succ
= self.propagate_through_expr(in_expr
, succ
);
1087 hir
::InlineAsmOperand
::Const { .. }
=> {}
1093 hir
::ExprKind
::LlvmInlineAsm(ref asm
) => {
1094 let ia
= &asm
.inner
;
1095 let outputs
= asm
.outputs_exprs
;
1096 let inputs
= asm
.inputs_exprs
;
1097 let succ
= iter
::zip(&ia
.outputs
, outputs
).rev().fold(succ
, |succ
, (o
, output
)| {
1098 // see comment on places
1099 // in propagate_through_place_components()
1101 self.propagate_through_expr(output
, succ
)
1103 let acc
= if o
.is_rw { ACC_WRITE | ACC_READ }
else { ACC_WRITE }
;
1104 let succ
= self.write_place(output
, succ
, acc
);
1105 self.propagate_through_place_components(output
, succ
)
1109 // Inputs are executed first. Propagate last because of rev order
1110 self.propagate_through_exprs(inputs
, succ
)
1113 hir
::ExprKind
::Lit(..)
1114 | hir
::ExprKind
::ConstBlock(..)
1115 | hir
::ExprKind
::Err
1116 | hir
::ExprKind
::Path(hir
::QPath
::TypeRelative(..))
1117 | hir
::ExprKind
::Path(hir
::QPath
::LangItem(..)) => succ
,
1119 // Note that labels have been resolved, so we don't need to look
1120 // at the label ident
1121 hir
::ExprKind
::Block(ref blk
, _
) => self.propagate_through_block(&blk
, succ
),
// NOTE(review): this excerpt is line-fragmented and several original lines are
// elided (e.g. the `match expr.kind {` opener between lines 1125 and 1176, and
// the closing braces). Code is kept byte-identical below; only comments added.
//
// Generates the "component reads" for a place expression appearing in
// assignment position, returning the live node that precedes those reads.
1125 fn propagate_through_place_components(&mut self, expr
: &Expr
<'_
>, succ
: LiveNode
) -> LiveNode
{
1128 // In general, the full flow graph structure for an
1129 // assignment/move/etc can be handled in one of two ways,
1130 // depending on whether what is being assigned is a "tracked
1131 // value" or not. A tracked value is basically a local
1132 // variable or argument.
1134 // The two kinds of graphs are:
1136 // Tracked place Untracked place
1137 // ----------------------++-----------------------
1141 // (rvalue) || (rvalue)
1144 // (write of place) || (place components)
1149 // ----------------------++-----------------------
1151 // I will cover the two cases in turn:
1155 // A tracked place is a local variable/argument `x`. In
1156 // these cases, the link_node where the write occurs is linked
1157 // to node id of `x`. The `write_place()` routine generates
1158 // the contents of this node. There are no subcomponents to
1161 // # Non-tracked places
1163 // These are places like `x[5]` or `x.f`. In that case, we
1164 // basically ignore the value which is written to but generate
1165 // reads for the components---`x` in these two examples. The
1166 // components reads are generated by
1167 // `propagate_through_place_components()` (this fn).
1171 // It is still possible to observe assignments to non-places;
1172 // these errors are detected in the later pass borrowck. We
1173 // just ignore such cases and treat them as reads.
// A bare path is the tracked-place case: the write itself is handled by
// `write_place()`, so there is no component read — `succ` passes through.
1176 hir
::ExprKind
::Path(_
) => succ
,
// `e.f`: the component read is of the base expression `e`.
1177 hir
::ExprKind
::Field(ref e
, _
) => self.propagate_through_expr(&e
, succ
),
// Anything else (including non-places; see comment above) is treated as a read.
1178 _
=> self.propagate_through_expr(expr
, succ
),
// NOTE(review): fragmented excerpt — the `match expr.kind {` opener, the
// fallthrough arm, and the closing braces are elided from view. Code kept
// byte-identical; only comments added.
1182 // see comment on propagate_through_place()
1183 fn write_place(&mut self, expr
: &Expr
<'_
>, succ
: LiveNode
, acc
: u32) -> LiveNode
{
// Resolved path = tracked place (local/argument): record the access
// (`acc` is a bitmask of ACC_WRITE / ACC_READ / ACC_USE) on the variable.
1185 hir
::ExprKind
::Path(hir
::QPath
::Resolved(_
, ref path
)) => {
1186 self.access_path(expr
.hir_id
, path
, succ
, acc
)
1189 // We do not track other places, so just propagate through
1190 // to their subcomponents. Also, it may happen that
1191 // non-places occur here, because those are detected in the
1192 // later pass borrowck.
1205 let ln
= self.live_node(hir_id
, span
);
1207 self.init_from_succ(ln
, succ
);
1208 let var
= self.variable(var_hid
, span
);
1209 self.acc(ln
, var
, acc
);
1217 path
: &hir
::Path
<'_
>,
1222 Res
::Local(hid
) => {
1223 let in_upvars
= self.upvars
.map_or(false, |u
| u
.contains_key(&hid
));
1224 let in_captures
= self.closure_min_captures
.map_or(false, |c
| c
.contains_key(&hid
));
1226 match (in_upvars
, in_captures
) {
1227 (false, _
) | (true, true) => self.access_var(hir_id
, hid
, succ
, acc
, path
.span
),
1229 // This case is possible when with RFC-2229, a wild pattern
1230 // is used within a closure.
1231 // eg: `let _ = x`. The closure doesn't capture x here,
1232 // even though it's mentioned in the closure.
// NOTE(review): fragmented excerpt — some parameter lines (before `body`) and
// the closing braces/return are elided. Code kept byte-identical; comments only.
//
// Computes liveness through a `loop` body by iterating to a fixed point:
// `ln` is the node at the loop head, `succ` the node after the loop.
1241 fn propagate_through_loop(
1244 body
: &hir
::Block
<'_
>,
1248 We model control flow like this:
1255 Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
1256 Meanwhile, a `break` expression will have a successor of `succ`.
1260 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
// Start the loop-head node empty; merges below grow it monotonically.
1261 self.init_empty(ln
, succ
);
1262 debug
!("propagate_through_loop: using id for loop body {} {:?}", expr
.hir_id
, body
);
// `break` jumps to the code after the loop; `continue` back to the head.
1264 self.break_ln
.insert(expr
.hir_id
, succ
);
1266 self.cont_ln
.insert(expr
.hir_id
, ln
);
1268 let body_ln
= self.propagate_through_block(body
, ln
);
1270 // repeat until fixed point is reached:
1271 while self.merge_from_succ(ln
, body_ln
) {
// Once no merge changes anything, re-running the body must be a no-op.
1272 assert_eq
!(body_ln
, self.propagate_through_block(body
, ln
));
1279 // _______________________________________________________________________
1280 // Checking for error conditions
// NOTE(review): fragmented excerpt — method closing braces and the impl's
// closing brace are elided. Code kept byte-identical; comments only.
//
// Second AST walk: visits locals, expressions, and match arms to report the
// unused-variable / dead-assignment conditions computed by the dataflow pass.
1282 impl<'a
, 'tcx
> Visitor
<'tcx
> for Liveness
<'a
, 'tcx
> {
1283 type Map
= intravisit
::ErasedMap
<'tcx
>;
// No nested-item visiting: this visitor only walks the current body.
1285 fn nested_visit_map(&mut self) -> NestedVisitorMap
<Self::Map
> {
1286 NestedVisitorMap
::None
// For `let` bindings: warn on dead assignment only when there is an
// initializer (a plain declaration assigns nothing).
1289 fn visit_local(&mut self, local
: &'tcx hir
::Local
<'tcx
>) {
1290 self.check_unused_vars_in_pat(&local
.pat
, None
, |spans
, hir_id
, ln
, var
| {
1291 if local
.init
.is_some() {
1292 self.warn_about_dead_assign(spans
, hir_id
, ln
, var
);
1296 intravisit
::walk_local(self, local
);
1299 fn visit_expr(&mut self, ex
: &'tcx Expr
<'tcx
>) {
1300 check_expr(self, ex
);
// Match-arm bindings get the unused-variable check but no extra action
// when the binding is used on entry (empty callback).
1303 fn visit_arm(&mut self, arm
: &'tcx hir
::Arm
<'tcx
>) {
1304 self.check_unused_vars_in_pat(&arm
.pat
, None
, |_
, _
, _
, _
| {}
);
1305 intravisit
::walk_arm(self, arm
);
// NOTE(review): fragmented excerpt — the `match expr.kind {` opener, several
// interior lines, and closing braces are elided. Code kept byte-identical;
// comments only added.
//
// Per-expression error checking: validates assignment targets (and asm output
// operands) as places, then recurses into subexpressions.
1309 fn check_expr
<'tcx
>(this
: &mut Liveness
<'_
, 'tcx
>, expr
: &'tcx Expr
<'tcx
>) {
// Plain assignment: the LHS must be a checkable place.
1311 hir
::ExprKind
::Assign(ref l
, ..) => {
1312 this
.check_place(&l
);
// Compound assignment (`+=` etc.): only a place check when it is the
// built-in operator — an overloaded op is a method call on the LHS.
1315 hir
::ExprKind
::AssignOp(_
, ref l
, _
) => {
1316 if !this
.typeck_results
.is_method_call(expr
) {
1317 this
.check_place(&l
);
// Inline asm: every output-like operand must be a place.
1321 hir
::ExprKind
::InlineAsm(ref asm
) => {
1322 for (op
, _op_sp
) in asm
.operands
{
1324 hir
::InlineAsmOperand
::Out { expr, .. }
=> {
1325 if let Some(expr
) = expr
{
1326 this
.check_place(expr
);
1329 hir
::InlineAsmOperand
::InOut { expr, .. }
=> {
1330 this
.check_place(expr
);
1332 hir
::InlineAsmOperand
::SplitInOut { out_expr, .. }
=> {
1333 if let Some(out_expr
) = out_expr
{
1334 this
.check_place(out_expr
);
// Legacy LLVM-style asm: visit inputs as ordinary expressions, then
// place-check and visit each output.
1342 hir
::ExprKind
::LlvmInlineAsm(ref asm
) => {
1343 for input
in asm
.inputs_exprs
{
1344 this
.visit_expr(input
);
1347 // Output operands must be places
1348 for (o
, output
) in iter
::zip(&asm
.inner
.outputs
, asm
.outputs_exprs
) {
1350 this
.check_place(output
);
1352 this
.visit_expr(output
);
1356 // no correctness conditions related to liveness
1357 hir
::ExprKind
::Call(..)
1358 | hir
::ExprKind
::MethodCall(..)
1359 | hir
::ExprKind
::Match(..)
1360 | hir
::ExprKind
::Loop(..)
1361 | hir
::ExprKind
::Index(..)
1362 | hir
::ExprKind
::Field(..)
1363 | hir
::ExprKind
::Array(..)
1364 | hir
::ExprKind
::Tup(..)
1365 | hir
::ExprKind
::Binary(..)
1366 | hir
::ExprKind
::Cast(..)
1367 | hir
::ExprKind
::If(..)
1368 | hir
::ExprKind
::DropTemps(..)
1369 | hir
::ExprKind
::Unary(..)
1370 | hir
::ExprKind
::Ret(..)
1371 | hir
::ExprKind
::Break(..)
1372 | hir
::ExprKind
::Continue(..)
1373 | hir
::ExprKind
::Lit(_
)
1374 | hir
::ExprKind
::ConstBlock(..)
1375 | hir
::ExprKind
::Block(..)
1376 | hir
::ExprKind
::AddrOf(..)
1377 | hir
::ExprKind
::Struct(..)
1378 | hir
::ExprKind
::Repeat(..)
1379 | hir
::ExprKind
::Closure(..)
1380 | hir
::ExprKind
::Path(_
)
1381 | hir
::ExprKind
::Yield(..)
1382 | hir
::ExprKind
::Box(..)
1383 | hir
::ExprKind
::Type(..)
1384 | hir
::ExprKind
::Err
=> {}
// Always recurse into subexpressions.
1387 intravisit
::walk_expr(this
, expr
);
1390 impl<'tcx
> Liveness
<'_
, 'tcx
> {
// NOTE(review): fragmented excerpt — the `match expr.kind {` opener and
// closing braces are elided. Code kept byte-identical; comments only.
//
// Checks an assignment target: for a local variable, report when the value
// being assigned is never subsequently read.
1391 fn check_place(&mut self, expr
: &'tcx Expr
<'tcx
>) {
1393 hir
::ExprKind
::Path(hir
::QPath
::Resolved(_
, ref path
)) => {
1394 if let Res
::Local(var_hid
) = path
.res
{
1395 // Assignment to an immutable variable or argument: only legal
1396 // if there is no later assignment. If this local is actually
1397 // mutable, then check for a reassignment to flag the mutability
1399 let ln
= self.live_node(expr
.hir_id
, expr
.span
);
1400 let var
= self.variable(var_hid
, expr
.span
);
1401 self.warn_about_dead_assign(vec
![expr
.span
], expr
.hir_id
, ln
, var
);
1405 // For other kinds of places, no checks are required,
1406 // and any embedded expressions are actually rvalues
1407 intravisit
::walk_expr(self, expr
);
// NOTE(review): fragmented excerpt. The bodies of the two `if`s (original
// lines 1415-1416 and 1419-1420) are elided — presumably each bails out with
// `return None;` (anonymous or underscore-prefixed names are not warned);
// TODO confirm against the full source. Code kept byte-identical below.
//
// Returns `Some(name)` when a lint should be emitted for `var`, `None` when
// the variable's name suppresses the warning.
1412 fn should_warn(&self, var
: Variable
) -> Option
<String
> {
1413 let name
= self.ir
.variable_name(var
);
1414 if name
== kw
::Empty
{
1417 let name
: &str = &name
.as_str();
1418 if name
.as_bytes()[0] == b'_'
{
1421 Some(name
.to_owned())
// NOTE(review): fragmented excerpt — match/if arms' elided lines and closing
// braces are not shown. Code kept byte-identical; comments only.
//
// Lints closure captures: a by-value capture that is never read gets
// `unused_assignments` (if assigned) or `unused_variables` (if never used);
// by-ref captures are skipped entirely.
1424 fn warn_about_unused_upvars(&self, entry_ln
: LiveNode
) {
1425 let closure_min_captures
= match self.closure_min_captures
{
1427 Some(closure_min_captures
) => closure_min_captures
,
1430 // If closure_min_captures is Some(), upvars must be Some() too.
1431 for (&var_hir_id
, min_capture_list
) in closure_min_captures
{
1432 for captured_place
in min_capture_list
{
1433 match captured_place
.info
.capture_kind
{
1434 ty
::UpvarCapture
::ByValue(_
) => {}
// By-ref captures never warn here.
1435 ty
::UpvarCapture
::ByRef(..) => continue,
1437 let span
= captured_place
.get_capture_kind_span(self.ir
.tcx
);
1438 let var
= self.variable(var_hir_id
, span
);
// Used somewhere but the captured value itself is never read:
// this is a dead capture-by-value.
1439 if self.used_on_entry(entry_ln
, var
) {
1440 if !self.live_on_entry(entry_ln
, var
) {
1441 if let Some(name
) = self.should_warn(var
) {
1442 self.ir
.tcx
.struct_span_lint_hir(
1443 lint
::builtin
::UNUSED_ASSIGNMENTS
,
1447 lint
.build(&format
!(
1448 "value captured by `{}` is never read",
1451 .help("did you mean to capture by reference instead?")
// Never used at all: plain unused-variable lint.
1458 if let Some(name
) = self.should_warn(var
) {
1459 self.ir
.tcx
.struct_span_lint_hir(
1460 lint
::builtin
::UNUSED_VARIABLES
,
1464 lint
.build(&format
!("unused variable: `{}`", name
))
1465 .help("did you mean to capture by reference instead?")
// NOTE(review): fragmented excerpt — closing braces elided. Code kept
// byte-identical; comments only.
//
// For each function parameter pattern: if a binding is used but its incoming
// value is not live on entry, report that the passed value is never read.
1475 fn warn_about_unused_args(&self, body
: &hir
::Body
<'_
>, entry_ln
: LiveNode
) {
1476 for p
in body
.params
{
1477 self.check_unused_vars_in_pat(&p
.pat
, Some(entry_ln
), |spans
, hir_id
, ln
, var
| {
1478 if !self.live_on_entry(ln
, var
) {
1479 self.report_unused_assign(hir_id
, spans
, var
, |name
| {
1480 format
!("value passed to `{}` is never read", name
)
// NOTE(review): fragmented excerpt — the `&self` / `pat` parameter lines
// (original 1488-1489) and closing braces are elided. Code kept
// byte-identical; comments only.
//
// Groups a pattern's bindings by variable name (merging or-pattern
// duplicates), then for each group either invokes `on_used_on_entry` or
// reports the binding as unused.
1487 fn check_unused_vars_in_pat(
1490 entry_ln
: Option
<LiveNode
>,
1491 on_used_on_entry
: impl Fn(Vec
<Span
>, HirId
, LiveNode
, Variable
),
1493 // In an or-pattern, only consider the variable; any later patterns must have the same
1494 // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
1495 // However, we should take the ids and spans of variables with the same name from the later
1496 // patterns so the suggestions to prefix with underscores will apply to those too.
1497 let mut vars
: FxIndexMap
<Symbol
, (LiveNode
, Variable
, Vec
<(HirId
, Span
, Span
)>)> =
1500 pat
.each_binding(|_
, hir_id
, pat_sp
, ident
| {
// Pattern bindings have no live node of their own when checking a
// function body entry; fall back to the binding's own node.
1501 let ln
= entry_ln
.unwrap_or_else(|| self.live_node(hir_id
, pat_sp
));
1502 let var
= self.variable(hir_id
, ident
.span
);
1503 let id_and_sp
= (hir_id
, pat_sp
, ident
.span
);
// First occurrence wins the (ln, var) pair; later same-name bindings
// only contribute their ids/spans for the suggestions.
1504 vars
.entry(self.ir
.variable_name(var
))
1505 .and_modify(|(.., hir_ids_and_spans
)| hir_ids_and_spans
.push(id_and_sp
))
1506 .or_insert_with(|| (ln
, var
, vec
![id_and_sp
]));
1509 for (_
, (ln
, var
, hir_ids_and_spans
)) in vars
{
1510 if self.used_on_entry(ln
, var
) {
// Report against the authoritative (first) binding's id.
1511 let id
= hir_ids_and_spans
[0].0;
1513 hir_ids_and_spans
.into_iter().map(|(_
, _
, ident_span
)| ident_span
).collect();
1514 on_used_on_entry(spans
, id
, ln
, var
);
1516 self.report_unused(hir_ids_and_spans
, ln
, var
);
// NOTE(review): fragmented excerpt — the `fn report_unused(` header line
// itself (original ~1521-1522) is elided, along with several interior lines
// and closing braces; this span is the body of that function. Code kept
// byte-identical; comments only.
//
// Emits the unused-variable family of lints for a group of same-named
// bindings, choosing between "assigned but never used", "try ignoring the
// field" (shorthand struct patterns), and "prefix with an underscore".
1523 hir_ids_and_spans
: Vec
<(HirId
, Span
, Span
)>,
1527 let first_hir_id
= hir_ids_and_spans
[0].0;
// `self` is deliberately never warned about.
1529 if let Some(name
) = self.should_warn(var
).filter(|name
| name
!= "self") {
1530 // annoying: for parameters in funcs like `fn(x: i32)
1531 // {ret}`, there is only one node, so asking about
1532 // assigned_on_exit() is not meaningful.
1534 if ln
== self.exit_ln { false }
else { self.assigned_on_exit(ln, var) }
;
1537 self.ir
.tcx
.struct_span_lint_hir(
1538 lint
::builtin
::UNUSED_VARIABLES
,
1542 .map(|(_
, _
, ident_span
)| ident_span
)
1543 .collect
::<Vec
<_
>>(),
1545 lint
.build(&format
!("variable `{}` is assigned to, but never used", name
))
1546 .note(&format
!("consider using `_{}` instead", name
))
// Split the bindings into shorthand field patterns (`Foo { x }`) and
// everything else; the suggestion differs between the two.
1551 let (shorthands
, non_shorthands
): (Vec
<_
>, Vec
<_
>) =
1552 hir_ids_and_spans
.iter().copied().partition(|(hir_id
, _
, ident_span
)| {
1553 let var
= self.variable(*hir_id
, *ident_span
);
1554 self.ir
.variable_is_shorthand(var
)
1557 // If we have both shorthand and non-shorthand, prefer the "try ignoring
1558 // the field" message, and suggest `_` for the non-shorthands. If we only
1559 // have non-shorthand, then prefix with an underscore instead.
1560 if !shorthands
.is_empty() {
1561 let shorthands
= shorthands
1563 .map(|(_
, pat_span
, _
)| (pat_span
, format
!("{}: _", name
)))
1567 .map(|(_
, pat_span
, _
)| (pat_span
, "_".to_string())),
1569 .collect
::<Vec
<_
>>();
1571 self.ir
.tcx
.struct_span_lint_hir(
1572 lint
::builtin
::UNUSED_VARIABLES
,
1576 .map(|(_
, pat_span
, _
)| *pat_span
)
1577 .collect
::<Vec
<_
>>(),
1579 let mut err
= lint
.build(&format
!("unused variable: `{}`", name
));
1580 err
.multipart_suggestion(
1581 "try ignoring the field",
1583 Applicability
::MachineApplicable
,
// Only non-shorthand bindings: suggest the `_name` rename instead.
1589 let non_shorthands
= non_shorthands
1591 .map(|(_
, _
, ident_span
)| (ident_span
, format
!("_{}", name
)))
1592 .collect
::<Vec
<_
>>();
1594 self.ir
.tcx
.struct_span_lint_hir(
1595 lint
::builtin
::UNUSED_VARIABLES
,
1599 .map(|(_
, _
, ident_span
)| *ident_span
)
1600 .collect
::<Vec
<_
>>(),
1602 let mut err
= lint
.build(&format
!("unused variable: `{}`", name
))
));
1603 err
.multipart_suggestion(
1604 "if this is intentional, prefix it with an underscore",
1606 Applicability
::MachineApplicable
,
1616 fn warn_about_dead_assign(&self, spans
: Vec
<Span
>, hir_id
: HirId
, ln
: LiveNode
, var
: Variable
) {
1617 if !self.live_on_exit(ln
, var
) {
1618 self.report_unused_assign(hir_id
, spans
, var
, |name
| {
1619 format
!("value assigned to `{}` is never read", name
)
1624 fn report_unused_assign(
1629 message
: impl Fn(&str) -> String
,
1631 if let Some(name
) = self.should_warn(var
) {
1632 self.ir
.tcx
.struct_span_lint_hir(
1633 lint
::builtin
::UNUSED_ASSIGNMENTS
,
1637 lint
.build(&message(&name
))
1638 .help("maybe it is overwritten before being read?")