1 //! A solver for dataflow problems.
3 use std
::ffi
::OsString
;
5 use std
::path
::PathBuf
;
7 use rustc
::mir
::{self, traversal, BasicBlock, Location}
;
8 use rustc
::ty
::{self, TyCtxt}
;
10 use rustc_data_structures
::work_queue
::WorkQueue
;
11 use rustc_hir
::def_id
::DefId
;
12 use rustc_index
::bit_set
::BitSet
;
13 use rustc_index
::vec
::IndexVec
;
14 use rustc_span
::symbol
::{sym, Symbol}
;
17 use super::{Analysis, GenKillAnalysis, GenKillSet, Results}
;
19 /// A solver for dataflow problems.
// NOTE(review): this source is garbled — each original line is split across
// several physical lines and (per the embedded numbering) some original lines
// are missing entirely, e.g. this struct's `where` clause and the fields
// (`tcx`, `def_id`, `analysis`) that later methods read via `self`.
// Confirm against the upstream rustc source.
20 pub struct Engine
<'a
, 'tcx
, A
>
// Width of every per-block bit-set; computed from `analysis.bits_per_block(body)`.
24 bits_per_block
: usize,
// The MIR body being analyzed.
26 body
: &'a mir
::Body
<'tcx
>,
// Blocks whose unwind edges should NOT propagate dataflow state
// (set via the `dead_unwinds` builder method).
28 dead_unwinds
: Option
<&'a BitSet
<BasicBlock
>>,
// Dataflow state on entry to each basic block; this becomes the fixpoint output.
29 entry_sets
: IndexVec
<BasicBlock
, BitSet
<A
::Idx
>>,
32 /// Cached, cumulative transfer functions for each block.
// `Some` only for gen-kill problems on cyclic CFGs (see `new_gen_kill`).
33 trans_for_block
: Option
<IndexVec
<BasicBlock
, GenKillSet
<A
::Idx
>>>,
36 impl<A
> Engine
<'a
, 'tcx
, A
>
38 A
: GenKillAnalysis
<'tcx
>,
40 /// Creates a new `Engine` to solve a gen-kill dataflow problem.
// NOTE(review): the rest of this signature (presumably `tcx`, `def_id`,
// `analysis` parameters and the return type) is missing from this garbled
// source; those names are used below.
43 body
: &'a mir
::Body
<'tcx
>,
47 // If there are no back-edges in the control-flow graph, we only ever need to apply the
48 // transfer function for each block exactly once (assuming that we process blocks in RPO).
50 // In this case, there's no need to compute the block transfer functions ahead of time.
51 if !body
.is_cfg_cyclic() {
52 return Self::new(tcx
, body
, def_id
, analysis
, None
);
55 // Otherwise, compute and store the cumulative transfer function for each block.
// Start every block's transfer function at the identity, then fold each
// statement/terminator effect into it below.
57 let bits_per_block
= analysis
.bits_per_block(body
);
58 let mut trans_for_block
=
59 IndexVec
::from_elem(GenKillSet
::identity(bits_per_block
), body
.basic_blocks());
61 for (block
, block_data
) in body
.basic_blocks().iter_enumerated() {
62 let trans
= &mut trans_for_block
[block
];
// Accumulate the "before" and main effect of every statement, in order.
64 for (i
, statement
) in block_data
.statements
.iter().enumerate() {
65 let loc
= Location { block, statement_index: i }
;
66 analysis
.before_statement_effect(trans
, statement
, loc
);
67 analysis
.statement_effect(trans
, statement
, loc
);
// The terminator's location is one past the last statement.
70 let terminator
= block_data
.terminator();
71 let loc
= Location { block, statement_index: block_data.statements.len() }
;
72 analysis
.before_terminator_effect(trans
, terminator
, loc
);
73 analysis
.terminator_effect(trans
, terminator
, loc
);
// Hand the cached per-block transfer functions to the general constructor.
76 Self::new(tcx
, body
, def_id
, analysis
, Some(trans_for_block
))
80 impl<A
> Engine
<'a
, 'tcx
, A
>
// NOTE(review): the trait bound for this impl (presumably
// `where A: Analysis<'tcx>`, cf. the `GenKillAnalysis` bound on the impl
// above) is missing from this garbled source.
84 /// Creates a new `Engine` to solve a dataflow problem with an arbitrary transfer
87 /// Gen-kill problems should use `new_gen_kill`, which will coalesce transfer functions for
88 /// better performance.
// NOTE(review): the `fn new_generic(` line and its other parameters are
// missing from this garbled source.
91 body
: &'a mir
::Body
<'tcx
>,
// No cached per-block transfer functions in the generic case.
95 Self::new(tcx
, body
, def_id
, analysis
, None
)
// Shared private constructor used by both `new_gen_kill` and `new_generic`.
// NOTE(review): the `fn new(` line and several parameters (`tcx`, `def_id`,
// `analysis`) are missing from this garbled source.
100 body
: &'a mir
::Body
<'tcx
>,
103 trans_for_block
: Option
<IndexVec
<BasicBlock
, GenKillSet
<A
::Idx
>>>,
105 let bits_per_block
= analysis
.bits_per_block(body
);
// The lattice's bottom value: all-ones when `A::BOTTOM_VALUE` is `true`,
// otherwise all-zeros. Every block's entry set starts at bottom.
107 let bottom_value_set
= if A
::BOTTOM_VALUE
{
108 BitSet
::new_filled(bits_per_block
)
110 BitSet
::new_empty(bits_per_block
)
113 let mut entry_sets
= IndexVec
::from_elem(bottom_value_set
, body
.basic_blocks());
// Let the analysis seed the entry state of the start block.
114 analysis
.initialize_start_block(body
, &mut entry_sets
[mir
::START_BLOCK
]);
128 /// Signals that we do not want dataflow state to propagate across unwind edges for these
131 /// You must take care that `dead_unwinds` does not contain a `BasicBlock` that *can* actually
132 /// unwind during execution. Otherwise, your dataflow results will not be correct.
// Builder-style setter: consumes and returns `self`.
133 pub fn dead_unwinds(mut self, dead_unwinds
: &'a BitSet
<BasicBlock
>) -> Self {
134 self.dead_unwinds
= Some(dead_unwinds
);
138 /// Computes the fixpoint for this dataflow problem and returns it.
139 pub fn iterate_to_fixpoint(mut self) -> Results
<'tcx
, A
> {
// Scratch bit-set reused for every block so the worklist loop allocates no
// per-iteration state.
140 let mut temp_state
= BitSet
::new_empty(self.bits_per_block
);
142 let mut dirty_queue
: WorkQueue
<BasicBlock
> =
143 WorkQueue
::with_none(self.body
.basic_blocks().len());
// Seed the worklist in reverse post-order so acyclic CFGs converge in a
// single pass (cf. the comment in `new_gen_kill`).
145 for (bb
, _
) in traversal
::reverse_postorder(self.body
) {
146 dirty_queue
.insert(bb
);
149 // Add blocks that are not reachable from START_BLOCK to the work queue. These blocks will
150 // be processed after the ones added above.
151 for bb
in self.body
.basic_blocks().indices() {
152 dirty_queue
.insert(bb
);
// Worklist fixpoint loop: take a dirty block, run its whole-block transfer
// function on a copy of its entry state, then push the result into its
// successors (re-enqueueing any successor whose entry set changed).
155 while let Some(bb
) = dirty_queue
.pop() {
156 let bb_data
= &self.body
[bb
];
157 let on_entry
= &self.entry_sets
[bb
];
159 temp_state
.overwrite(on_entry
);
160 self.apply_whole_block_effect(&mut temp_state
, bb
, bb_data
);
// NOTE(review): the arguments to this call (presumably `&mut temp_state`,
// `(bb, bb_data)`, `&mut dirty_queue`) are missing from this garbled source.
162 self.propagate_bits_into_graph_successors_of(
// Dataflow has converged; package the entry sets as the results.
169 let Engine { tcx, body, def_id, trans_for_block, entry_sets, analysis, .. }
= self;
170 let results
= Results { analysis, entry_sets }
;
// Graphviz output is best-effort: failures are logged, never propagated.
172 let res
= write_graphviz_results(tcx
, def_id
, body
, &results
, trans_for_block
);
173 if let Err(e
) = res
{
174 warn
!("Failed to write graphviz dataflow results: {}", e
);
180 /// Applies the cumulative effect of an entire block, excluding the call return effect if one
182 fn apply_whole_block_effect(
184 state
: &mut BitSet
<A
::Idx
>,
186 block_data
: &mir
::BasicBlockData
<'tcx
>,
188 // Use the cached block transfer function if available.
189 if let Some(trans_for_block
) = &self.trans_for_block
{
190 trans_for_block
[block
].apply(state
);
// NOTE(review): an early `return` after applying the cached transfer
// function (original lines 191-193) appears to be missing from this garbled
// source; without it the effects would be applied twice.
194 // Otherwise apply effects one-by-one.
196 for (statement_index
, statement
) in block_data
.statements
.iter().enumerate() {
197 let location
= Location { block, statement_index }
;
198 self.analysis
.apply_before_statement_effect(state
, statement
, location
);
199 self.analysis
.apply_statement_effect(state
, statement
, location
);
// The terminator's effects use the location one past the last statement.
202 let terminator
= block_data
.terminator();
203 let location
= Location { block, statement_index: block_data.statements.len() }
;
204 self.analysis
.apply_before_terminator_effect(state
, terminator
, location
);
205 self.analysis
.apply_terminator_effect(state
, terminator
, location
);
// Pushes the block's exit state (`in_out`) into the entry sets of each CFG
// successor of `bb`, honoring `dead_unwinds` and any per-edge effects.
208 fn propagate_bits_into_graph_successors_of(
210 in_out
: &mut BitSet
<A
::Idx
>,
211 (bb
, bb_data
): (BasicBlock
, &'a mir
::BasicBlockData
<'tcx
>),
212 dirty_list
: &mut WorkQueue
<BasicBlock
>,
214 use mir
::TerminatorKind
::*;
216 match bb_data
.terminator().kind
{
// Terminators with no successors: nothing to propagate.
217 Return
| Resume
| Abort
| GeneratorDrop
| Unreachable
=> {}
// Single-successor edges (no unwind/drop path taken).
// NOTE(review): the first variant of this or-pattern (original lines
// 218-219, presumably `Goto { target }`) is missing from this garbled
// source.
220 | Assert { target, cleanup: None, .. }
221 | Yield { resume: target, drop: None, .. }
222 | Drop { target, location: _, unwind: None }
223 | DropAndReplace { target, value: _, location: _, unwind: None }
=> {
224 self.propagate_bits_into_entry_set_for(in_out
, target
, dirty_list
)
// A yield with a drop path propagates the same state to both successors.
227 Yield { resume: target, drop: Some(drop), .. }
=> {
228 self.propagate_bits_into_entry_set_for(in_out
, target
, dirty_list
);
229 self.propagate_bits_into_entry_set_for(in_out
, drop
, dirty_list
);
// Terminators with an unwind edge: the unwind successor only receives state
// if `bb` is not in the user-supplied `dead_unwinds` set.
232 Assert { target, cleanup: Some(unwind), .. }
233 | Drop { target, location: _, unwind: Some(unwind) }
234 | DropAndReplace { target, value: _, location: _, unwind: Some(unwind) }
=> {
235 self.propagate_bits_into_entry_set_for(in_out
, target
, dirty_list
);
236 if self.dead_unwinds
.map_or(true, |bbs
| !bbs
.contains(bb
)) {
237 self.propagate_bits_into_entry_set_for(in_out
, unwind
, dirty_list
);
// A `SwitchInt` may be a switch on an enum discriminant, in which case a
// per-variant effect can be applied along each outgoing edge.
241 SwitchInt { ref targets, ref values, ref discr, .. }
=> {
242 let Engine { tcx, body, .. }
= *self;
// NOTE(review): the expression this `.and_then` chains from (original
// lines 243-244, presumably binding `enum_` from `discr.place()`) is
// missing from this garbled source.
245 .and_then(|discr
| switch_on_enum_discriminant(tcx
, body
, bb_data
, discr
));
247 // If this is a switch on an enum discriminant, a custom effect may be applied
248 // along each outgoing edge.
249 Some((enum_place
, enum_def
)) => {
250 self.propagate_bits_into_enum_discriminant_switch_successors(
251 in_out
, bb
, enum_def
, enum_place
, dirty_list
, &*values
, &*targets
,
255 // Otherwise, it's just a normal `SwitchInt`, and every successor sees the same
258 for target
in targets
.iter().copied() {
259 self.propagate_bits_into_entry_set_for(&in_out
, target
, dirty_list
);
// Calls: the unwind edge must NOT observe the call-return effect, so it is
// handled first, before `apply_call_return_effect` mutates `in_out`.
265 Call { cleanup, ref destination, ref func, ref args, .. }
=> {
266 if let Some(unwind
) = cleanup
{
267 if self.dead_unwinds
.map_or(true, |bbs
| !bbs
.contains(bb
)) {
268 self.propagate_bits_into_entry_set_for(in_out
, unwind
, dirty_list
);
272 if let Some((ref dest_place
, dest_bb
)) = *destination
{
273 // N.B.: This must be done *last*, otherwise the unwind path will see the call
275 self.analysis
.apply_call_return_effect(in_out
, bb
, func
, args
, dest_place
);
276 self.propagate_bits_into_entry_set_for(in_out
, dest_bb
, dirty_list
);
// Both the real and imaginary targets receive the same state.
280 FalseEdges { real_target, imaginary_target }
=> {
281 self.propagate_bits_into_entry_set_for(in_out
, real_target
, dirty_list
);
282 self.propagate_bits_into_entry_set_for(in_out
, imaginary_target
, dirty_list
);
// Like the unwind cases above, gated on `dead_unwinds`.
285 FalseUnwind { real_target, unwind }
=> {
286 self.propagate_bits_into_entry_set_for(in_out
, real_target
, dirty_list
);
287 if let Some(unwind
) = unwind
{
288 if self.dead_unwinds
.map_or(true, |bbs
| !bbs
.contains(bb
)) {
289 self.propagate_bits_into_entry_set_for(in_out
, unwind
, dirty_list
);
// Joins `in_out` into `bb`'s entry set; when the join changes the entry set,
// `bb` must be re-enqueued so it is reprocessed.
296 fn propagate_bits_into_entry_set_for(
298 in_out
: &BitSet
<A
::Idx
>,
300 dirty_queue
: &mut WorkQueue
<BasicBlock
>,
302 let entry_set
= &mut self.entry_sets
[bb
];
303 let set_changed
= self.analysis
.join(entry_set
, &in_out
);
// NOTE(review): the `if set_changed {` guard (original line 304) appears to
// be missing from this garbled source; the insert below should be
// conditional on the join having changed the entry set, otherwise the
// worklist never drains.
305 dirty_queue
.insert(bb
);
// Propagates state along each edge of a `SwitchInt` on an enum discriminant,
// letting the analysis apply a per-variant effect on each edge.
309 fn propagate_bits_into_enum_discriminant_switch_successors(
311 in_out
: &mut BitSet
<A
::Idx
>,
313 enum_def
: &'tcx ty
::AdtDef
,
314 enum_place
: &mir
::Place
<'tcx
>,
315 dirty_list
: &mut WorkQueue
<BasicBlock
>,
317 targets
: &[BasicBlock
],
319 // MIR building adds discriminants to the `values` array in the same order as they
320 // are yielded by `AdtDef::discriminants`. We rely on this to match each
321 // discriminant in `values` to its corresponding variant in linear time.
// Scratch set so the per-edge effect does not clobber `in_out`, which is
// still needed for the remaining edges and the "otherwise" edge.
322 let mut tmp
= BitSet
::new_empty(in_out
.domain_size());
323 let mut discriminants
= enum_def
.discriminants(self.tcx
);
324 for (value
, target
) in values
.iter().zip(targets
.iter().copied()) {
// Advance the shared discriminant iterator to the variant whose
// discriminant equals this switch value (panics if ordering differs).
325 let (variant_idx
, _
) = discriminants
.find(|&(_
, discr
)| discr
.val
== *value
).expect(
326 "Order of `AdtDef::discriminants` differed from that of `SwitchInt::values`",
329 tmp
.overwrite(in_out
);
330 self.analysis
.apply_discriminant_switch_effect(
// NOTE(review): the arguments to `apply_discriminant_switch_effect`
// (original lines 331-336, presumably `&mut tmp`, `bb`, `enum_place`,
// `variant_idx`) are missing from this garbled source.
337 self.propagate_bits_into_entry_set_for(&tmp
, target
, dirty_list
);
342 // Propagate dataflow state along the "otherwise" edge.
343 let otherwise
= targets
.last().copied().unwrap();
344 self.propagate_bits_into_entry_set_for(&in_out
, otherwise
, dirty_list
);
348 /// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
349 /// an enum discriminant.
351 /// We expect such blocks to have a call to `discriminant` as their last statement like so:
352 /// _42 = discriminant(_1)
353 /// SwitchInt(_42, ..)
355 /// If the basic block matches this pattern, this function returns the place corresponding to the
356 /// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
// NOTE(review): the `tcx` parameter (original line 358) is missing from this
// garbled source; it is used in the body below.
357 fn switch_on_enum_discriminant(
359 body
: &'mir mir
::Body
<'tcx
>,
360 block
: &'mir mir
::BasicBlockData
<'tcx
>,
361 switch_on
: &mir
::Place
<'tcx
>,
362 ) -> Option
<(&'mir mir
::Place
<'tcx
>, &'tcx ty
::AdtDef
)> {
// Only the block's last statement can feed the terminator's switch operand.
363 match block
.statements
.last().map(|stmt
| &stmt
.kind
) {
// Match `lhs = discriminant(place)` where `lhs` is exactly the switched-on
// place.
364 Some(mir
::StatementKind
::Assign(box (lhs
, mir
::Rvalue
::Discriminant(discriminated
))))
365 if lhs
== switch_on
=>
367 match &discriminated
.ty(body
, tcx
).ty
.kind
{
368 ty
::Adt(def
, _
) => Some((discriminated
, def
)),
370 // `Rvalue::Discriminant` is also used to get the active yield point for a
371 // generator, but we do not need edge-specific effects in that case. This may
372 // change in the future.
373 ty
::Generator(..) => None
,
// Any other type here is a MIR invariant violation.
375 t
=> bug
!("`discriminant` called on unexpected type {:?}", t
),
385 /// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
386 /// `rustc_mir` attributes.
387 fn write_graphviz_results
<A
>(
// NOTE(review): the `tcx` and `def_id` parameters (original lines 388-389)
// are missing from this garbled source; both are used in the body below.
390 body
: &mir
::Body
<'tcx
>,
391 results
: &Results
<'tcx
, A
>,
392 block_transfer_functions
: Option
<IndexVec
<BasicBlock
, GenKillSet
<A
::Idx
>>>,
393 ) -> std
::io
::Result
<()>
// Bail out silently unless the user asked for output via `#[rustc_mir(...)]`.
397 let attrs
= match RustcMirAttrs
::parse(tcx
, def_id
) {
400 // Invalid `rustc_mir` attrs will be reported using `span_err`.
401 Err(()) => return Ok(()),
404 let path
= match attrs
.output_path(A
::NAME
) {
406 None
=> return Ok(()),
409 let bits_per_block
= results
.analysis
.bits_per_block(body
);
// Pick the requested state formatter; `gen_kill` needs the cached per-block
// transfer functions and silently falls back to `SimpleDiff` without them.
411 let mut formatter
: Box
<dyn graphviz
::StateFormatter
<'tcx
, _
>> = match attrs
.formatter
{
412 Some(sym
::two_phase
) => Box
::new(graphviz
::TwoPhaseDiff
::new(bits_per_block
)),
413 Some(sym
::gen_kill
) => {
414 if let Some(trans_for_block
) = block_transfer_functions
{
415 Box
::new(graphviz
::BlockTransferFunc
::new(body
, trans_for_block
))
417 Box
::new(graphviz
::SimpleDiff
::new(bits_per_block
))
421 // Default to the `SimpleDiff` output style.
422 _
=> Box
::new(graphviz
::SimpleDiff
::new(bits_per_block
)),
425 debug
!("printing dataflow results for {:?} to {}", def_id
, path
.display());
// Render the graph into an in-memory buffer, then write it out in one go.
426 let mut buf
= Vec
::new();
428 let graphviz
= graphviz
::Formatter
::new(body
, def_id
, results
, &mut *formatter
);
429 dot
::render_opts(&graphviz
, &mut buf
, &[dot
::RenderOption
::Monospace
])?
;
430 fs
::write(&path
, buf
)?
;
// Parsed `#[rustc_mir(...)]` options controlling graphviz dataflow output.
435 struct RustcMirAttrs
{
// Path from `borrowck_graphviz_postflow`; its file-name component is later
// combined with the analysis name (see `output_path`).
436 basename_and_suffix
: Option
<PathBuf
>,
// Output style from `borrowck_graphviz_format` (`gen_kill` or `two_phase`).
437 formatter
: Option
<Symbol
>,
// Parses the `#[rustc_mir(...)]` attributes attached to `def_id`.
// Returns `Err(())` if any attribute was malformed (errors are reported via
// `span_err`, not the return value).
441 fn parse(tcx
: TyCtxt
<'tcx
>, def_id
: DefId
) -> Result
<Self, ()> {
442 let attrs
= tcx
.get_attrs(def_id
);
// Accumulate an overall result; any malformed attribute turns it into `Err`.
444 let mut result
= Ok(());
445 let mut ret
= RustcMirAttrs
::default();
// Flatten all list items of every `#[rustc_mir(...)]` attribute on this item.
447 let rustc_mir_attrs
= attrs
449 .filter(|attr
| attr
.check_name(sym
::rustc_mir
))
450 .flat_map(|attr
| attr
.meta_item_list().into_iter().flat_map(|v
| v
.into_iter()));
452 for attr
in rustc_mir_attrs
{
453 let attr_result
= if attr
.check_name(sym
::borrowck_graphviz_postflow
) {
// The argument must be a path ending in a file name; that name becomes the
// basename-and-suffix combined with the analysis name in `output_path`.
454 Self::set_field(&mut ret
.basename_and_suffix
, tcx
, &attr
, |s
| {
455 let path
= PathBuf
::from(s
.to_string());
456 match path
.file_name() {
// NOTE(review): the success arm of this match (original lines 457-458,
// presumably `Some(_) => Ok(path)`) is missing from this garbled source.
459 tcx
.sess
.span_err(attr
.span(), "path must end in a filename");
464 } else if attr
.check_name(sym
::borrowck_graphviz_format
) {
// Only `gen_kill` and `two_phase` are accepted formatter names.
465 Self::set_field(&mut ret
.formatter
, tcx
, &attr
, |s
| match s
{
466 sym
::gen_kill
| sym
::two_phase
=> Ok(s
),
468 tcx
.sess
.span_err(attr
.span(), "unknown formatter");
476 result
= result
.and(attr_result
);
// Helper for `parse`: validates and stores a single attribute value into
// `field`, reporting duplicate or missing arguments via `span_err`.
// NOTE(review): the `fn set_field<T>(` line and the `tcx` parameter
// (original lines 481-482, 484) are missing from this garbled source.
483 field
: &mut Option
<T
>,
485 attr
: &ast
::NestedMetaItem
,
486 mapper
: impl FnOnce(Symbol
) -> Result
<T
, ()>,
487 ) -> Result
<(), ()> {
// Reject a second occurrence of the same attribute key.
// NOTE(review): the guard and `tcx.sess` receiver lines (original 488-489)
// are missing from this garbled source.
490 .span_err(attr
.span(), &format
!("duplicate values for `{}`", attr
.name_or_empty()));
// Store the mapped value when the attribute carries a `= "value"` argument.
495 if let Some(s
) = attr
.value_str() {
496 *field
= Some(mapper(s
)?
);
// Attribute present but without an argument: report an error.
500 .span_err(attr
.span(), &format
!("`{}` requires an argument", attr
.name_or_empty()));
505 /// Returns the path where dataflow results should be written, or `None`
506 /// `borrowck_graphviz_postflow` was not specified.
508 /// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
510 /// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
511 fn output_path(&self, analysis_name
: &str) -> Option
<PathBuf
> {
512 let mut ret
= self.basename_and_suffix
.as_ref().cloned()?
;
513 let suffix
= ret
.file_name().unwrap(); // Checked when parsing attrs
// Build "<analysis_name>_<suffix>" as the final file name.
// NOTE(review): the line appending the `_` separator (original line 516)
// appears to be missing from this garbled source.
515 let mut file_name
: OsString
= analysis_name
.into();
517 file_name
.push(suffix
);
518 ret
.set_file_name(file_name
);