1 //! A solver for dataflow problems.
3 use std
::ffi
::OsString
;
5 use std
::path
::PathBuf
;
8 use rustc_data_structures
::work_queue
::WorkQueue
;
9 use rustc_hir
::def_id
::DefId
;
10 use rustc_index
::bit_set
::BitSet
;
11 use rustc_index
::vec
::IndexVec
;
12 use rustc_middle
::mir
::{self, traversal, BasicBlock, Location}
;
13 use rustc_middle
::ty
::{self, TyCtxt}
;
14 use rustc_span
::symbol
::{sym, Symbol}
;
17 use super::{Analysis, GenKillAnalysis, GenKillSet, Results}
;
18 use crate::util
::pretty
::dump_enabled
;
// NOTE(review): this chunk is a line-elided extraction; interior original lines
// (22-24, 26, 28, 31-32, 35-36) are missing, so the where-clause, the
// `tcx`/`def_id`/`analysis` fields referenced later (e.g. by the destructuring
// `let Engine { tcx, body, def_id, .. } = self`), and the closing brace are not
// visible here — confirm against the full file.
20 /// A solver for dataflow problems.
21 pub struct Engine
<'a
, 'tcx
, A
>
// Domain size of each per-block `BitSet` (see `BitSet::new_empty(self.bits_per_block)`
// in `iterate_to_fixpoint`).
25 bits_per_block
: usize,
// The MIR body being analyzed.
27 body
: &'a mir
::Body
<'tcx
>,
// Blocks whose unwind edges should NOT propagate state (see the
// `self.dead_unwinds.map_or(...)` checks in the propagation code).
29 dead_unwinds
: Option
<&'a BitSet
<BasicBlock
>>,
// Fixpoint state on entry to each basic block.
30 entry_sets
: IndexVec
<BasicBlock
, BitSet
<A
::Idx
>>,
33 /// Cached, cumulative transfer functions for each block.
34 trans_for_block
: Option
<IndexVec
<BasicBlock
, GenKillSet
<A
::Idx
>>>,
// NOTE(review): elided extraction — the `fn new_gen_kill(` header (original ~42-47)
// and several closing braces are missing from this view; the parameters
// `tcx`, `def_id`, `analysis` used below are declared on the elided lines.
37 impl<A
> Engine
<'a
, 'tcx
, A
>
39 A
: GenKillAnalysis
<'tcx
>,
41 /// Creates a new `Engine` to solve a gen-kill dataflow problem.
44 body
: &'a mir
::Body
<'tcx
>,
48 // If there are no back-edges in the control-flow graph, we only ever need to apply the
49 // transfer function for each block exactly once (assuming that we process blocks in RPO).
51 // In this case, there's no need to compute the block transfer functions ahead of time.
52 if !body
.is_cfg_cyclic() {
53 return Self::new(tcx
, body
, def_id
, analysis
, None
);
56 // Otherwise, compute and store the cumulative transfer function for each block.
// Start every block at the identity transfer function, then fold in each
// statement/terminator effect in program order.
58 let bits_per_block
= analysis
.bits_per_block(body
);
59 let mut trans_for_block
=
60 IndexVec
::from_elem(GenKillSet
::identity(bits_per_block
), body
.basic_blocks());
62 for (block
, block_data
) in body
.basic_blocks().iter_enumerated() {
63 let trans
= &mut trans_for_block
[block
];
// Accumulate "before" and "main" effects for each statement into the
// block's cumulative gen/kill set.
65 for (i
, statement
) in block_data
.statements
.iter().enumerate() {
66 let loc
= Location { block, statement_index: i }
;
67 analysis
.before_statement_effect(trans
, statement
, loc
);
68 analysis
.statement_effect(trans
, statement
, loc
);
// The terminator's location index is one past the last statement.
71 let terminator
= block_data
.terminator();
72 let loc
= Location { block, statement_index: block_data.statements.len() }
;
73 analysis
.before_terminator_effect(trans
, terminator
, loc
);
74 analysis
.terminator_effect(trans
, terminator
, loc
);
// Hand the cached transfer functions to the generic constructor.
77 Self::new(tcx
, body
, def_id
, analysis
, Some(trans_for_block
))
// NOTE(review): elided extraction — the where-clause for this impl, the
// `fn new_generic(` and `fn new(` header lines, and the trailing construction of
// `Engine { .. }` are missing from this view.
81 impl<A
> Engine
<'a
, 'tcx
, A
>
85 /// Creates a new `Engine` to solve a dataflow problem with an arbitrary transfer
88 /// Gen-kill problems should use `new_gen_kill`, which will coalesce transfer functions for
89 /// better performance.
92 body
: &'a mir
::Body
<'tcx
>,
// Generic problems carry no cached block transfer functions (`None`).
96 Self::new(tcx
, body
, def_id
, analysis
, None
)
101 body
: &'a mir
::Body
<'tcx
>,
104 trans_for_block
: Option
<IndexVec
<BasicBlock
, GenKillSet
<A
::Idx
>>>,
106 let bits_per_block
= analysis
.bits_per_block(body
);
// The bottom value of the lattice determines the initial entry set of every
// block: all-ones if `BOTTOM_VALUE` is `true`, otherwise all-zeroes.
108 let bottom_value_set
= if A
::BOTTOM_VALUE
{
109 BitSet
::new_filled(bits_per_block
)
111 BitSet
::new_empty(bits_per_block
)
// Every block starts at bottom; the analysis then seeds the start block.
114 let mut entry_sets
= IndexVec
::from_elem(bottom_value_set
, body
.basic_blocks());
115 analysis
.initialize_start_block(body
, &mut entry_sets
[mir
::START_BLOCK
]);
// Builder-style setter: consumes and returns `self` (NOTE(review): the
// `self` return line is elided from this view).
129 /// Signals that we do not want dataflow state to propagate across unwind edges for these
132 /// You must take care that `dead_unwinds` does not contain a `BasicBlock` that *can* actually
133 /// unwind during execution. Otherwise, your dataflow results will not be correct.
134 pub fn dead_unwinds(mut self, dead_unwinds
: &'a BitSet
<BasicBlock
>) -> Self {
135 self.dead_unwinds
= Some(dead_unwinds
);
// NOTE(review): elided extraction — the argument list of the
// `propagate_bits_into_graph_successors_of` call (original ~164-168) and
// the final `results` return are missing from this view.
139 /// Computes the fixpoint for this dataflow problem and returns it.
140 pub fn iterate_to_fixpoint(mut self) -> Results
<'tcx
, A
> {
// Scratch state reused for every block, to avoid re-allocating per iteration.
141 let mut temp_state
= BitSet
::new_empty(self.bits_per_block
);
143 let mut dirty_queue
: WorkQueue
<BasicBlock
> =
144 WorkQueue
::with_none(self.body
.basic_blocks().len());
// Seed the queue in reverse postorder so forward problems converge quickly.
146 for (bb
, _
) in traversal
::reverse_postorder(self.body
) {
147 dirty_queue
.insert(bb
);
150 // Add blocks that are not reachable from START_BLOCK to the work queue. These blocks will
151 // be processed after the ones added above.
// (`WorkQueue::insert` presumably deduplicates already-enqueued blocks —
// NOTE(review): confirm against `rustc_data_structures::work_queue`.)
152 for bb
in self.body
.basic_blocks().indices() {
153 dirty_queue
.insert(bb
);
// Classic worklist loop: recompute a block's exit state from its entry
// state and push changes into successors until nothing changes.
156 while let Some(bb
) = dirty_queue
.pop() {
157 let bb_data
= &self.body
[bb
];
158 let on_entry
= &self.entry_sets
[bb
];
160 temp_state
.overwrite(on_entry
);
161 self.apply_whole_block_effect(&mut temp_state
, bb
, bb_data
);
163 self.propagate_bits_into_graph_successors_of(
// Fixpoint reached: package the entry sets into `Results` and optionally
// dump them as graphviz. A dump failure is only a warning, never an error.
170 let Engine { tcx, body, def_id, trans_for_block, entry_sets, analysis, .. }
= self;
171 let results
= Results { analysis, entry_sets }
;
173 let res
= write_graphviz_results(tcx
, def_id
, body
, &results
, trans_for_block
);
174 if let Err(e
) = res
{
175 warn
!("Failed to write graphviz dataflow results: {}", e
);
// NOTE(review): elided extraction — the `&self`/`block` parameters and the
// early `return` after the cached-apply branch are on elided lines.
181 /// Applies the cumulative effect of an entire block, excluding the call return effect if one
183 fn apply_whole_block_effect(
185 state
: &mut BitSet
<A
::Idx
>,
187 block_data
: &mir
::BasicBlockData
<'tcx
>,
189 // Use the cached block transfer function if available.
190 if let Some(trans_for_block
) = &self.trans_for_block
{
191 trans_for_block
[block
].apply(state
);
195 // Otherwise apply effects one-by-one.
// Mirrors the accumulation order used when the cache is built in
// `new_gen_kill`: before-effect then main effect, statements then terminator.
197 for (statement_index
, statement
) in block_data
.statements
.iter().enumerate() {
198 let location
= Location { block, statement_index }
;
199 self.analysis
.apply_before_statement_effect(state
, statement
, location
);
200 self.analysis
.apply_statement_effect(state
, statement
, location
);
203 let terminator
= block_data
.terminator();
204 let location
= Location { block, statement_index: block_data.statements.len() }
;
205 self.analysis
.apply_before_terminator_effect(state
, terminator
, location
);
206 self.analysis
.apply_terminator_effect(state
, terminator
, location
);
// Pushes the block's exit state (`in_out`) into each successor's entry set,
// dispatching on the terminator kind. NOTE(review): elided extraction — the
// `Goto`/`SwitchInt` arm heads, several closing braces, and a few statements
// (e.g. the `discr.place()` line feeding `.and_then(...)`) are on elided lines.
209 fn propagate_bits_into_graph_successors_of(
211 in_out
: &mut BitSet
<A
::Idx
>,
212 (bb
, bb_data
): (BasicBlock
, &'a mir
::BasicBlockData
<'tcx
>),
213 dirty_list
: &mut WorkQueue
<BasicBlock
>,
215 use mir
::TerminatorKind
::*;
217 match bb_data
.terminator().kind
{
// Terminators with no successors: nothing to propagate.
218 Return
| Resume
| Abort
| GeneratorDrop
| Unreachable
=> {}
// Single-successor edges (non-unwinding assert/drop variants).
221 | Assert { target, cleanup: None, .. }
222 | Drop { target, location: _, unwind: None }
223 | DropAndReplace { target, value: _, location: _, unwind: None }
=> {
224 self.propagate_bits_into_entry_set_for(in_out
, target
, dirty_list
)
// Yield: propagate to the drop target unchanged, then apply the
// resume effect before propagating to the resume target.
227 Yield { resume: target, drop, resume_arg, .. }
=> {
228 if let Some(drop
) = drop
{
229 self.propagate_bits_into_entry_set_for(in_out
, drop
, dirty_list
);
232 self.analysis
.apply_yield_resume_effect(in_out
, target
, resume_arg
);
233 self.propagate_bits_into_entry_set_for(in_out
, target
, dirty_list
);
// Two-successor edges with an unwind path: the unwind edge is skipped
// when the block is listed in `dead_unwinds`.
236 Assert { target, cleanup: Some(unwind), .. }
237 | Drop { target, location: _, unwind: Some(unwind) }
238 | DropAndReplace { target, value: _, location: _, unwind: Some(unwind) }
=> {
239 self.propagate_bits_into_entry_set_for(in_out
, target
, dirty_list
);
240 if self.dead_unwinds
.map_or(true, |bbs
| !bbs
.contains(bb
)) {
241 self.propagate_bits_into_entry_set_for(in_out
, unwind
, dirty_list
);
245 SwitchInt { ref targets, ref values, ref discr, .. }
=> {
246 let Engine { tcx, body, .. }
= *self;
249 .and_then(|discr
| switch_on_enum_discriminant(tcx
, body
, bb_data
, discr
));
251 // If this is a switch on an enum discriminant, a custom effect may be applied
252 // along each outgoing edge.
253 Some((enum_place
, enum_def
)) => {
254 self.propagate_bits_into_enum_discriminant_switch_successors(
255 in_out
, bb
, enum_def
, enum_place
, dirty_list
, &*values
, &*targets
,
259 // Otherwise, it's just a normal `SwitchInt`, and every successor sees the same
262 for target
in targets
.iter().copied() {
263 self.propagate_bits_into_entry_set_for(&in_out
, target
, dirty_list
);
269 Call { cleanup, ref destination, ref func, ref args, .. }
=> {
270 if let Some(unwind
) = cleanup
{
271 if self.dead_unwinds
.map_or(true, |bbs
| !bbs
.contains(bb
)) {
272 self.propagate_bits_into_entry_set_for(in_out
, unwind
, dirty_list
);
276 if let Some((dest_place
, dest_bb
)) = *destination
{
277 // N.B.: This must be done *last*, otherwise the unwind path will see the call
279 self.analysis
.apply_call_return_effect(in_out
, bb
, func
, args
, dest_place
);
280 self.propagate_bits_into_entry_set_for(in_out
, dest_bb
, dirty_list
);
// False edges exist only for borrowck; both targets receive the state.
284 FalseEdges { real_target, imaginary_target }
=> {
285 self.propagate_bits_into_entry_set_for(in_out
, real_target
, dirty_list
);
286 self.propagate_bits_into_entry_set_for(in_out
, imaginary_target
, dirty_list
);
289 FalseUnwind { real_target, unwind }
=> {
290 self.propagate_bits_into_entry_set_for(in_out
, real_target
, dirty_list
);
291 if let Some(unwind
) = unwind
{
292 if self.dead_unwinds
.map_or(true, |bbs
| !bbs
.contains(bb
)) {
293 self.propagate_bits_into_entry_set_for(in_out
, unwind
, dirty_list
);
// Joins `in_out` into `bb`'s entry set; re-enqueues `bb` only when the join
// actually changed the set (the `if set_changed` guard is on an elided line —
// NOTE(review): confirm against the full file).
300 fn propagate_bits_into_entry_set_for(
302 in_out
: &BitSet
<A
::Idx
>,
304 dirty_queue
: &mut WorkQueue
<BasicBlock
>,
306 let entry_set
= &mut self.entry_sets
[bb
];
307 let set_changed
= self.analysis
.join(entry_set
, &in_out
);
309 dirty_queue
.insert(bb
);
// Applies a per-edge discriminant effect for `SwitchInt`s on enum
// discriminants, then propagates a per-target copy of the state.
// NOTE(review): elided extraction — the `bb`/`values` parameters and the
// argument list of `apply_discriminant_switch_effect` are on elided lines.
313 fn propagate_bits_into_enum_discriminant_switch_successors(
315 in_out
: &mut BitSet
<A
::Idx
>,
317 enum_def
: &'tcx ty
::AdtDef
,
318 enum_place
: mir
::Place
<'tcx
>,
319 dirty_list
: &mut WorkQueue
<BasicBlock
>,
321 targets
: &[BasicBlock
],
323 // MIR building adds discriminants to the `values` array in the same order as they
324 // are yielded by `AdtDef::discriminants`. We rely on this to match each
325 // discriminant in `values` to its corresponding variant in linear time.
326 let mut tmp
= BitSet
::new_empty(in_out
.domain_size());
327 let mut discriminants
= enum_def
.discriminants(self.tcx
);
328 for (value
, target
) in values
.iter().zip(targets
.iter().copied()) {
// `find` advances the shared iterator, so the whole loop is linear in the
// number of variants — relying on the ordering invariant above.
329 let (variant_idx
, _
) = discriminants
.find(|&(_
, discr
)| discr
.val
== *value
).expect(
330 "Order of `AdtDef::discriminants` differed from that of `SwitchInt::values`",
// Each edge gets its own copy of the state so per-variant effects
// don't leak between targets.
333 tmp
.overwrite(in_out
);
334 self.analysis
.apply_discriminant_switch_effect(
341 self.propagate_bits_into_entry_set_for(&tmp
, target
, dirty_list
);
346 // Propagate dataflow state along the "otherwise" edge.
// (`SwitchInt` targets always end with the otherwise block, hence `unwrap`.)
347 let otherwise
= targets
.last().copied().unwrap();
348 self.propagate_bits_into_entry_set_for(&in_out
, otherwise
, dirty_list
);
// NOTE(review): elided extraction — the `tcx` parameter and the fall-through
// `_ => None` arm / closing braces are on elided lines.
352 /// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
353 /// an enum discriminant.
355 /// We expect such blocks to have a call to `discriminant` as their last statement like so:
356 /// _42 = discriminant(_1)
357 /// SwitchInt(_42, ..)
359 /// If the basic block matches this pattern, this function returns the place corresponding to the
360 /// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
361 fn switch_on_enum_discriminant(
363 body
: &'mir mir
::Body
<'tcx
>,
364 block
: &'mir mir
::BasicBlockData
<'tcx
>,
365 switch_on
: mir
::Place
<'tcx
>,
366 ) -> Option
<(mir
::Place
<'tcx
>, &'tcx ty
::AdtDef
)> {
// Only matches when the block's *last* statement assigns the discriminant
// of some place to exactly the place the switch reads.
367 match block
.statements
.last().map(|stmt
| &stmt
.kind
) {
368 Some(mir
::StatementKind
::Assign(box (lhs
, mir
::Rvalue
::Discriminant(discriminated
))))
369 if *lhs
== switch_on
=>
371 match &discriminated
.ty(body
, tcx
).ty
.kind
{
372 ty
::Adt(def
, _
) => Some((*discriminated
, def
)),
374 // `Rvalue::Discriminant` is also used to get the active yield point for a
375 // generator, but we do not need edge-specific effects in that case. This may
376 // change in the future.
377 ty
::Generator(..) => None
,
379 t
=> bug
!("`discriminant` called on unexpected type {:?}", t
),
// NOTE(review): elided extraction — the `tcx`/`def_id` parameters, the
// where-clause, the `Ok(attrs)`/`Some(path)` match-arm heads, and the final
// `Ok(())` are on elided lines.
389 /// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
390 /// `rustc_mir` attributes.
391 fn write_graphviz_results
<A
>(
394 body
: &mir
::Body
<'tcx
>,
395 results
: &Results
<'tcx
, A
>,
396 block_transfer_functions
: Option
<IndexVec
<BasicBlock
, GenKillSet
<A
::Idx
>>>,
397 ) -> std
::io
::Result
<()>
401 let attrs
= match RustcMirAttrs
::parse(tcx
, def_id
) {
404 // Invalid `rustc_mir` attrs are reported in `RustcMirAttrs::parse`
405 Err(()) => return Ok(()),
// Resolve the output path: explicit attribute wins; otherwise fall back to
// `-Zdump-mir-dataflow` + the dump-mir directory; otherwise do nothing.
408 let path
= match attrs
.output_path(A
::NAME
) {
411 None
if tcx
.sess
.opts
.debugging_opts
.dump_mir_dataflow
412 && dump_enabled(tcx
, A
::NAME
, def_id
) =>
414 let mut path
= PathBuf
::from(&tcx
.sess
.opts
.debugging_opts
.dump_mir_dir
);
416 let item_name
= ty
::print
::with_forced_impl_filename_line(|| {
417 tcx
.def_path(def_id
).to_filename_friendly_no_crate()
419 path
.push(format
!("rustc.{}.{}.dot", item_name
, A
::NAME
));
423 None
=> return Ok(()),
426 let bits_per_block
= results
.analysis
.bits_per_block(body
);
// Choose the state formatter requested via `borrowck_graphviz_format`.
// `gen_kill` silently degrades to `SimpleDiff` when no cached transfer
// functions were supplied.
428 let mut formatter
: Box
<dyn graphviz
::StateFormatter
<'tcx
, _
>> = match attrs
.formatter
{
429 Some(sym
::two_phase
) => Box
::new(graphviz
::TwoPhaseDiff
::new(bits_per_block
)),
430 Some(sym
::gen_kill
) => {
431 if let Some(trans_for_block
) = block_transfer_functions
{
432 Box
::new(graphviz
::BlockTransferFunc
::new(body
, trans_for_block
))
434 Box
::new(graphviz
::SimpleDiff
::new(bits_per_block
))
438 // Default to the `SimpleDiff` output style.
439 _
=> Box
::new(graphviz
::SimpleDiff
::new(bits_per_block
)),
// Render to an in-memory buffer first, then create the directory and write.
442 debug
!("printing dataflow results for {:?} to {}", def_id
, path
.display());
443 let mut buf
= Vec
::new();
445 let graphviz
= graphviz
::Formatter
::new(body
, def_id
, results
, &mut *formatter
);
446 dot
::render_opts(&graphviz
, &mut buf
, &[dot
::RenderOption
::Monospace
])?
;
448 if let Some(parent
) = path
.parent() {
449 fs
::create_dir_all(parent
)?
;
451 fs
::write(&path
, buf
)?
;
// Parsed `#[rustc_mir(...)]` attribute values controlling graphviz output.
// NOTE(review): a `#[derive(Default)]` presumably precedes this on an elided
// line (`RustcMirAttrs::default()` is called in `parse`) — confirm.
457 struct RustcMirAttrs
{
// Output path from `borrowck_graphviz_postflow` (basename carries the suffix).
458 basename_and_suffix
: Option
<PathBuf
>,
// Formatter symbol from `borrowck_graphviz_format` (`gen_kill` or `two_phase`).
459 formatter
: Option
<Symbol
>,
// Parses all `#[rustc_mir(...)]` attributes on `def_id` into a `RustcMirAttrs`.
// Errors are reported via `span_err` and folded into the returned `Result` so
// all attributes are checked even after a failure.
// NOTE(review): elided extraction — the `Ok`/`Err` bodies inside the closures
// and the final `result.map(|()| ret)`-style return are on elided lines.
463 fn parse(tcx
: TyCtxt
<'tcx
>, def_id
: DefId
) -> Result
<Self, ()> {
464 let attrs
= tcx
.get_attrs(def_id
);
466 let mut result
= Ok(());
467 let mut ret
= RustcMirAttrs
::default();
// Flatten every `#[rustc_mir(a, b, ...)]` list into individual meta items.
469 let rustc_mir_attrs
= attrs
471 .filter(|attr
| attr
.check_name(sym
::rustc_mir
))
472 .flat_map(|attr
| attr
.meta_item_list().into_iter().flat_map(|v
| v
.into_iter()));
474 for attr
in rustc_mir_attrs
{
475 let attr_result
= if attr
.check_name(sym
::borrowck_graphviz_postflow
) {
476 Self::set_field(&mut ret
.basename_and_suffix
, tcx
, &attr
, |s
| {
477 let path
= PathBuf
::from(s
.to_string());
// The filename component is required here; `output_path` later
// relies on it via `file_name().unwrap()`.
478 match path
.file_name() {
481 tcx
.sess
.span_err(attr
.span(), "path must end in a filename");
486 } else if attr
.check_name(sym
::borrowck_graphviz_format
) {
487 Self::set_field(&mut ret
.formatter
, tcx
, &attr
, |s
| match s
{
488 sym
::gen_kill
| sym
::two_phase
=> Ok(s
),
490 tcx
.sess
.span_err(attr
.span(), "unknown formatter");
// Accumulate failures without aborting the loop.
498 result
= result
.and(attr_result
);
// Helper used by `parse`: stores `mapper(value)` into `field`, rejecting
// duplicate attributes and attributes without a `= "value"` argument.
// NOTE(review): elided extraction — the `fn set_field<T>(` header, the
// duplicate-check `if field.is_some()`-style guard, the `tcx.sess` receiver
// lines, and the `Ok`/`Err` returns are on elided lines.
505 field
: &mut Option
<T
>,
507 attr
: &ast
::NestedMetaItem
,
508 mapper
: impl FnOnce(Symbol
) -> Result
<T
, ()>,
509 ) -> Result
<(), ()> {
512 .span_err(attr
.span(), &format
!("duplicate values for `{}`", attr
.name_or_empty()));
517 if let Some(s
) = attr
.value_str() {
518 *field
= Some(mapper(s
)?
);
522 .span_err(attr
.span(), &format
!("`{}` requires an argument", attr
.name_or_empty()));
// NOTE(review): elided extraction — the `Some(ret)` return is on an elided line.
527 /// Returns the path where dataflow results should be written, or `None`
528 /// `borrowck_graphviz_postflow` was not specified.
530 /// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
532 /// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
533 fn output_path(&self, analysis_name
: &str) -> Option
<PathBuf
> {
534 let mut ret
= self.basename_and_suffix
.as_ref().cloned()?
;
535 let suffix
= ret
.file_name().unwrap(); // Checked when parsing attrs
// Prepend the analysis name to the stored suffix to build the final filename.
537 let mut file_name
: OsString
= analysis_name
.into();
539 file_name
.push(suffix
);
540 ret
.set_file_name(file_name
);