// src/librustc_mir/dataflow/generic/engine.rs (rustc 1.43.0)
1 //! A solver for dataflow problems.
2
3 use std::ffi::OsString;
4 use std::fs;
5 use std::path::PathBuf;
6
7 use rustc::mir::{self, traversal, BasicBlock, Location};
8 use rustc::ty::{self, TyCtxt};
9 use rustc_ast::ast;
10 use rustc_data_structures::work_queue::WorkQueue;
11 use rustc_hir::def_id::DefId;
12 use rustc_index::bit_set::BitSet;
13 use rustc_index::vec::IndexVec;
14 use rustc_span::symbol::{sym, Symbol};
15
16 use super::graphviz;
17 use super::{Analysis, GenKillAnalysis, GenKillSet, Results};
18
/// A solver for dataflow problems.
pub struct Engine<'a, 'tcx, A>
where
    A: Analysis<'tcx>,
{
    // Size of each per-block `BitSet`: one bit per element tracked by the analysis.
    bits_per_block: usize,
    tcx: TyCtxt<'tcx>,
    body: &'a mir::Body<'tcx>,
    def_id: DefId,
    // Blocks whose unwind edges must NOT propagate state; see `dead_unwinds()`.
    dead_unwinds: Option<&'a BitSet<BasicBlock>>,
    // Dataflow state on entry to each basic block; refined by `join` until fixpoint.
    entry_sets: IndexVec<BasicBlock, BitSet<A::Idx>>,
    analysis: A,

    /// Cached, cumulative transfer functions for each block.
    trans_for_block: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
}
35
impl<A> Engine<'a, 'tcx, A>
where
    A: GenKillAnalysis<'tcx>,
{
    /// Creates a new `Engine` to solve a gen-kill dataflow problem.
    pub fn new_gen_kill(
        tcx: TyCtxt<'tcx>,
        body: &'a mir::Body<'tcx>,
        def_id: DefId,
        analysis: A,
    ) -> Self {
        // If there are no back-edges in the control-flow graph, we only ever need to apply the
        // transfer function for each block exactly once (assuming that we process blocks in RPO).
        //
        // In this case, there's no need to compute the block transfer functions ahead of time.
        if !body.is_cfg_cyclic() {
            return Self::new(tcx, body, def_id, analysis, None);
        }

        // Otherwise, compute and store the cumulative transfer function for each block.

        let bits_per_block = analysis.bits_per_block(body);
        let mut trans_for_block =
            IndexVec::from_elem(GenKillSet::identity(bits_per_block), body.basic_blocks());

        for (block, block_data) in body.basic_blocks().iter_enumerated() {
            let trans = &mut trans_for_block[block];

            // Fold each statement's "before" and main effects into the block's cumulative
            // transfer function, in program order — the same order `apply_whole_block_effect`
            // would apply them one-by-one.
            for (i, statement) in block_data.statements.iter().enumerate() {
                let loc = Location { block, statement_index: i };
                analysis.before_statement_effect(trans, statement, loc);
                analysis.statement_effect(trans, statement, loc);
            }

            // The terminator's `statement_index` is one past the last statement.
            let terminator = block_data.terminator();
            let loc = Location { block, statement_index: block_data.statements.len() };
            analysis.before_terminator_effect(trans, terminator, loc);
            analysis.terminator_effect(trans, terminator, loc);
        }

        Self::new(tcx, body, def_id, analysis, Some(trans_for_block))
    }
}
79
impl<A> Engine<'a, 'tcx, A>
where
    A: Analysis<'tcx>,
{
    /// Creates a new `Engine` to solve a dataflow problem with an arbitrary transfer
    /// function.
    ///
    /// Gen-kill problems should use `new_gen_kill`, which will coalesce transfer functions for
    /// better performance.
    pub fn new_generic(
        tcx: TyCtxt<'tcx>,
        body: &'a mir::Body<'tcx>,
        def_id: DefId,
        analysis: A,
    ) -> Self {
        // No precomputed per-block transfer functions; effects will be applied
        // statement-by-statement in `apply_whole_block_effect`.
        Self::new(tcx, body, def_id, analysis, None)
    }

    // Shared constructor behind `new_generic` and `new_gen_kill`.
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &'a mir::Body<'tcx>,
        def_id: DefId,
        analysis: A,
        trans_for_block: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
    ) -> Self {
        let bits_per_block = analysis.bits_per_block(body);

        // Every entry set starts at the analysis's bottom value so that `join` can only
        // move state upward toward the fixpoint.
        let bottom_value_set = if A::BOTTOM_VALUE {
            BitSet::new_filled(bits_per_block)
        } else {
            BitSet::new_empty(bits_per_block)
        };

        let mut entry_sets = IndexVec::from_elem(bottom_value_set, body.basic_blocks());
        // The start block may have a non-bottom initial state (e.g. initialized arguments).
        analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);

        Engine {
            analysis,
            bits_per_block,
            tcx,
            body,
            def_id,
            dead_unwinds: None,
            entry_sets,
            trans_for_block,
        }
    }

    /// Signals that we do not want dataflow state to propagate across unwind edges for these
    /// `BasicBlock`s.
    ///
    /// You must take care that `dead_unwinds` does not contain a `BasicBlock` that *can* actually
    /// unwind during execution. Otherwise, your dataflow results will not be correct.
    pub fn dead_unwinds(mut self, dead_unwinds: &'a BitSet<BasicBlock>) -> Self {
        self.dead_unwinds = Some(dead_unwinds);
        self
    }

    /// Computes the fixpoint for this dataflow problem and returns it.
    pub fn iterate_to_fixpoint(mut self) -> Results<'tcx, A> {
        // Scratch set reused for every block so we only allocate once.
        let mut temp_state = BitSet::new_empty(self.bits_per_block);

        let mut dirty_queue: WorkQueue<BasicBlock> =
            WorkQueue::with_none(self.body.basic_blocks().len());

        // Seed the queue in reverse postorder: processing predecessors before successors
        // minimizes the number of re-visits needed to reach the fixpoint.
        for (bb, _) in traversal::reverse_postorder(self.body) {
            dirty_queue.insert(bb);
        }

        // Add blocks that are not reachable from START_BLOCK to the work queue. These blocks will
        // be processed after the ones added above.
        // (`WorkQueue` deduplicates, so re-inserting already-queued blocks is a no-op.)
        for bb in self.body.basic_blocks().indices() {
            dirty_queue.insert(bb);
        }

        // Standard worklist algorithm: recompute a block's exit state from its entry state
        // and push it into each successor; successors whose entry state changed get re-queued.
        while let Some(bb) = dirty_queue.pop() {
            let bb_data = &self.body[bb];
            let on_entry = &self.entry_sets[bb];

            temp_state.overwrite(on_entry);
            self.apply_whole_block_effect(&mut temp_state, bb, bb_data);

            self.propagate_bits_into_graph_successors_of(
                &mut temp_state,
                (bb, bb_data),
                &mut dirty_queue,
            );
        }

        let Engine { tcx, body, def_id, trans_for_block, entry_sets, analysis, .. } = self;
        let results = Results { analysis, entry_sets };

        // Dumping graphviz output is best-effort: failure only warns.
        let res = write_graphviz_results(tcx, def_id, body, &results, trans_for_block);
        if let Err(e) = res {
            warn!("Failed to write graphviz dataflow results: {}", e);
        }

        results
    }

    /// Applies the cumulative effect of an entire block, excluding the call return effect if one
    /// exists.
    fn apply_whole_block_effect(
        &self,
        state: &mut BitSet<A::Idx>,
        block: BasicBlock,
        block_data: &mir::BasicBlockData<'tcx>,
    ) {
        // Use the cached block transfer function if available.
        if let Some(trans_for_block) = &self.trans_for_block {
            trans_for_block[block].apply(state);
            return;
        }

        // Otherwise apply effects one-by-one.

        for (statement_index, statement) in block_data.statements.iter().enumerate() {
            let location = Location { block, statement_index };
            self.analysis.apply_before_statement_effect(state, statement, location);
            self.analysis.apply_statement_effect(state, statement, location);
        }

        let terminator = block_data.terminator();
        let location = Location { block, statement_index: block_data.statements.len() };
        self.analysis.apply_before_terminator_effect(state, terminator, location);
        self.analysis.apply_terminator_effect(state, terminator, location);
    }

    // Joins the exit state `in_out` of block `bb` into the entry set of each CFG successor,
    // handling the terminator-specific edge cases (unwind edges, call return effects, and
    // per-edge effects for switches on enum discriminants).
    fn propagate_bits_into_graph_successors_of(
        &mut self,
        in_out: &mut BitSet<A::Idx>,
        (bb, bb_data): (BasicBlock, &'a mir::BasicBlockData<'tcx>),
        dirty_list: &mut WorkQueue<BasicBlock>,
    ) {
        use mir::TerminatorKind::*;

        match bb_data.terminator().kind {
            // No successors: nothing to propagate.
            Return | Resume | Abort | GeneratorDrop | Unreachable => {}

            // Exactly one successor.
            Goto { target }
            | Assert { target, cleanup: None, .. }
            | Yield { resume: target, drop: None, .. }
            | Drop { target, location: _, unwind: None }
            | DropAndReplace { target, value: _, location: _, unwind: None } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list)
            }

            Yield { resume: target, drop: Some(drop), .. } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
                self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list);
            }

            // Successor plus an unwind edge, which is skipped when the block is listed in
            // `dead_unwinds`.
            Assert { target, cleanup: Some(unwind), .. }
            | Drop { target, location: _, unwind: Some(unwind) }
            | DropAndReplace { target, value: _, location: _, unwind: Some(unwind) } => {
                self.propagate_bits_into_entry_set_for(in_out, target, dirty_list);
                if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
                    self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                }
            }

            SwitchInt { ref targets, ref values, ref discr, .. } => {
                let Engine { tcx, body, .. } = *self;
                let enum_ = discr
                    .place()
                    .and_then(|discr| switch_on_enum_discriminant(tcx, body, bb_data, discr));
                match enum_ {
                    // If this is a switch on an enum discriminant, a custom effect may be applied
                    // along each outgoing edge.
                    Some((enum_place, enum_def)) => {
                        self.propagate_bits_into_enum_discriminant_switch_successors(
                            in_out, bb, enum_def, enum_place, dirty_list, &*values, &*targets,
                        );
                    }

                    // Otherwise, it's just a normal `SwitchInt`, and every successor sees the same
                    // exit state.
                    None => {
                        for target in targets.iter().copied() {
                            self.propagate_bits_into_entry_set_for(&in_out, target, dirty_list);
                        }
                    }
                }
            }

            Call { cleanup, ref destination, ref func, ref args, .. } => {
                if let Some(unwind) = cleanup {
                    if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                    }
                }

                if let Some((ref dest_place, dest_bb)) = *destination {
                    // N.B.: This must be done *last*, otherwise the unwind path will see the call
                    // return effect.
                    self.analysis.apply_call_return_effect(in_out, bb, func, args, dest_place);
                    self.propagate_bits_into_entry_set_for(in_out, dest_bb, dirty_list);
                }
            }

            FalseEdges { real_target, imaginary_target } => {
                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
                self.propagate_bits_into_entry_set_for(in_out, imaginary_target, dirty_list);
            }

            FalseUnwind { real_target, unwind } => {
                self.propagate_bits_into_entry_set_for(in_out, real_target, dirty_list);
                if let Some(unwind) = unwind {
                    if self.dead_unwinds.map_or(true, |bbs| !bbs.contains(bb)) {
                        self.propagate_bits_into_entry_set_for(in_out, unwind, dirty_list);
                    }
                }
            }
        }
    }

    // Joins `in_out` into `bb`'s entry set; re-queues `bb` only if the join changed the set,
    // which is what guarantees the worklist loop terminates.
    fn propagate_bits_into_entry_set_for(
        &mut self,
        in_out: &BitSet<A::Idx>,
        bb: BasicBlock,
        dirty_queue: &mut WorkQueue<BasicBlock>,
    ) {
        let entry_set = &mut self.entry_sets[bb];
        let set_changed = self.analysis.join(entry_set, &in_out);
        if set_changed {
            dirty_queue.insert(bb);
        }
    }

    // Propagates state along each edge of a `SwitchInt` on an enum discriminant, applying
    // the analysis's per-variant effect on every value edge and plain state on the
    // "otherwise" edge (the last entry in `targets`).
    fn propagate_bits_into_enum_discriminant_switch_successors(
        &mut self,
        in_out: &mut BitSet<A::Idx>,
        bb: BasicBlock,
        enum_def: &'tcx ty::AdtDef,
        enum_place: &mir::Place<'tcx>,
        dirty_list: &mut WorkQueue<BasicBlock>,
        values: &[u128],
        targets: &[BasicBlock],
    ) {
        // MIR building adds discriminants to the `values` array in the same order as they
        // are yielded by `AdtDef::discriminants`. We rely on this to match each
        // discriminant in `values` to its corresponding variant in linear time.
        let mut tmp = BitSet::new_empty(in_out.domain_size());
        let mut discriminants = enum_def.discriminants(self.tcx);
        for (value, target) in values.iter().zip(targets.iter().copied()) {
            let (variant_idx, _) = discriminants.find(|&(_, discr)| discr.val == *value).expect(
                "Order of `AdtDef::discriminants` differed from that of `SwitchInt::values`",
            );

            // Apply the edge-specific effect to a copy so `in_out` stays pristine for the
            // remaining edges.
            tmp.overwrite(in_out);
            self.analysis.apply_discriminant_switch_effect(
                &mut tmp,
                bb,
                enum_place,
                enum_def,
                variant_idx,
            );
            self.propagate_bits_into_entry_set_for(&tmp, target, dirty_list);
        }

        std::mem::drop(tmp);

        // Propagate dataflow state along the "otherwise" edge.
        let otherwise = targets.last().copied().unwrap();
        self.propagate_bits_into_entry_set_for(&in_out, otherwise, dirty_list);
    }
}
347
/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///   _42 = discriminant(_1)
///   SwitchInt(_42, ..)
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant(
    tcx: TyCtxt<'tcx>,
    body: &'mir mir::Body<'tcx>,
    block: &'mir mir::BasicBlockData<'tcx>,
    switch_on: &mir::Place<'tcx>,
) -> Option<(&'mir mir::Place<'tcx>, &'tcx ty::AdtDef)> {
    // Only the *last* statement is considered: the pattern produced by MIR building puts the
    // discriminant read immediately before the switch.
    match block.statements.last().map(|stmt| &stmt.kind) {
        Some(mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated))))
            if lhs == switch_on =>
        {
            match &discriminated.ty(body, tcx).ty.kind {
                ty::Adt(def, _) => Some((discriminated, def)),

                // `Rvalue::Discriminant` is also used to get the active yield point for a
                // generator, but we do not need edge-specific effects in that case. This may
                // change in the future.
                ty::Generator(..) => None,

                t => bug!("`discriminant` called on unexpected type {:?}", t),
            }
        }

        _ => None,
    }
}
382
383 // Graphviz
384
385 /// Writes a DOT file containing the results of a dataflow analysis if the user requested it via
386 /// `rustc_mir` attributes.
387 fn write_graphviz_results<A>(
388 tcx: TyCtxt<'tcx>,
389 def_id: DefId,
390 body: &mir::Body<'tcx>,
391 results: &Results<'tcx, A>,
392 block_transfer_functions: Option<IndexVec<BasicBlock, GenKillSet<A::Idx>>>,
393 ) -> std::io::Result<()>
394 where
395 A: Analysis<'tcx>,
396 {
397 let attrs = match RustcMirAttrs::parse(tcx, def_id) {
398 Ok(attrs) => attrs,
399
400 // Invalid `rustc_mir` attrs will be reported using `span_err`.
401 Err(()) => return Ok(()),
402 };
403
404 let path = match attrs.output_path(A::NAME) {
405 Some(path) => path,
406 None => return Ok(()),
407 };
408
409 let bits_per_block = results.analysis.bits_per_block(body);
410
411 let mut formatter: Box<dyn graphviz::StateFormatter<'tcx, _>> = match attrs.formatter {
412 Some(sym::two_phase) => Box::new(graphviz::TwoPhaseDiff::new(bits_per_block)),
413 Some(sym::gen_kill) => {
414 if let Some(trans_for_block) = block_transfer_functions {
415 Box::new(graphviz::BlockTransferFunc::new(body, trans_for_block))
416 } else {
417 Box::new(graphviz::SimpleDiff::new(bits_per_block))
418 }
419 }
420
421 // Default to the `SimpleDiff` output style.
422 _ => Box::new(graphviz::SimpleDiff::new(bits_per_block)),
423 };
424
425 debug!("printing dataflow results for {:?} to {}", def_id, path.display());
426 let mut buf = Vec::new();
427
428 let graphviz = graphviz::Formatter::new(body, def_id, results, &mut *formatter);
429 dot::render_opts(&graphviz, &mut buf, &[dot::RenderOption::Monospace])?;
430 fs::write(&path, buf)?;
431 Ok(())
432 }
433
// Parsed contents of the `#[rustc_mir(...)]` attribute relevant to dataflow graphviz output.
#[derive(Default)]
struct RustcMirAttrs {
    // Argument of `borrowck_graphviz_postflow`; guaranteed to end in a filename when `Some`.
    basename_and_suffix: Option<PathBuf>,
    // Argument of `borrowck_graphviz_format`; either `sym::gen_kill` or `sym::two_phase`.
    formatter: Option<Symbol>,
}
439
impl RustcMirAttrs {
    // Parses the `rustc_mir` attributes on `def_id`. All invalid attributes are reported via
    // `span_err` before this returns `Err(())`, so callers can fail silently.
    fn parse(tcx: TyCtxt<'tcx>, def_id: DefId) -> Result<Self, ()> {
        let attrs = tcx.get_attrs(def_id);

        let mut result = Ok(());
        let mut ret = RustcMirAttrs::default();

        // Flatten every `#[rustc_mir(...)]` attribute into its list of nested meta items.
        let rustc_mir_attrs = attrs
            .iter()
            .filter(|attr| attr.check_name(sym::rustc_mir))
            .flat_map(|attr| attr.meta_item_list().into_iter().flat_map(|v| v.into_iter()));

        for attr in rustc_mir_attrs {
            let attr_result = if attr.check_name(sym::borrowck_graphviz_postflow) {
                Self::set_field(&mut ret.basename_and_suffix, tcx, &attr, |s| {
                    let path = PathBuf::from(s.to_string());
                    // `output_path` relies on this filename check when it calls
                    // `file_name().unwrap()`.
                    match path.file_name() {
                        Some(_) => Ok(path),
                        None => {
                            tcx.sess.span_err(attr.span(), "path must end in a filename");
                            Err(())
                        }
                    }
                })
            } else if attr.check_name(sym::borrowck_graphviz_format) {
                Self::set_field(&mut ret.formatter, tcx, &attr, |s| match s {
                    sym::gen_kill | sym::two_phase => Ok(s),
                    _ => {
                        tcx.sess.span_err(attr.span(), "unknown formatter");
                        Err(())
                    }
                })
            } else {
                Ok(())
            };

            // Keep processing remaining attrs so all errors are reported, but remember failure.
            result = result.and(attr_result);
        }

        result.map(|()| ret)
    }

    // Stores `mapper(value)` into `field`, rejecting (with a diagnostic) duplicate
    // occurrences of the attribute and attributes without a string argument.
    fn set_field<T>(
        field: &mut Option<T>,
        tcx: TyCtxt<'tcx>,
        attr: &ast::NestedMetaItem,
        mapper: impl FnOnce(Symbol) -> Result<T, ()>,
    ) -> Result<(), ()> {
        if field.is_some() {
            tcx.sess
                .span_err(attr.span(), &format!("duplicate values for `{}`", attr.name_or_empty()));

            return Err(());
        }

        if let Some(s) = attr.value_str() {
            *field = Some(mapper(s)?);
            Ok(())
        } else {
            tcx.sess
                .span_err(attr.span(), &format!("`{}` requires an argument", attr.name_or_empty()));
            Err(())
        }
    }

    /// Returns the path where dataflow results should be written, or `None` if
    /// `borrowck_graphviz_postflow` was not specified.
    ///
    /// This performs the following transformation to the argument of `borrowck_graphviz_postflow`:
    ///
    /// "path/suffix.dot" -> "path/analysis_name_suffix.dot"
    fn output_path(&self, analysis_name: &str) -> Option<PathBuf> {
        let mut ret = self.basename_and_suffix.as_ref().cloned()?;
        let suffix = ret.file_name().unwrap(); // Checked when parsing attrs

        let mut file_name: OsString = analysis_name.into();
        file_name.push("_");
        file_name.push(suffix);
        ret.set_file_name(file_name);

        Some(ret)
    }
}
522 }