//! Validates the MIR to ensure that invariants are upheld.
use rustc_index::bit_set::BitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::traversal;
use rustc_middle::mir::visit::{PlaceContext, Visitor};
use rustc_middle::mir::{
    AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPass, MirPhase, Operand,
    PlaceElem, PlaceRef, ProjectionElem, Rvalue, SourceScope, Statement, StatementKind, Terminator,
    TerminatorKind, START_BLOCK,
};
use rustc_middle::ty::fold::BottomUpFolder;
use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeFoldable};
use rustc_mir_dataflow::impls::MaybeStorageLive;
use rustc_mir_dataflow::storage::AlwaysLiveLocals;
use rustc_mir_dataflow::{Analysis, ResultsCursor};
use rustc_target::abi::Size;
f035d41b XL |
19 | |
/// The kind of control-flow edge being checked by `check_edge`: a normal
/// successor edge, or an unwind (cleanup) edge.
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    /// An edge taken when the source block unwinds (e.g. a `cleanup` target).
    Unwind,
    /// An ordinary successor edge.
    Normal,
}
f9f354fc XL |
25 | |
26 | pub struct Validator { | |
27 | /// Describes at which point in the pipeline this validation is happening. | |
28 | pub when: String, | |
3dfed10e XL |
29 | /// The phase for which we are upholding the dialect. If the given phase forbids a specific |
30 | /// element, this validator will now emit errors if that specific element is encountered. | |
31 | /// Note that phases that change the dialect cause all *following* phases to check the | |
32 | /// invariants of the new dialect. A phase that changes dialects never checks the new invariants | |
33 | /// itself. | |
34 | pub mir_phase: MirPhase, | |
f9f354fc XL |
35 | } |
36 | ||
37 | impl<'tcx> MirPass<'tcx> for Validator { | |
29967ef6 XL |
38 | fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { |
39 | let def_id = body.source.def_id(); | |
1b1a35ee | 40 | let param_env = tcx.param_env(def_id); |
3dfed10e | 41 | let mir_phase = self.mir_phase; |
1b1a35ee XL |
42 | |
43 | let always_live_locals = AlwaysLiveLocals::new(body); | |
44 | let storage_liveness = MaybeStorageLive::new(always_live_locals) | |
29967ef6 | 45 | .into_engine(tcx, body) |
1b1a35ee XL |
46 | .iterate_to_fixpoint() |
47 | .into_results_cursor(body); | |
48 | ||
29967ef6 XL |
49 | TypeChecker { |
50 | when: &self.when, | |
51 | body, | |
52 | tcx, | |
53 | param_env, | |
54 | mir_phase, | |
55 | reachable_blocks: traversal::reachable_as_bitset(body), | |
56 | storage_liveness, | |
57 | place_cache: Vec::new(), | |
58 | } | |
59 | .visit_body(body); | |
f9f354fc XL |
60 | } |
61 | } | |
62 | ||
f035d41b XL |
63 | /// Returns whether the two types are equal up to lifetimes. |
64 | /// All lifetimes, including higher-ranked ones, get ignored for this comparison. | |
65 | /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.) | |
66 | /// | |
67 | /// The point of this function is to approximate "equal up to subtyping". However, | |
68 | /// the approximation is incorrect as variance is ignored. | |
a2a8927a | 69 | pub fn equal_up_to_regions<'tcx>( |
f035d41b XL |
70 | tcx: TyCtxt<'tcx>, |
71 | param_env: ParamEnv<'tcx>, | |
72 | src: Ty<'tcx>, | |
73 | dest: Ty<'tcx>, | |
74 | ) -> bool { | |
75 | // Fast path. | |
76 | if src == dest { | |
77 | return true; | |
78 | } | |
79 | ||
1b1a35ee XL |
80 | // Normalize lifetimes away on both sides, then compare. |
81 | let param_env = param_env.with_reveal_all_normalized(tcx); | |
82 | let normalize = |ty: Ty<'tcx>| { | |
83 | tcx.normalize_erasing_regions( | |
84 | param_env, | |
85 | ty.fold_with(&mut BottomUpFolder { | |
86 | tcx, | |
29967ef6 XL |
87 | // FIXME: We erase all late-bound lifetimes, but this is not fully correct. |
88 | // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`, | |
89 | // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`, | |
90 | // since one may have an `impl SomeTrait for fn(&32)` and | |
91 | // `impl SomeTrait for fn(&'static u32)` at the same time which | |
92 | // specify distinct values for Assoc. (See also #56105) | |
1b1a35ee XL |
93 | lt_op: |_| tcx.lifetimes.re_erased, |
94 | // Leave consts and types unchanged. | |
95 | ct_op: |ct| ct, | |
96 | ty_op: |ty| ty, | |
97 | }), | |
98 | ) | |
99 | }; | |
100 | tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok()) | |
f035d41b XL |
101 | } |
102 | ||
f9f354fc XL |
103 | struct TypeChecker<'a, 'tcx> { |
104 | when: &'a str, | |
f9f354fc XL |
105 | body: &'a Body<'tcx>, |
106 | tcx: TyCtxt<'tcx>, | |
107 | param_env: ParamEnv<'tcx>, | |
3dfed10e | 108 | mir_phase: MirPhase, |
29967ef6 | 109 | reachable_blocks: BitSet<BasicBlock>, |
1b1a35ee | 110 | storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>, |
29967ef6 | 111 | place_cache: Vec<PlaceRef<'tcx>>, |
f9f354fc XL |
112 | } |
113 | ||
114 | impl<'a, 'tcx> TypeChecker<'a, 'tcx> { | |
f035d41b XL |
115 | fn fail(&self, location: Location, msg: impl AsRef<str>) { |
116 | let span = self.body.source_info(location).span; | |
f9f354fc XL |
117 | // We use `delay_span_bug` as we might see broken MIR when other errors have already |
118 | // occurred. | |
119 | self.tcx.sess.diagnostic().delay_span_bug( | |
120 | span, | |
f035d41b XL |
121 | &format!( |
122 | "broken MIR in {:?} ({}) at {:?}:\n{}", | |
29967ef6 | 123 | self.body.source.instance, |
f035d41b XL |
124 | self.when, |
125 | location, | |
126 | msg.as_ref() | |
127 | ), | |
f9f354fc XL |
128 | ); |
129 | } | |
f035d41b XL |
130 | |
131 | fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) { | |
c295e0f8 XL |
132 | if bb == START_BLOCK { |
133 | self.fail(location, "start block must not have predecessors") | |
134 | } | |
f035d41b XL |
135 | if let Some(bb) = self.body.basic_blocks().get(bb) { |
136 | let src = self.body.basic_blocks().get(location.block).unwrap(); | |
137 | match (src.is_cleanup, bb.is_cleanup, edge_kind) { | |
138 | // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges | |
139 | (false, false, EdgeKind::Normal) | |
140 | // Non-cleanup blocks can jump to cleanup blocks along unwind edges | |
141 | | (false, true, EdgeKind::Unwind) | |
142 | // Cleanup blocks can jump to cleanup blocks along non-unwind edges | |
143 | | (true, true, EdgeKind::Normal) => {} | |
144 | // All other jumps are invalid | |
145 | _ => { | |
146 | self.fail( | |
147 | location, | |
148 | format!( | |
149 | "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})", | |
150 | edge_kind, | |
151 | bb, | |
152 | src.is_cleanup, | |
153 | bb.is_cleanup, | |
154 | ) | |
155 | ) | |
156 | } | |
157 | } | |
158 | } else { | |
159 | self.fail(location, format!("encountered jump to invalid basic block {:?}", bb)) | |
160 | } | |
161 | } | |
162 | ||
163 | /// Check if src can be assigned into dest. | |
164 | /// This is not precise, it will accept some incorrect assignments. | |
165 | fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool { | |
166 | // Fast path before we normalize. | |
167 | if src == dest { | |
168 | // Equal types, all is good. | |
169 | return true; | |
170 | } | |
171 | // Normalize projections and things like that. | |
172 | // FIXME: We need to reveal_all, as some optimizations change types in ways | |
173 | // that require unfolding opaque types. | |
3dfed10e | 174 | let param_env = self.param_env.with_reveal_all_normalized(self.tcx); |
f035d41b XL |
175 | let src = self.tcx.normalize_erasing_regions(param_env, src); |
176 | let dest = self.tcx.normalize_erasing_regions(param_env, dest); | |
177 | ||
178 | // Type-changing assignments can happen when subtyping is used. While | |
179 | // all normal lifetimes are erased, higher-ranked types with their | |
180 | // late-bound lifetimes are still around and can lead to type | |
181 | // differences. So we compare ignoring lifetimes. | |
182 | equal_up_to_regions(self.tcx, param_env, src, dest) | |
183 | } | |
f9f354fc XL |
184 | } |
185 | ||
186 | impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { | |
1b1a35ee | 187 | fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) { |
fc512014 XL |
188 | if self.body.local_decls.get(*local).is_none() { |
189 | self.fail( | |
190 | location, | |
191 | format!("local {:?} has no corresponding declaration in `body.local_decls`", local), | |
192 | ); | |
193 | } | |
194 | ||
29967ef6 | 195 | if self.reachable_blocks.contains(location.block) && context.is_use() { |
1b1a35ee XL |
196 | // Uses of locals must occur while the local's storage is allocated. |
197 | self.storage_liveness.seek_after_primary_effect(location); | |
198 | let locals_with_storage = self.storage_liveness.get(); | |
199 | if !locals_with_storage.contains(*local) { | |
200 | self.fail(location, format!("use of local {:?}, which has no storage here", local)); | |
201 | } | |
202 | } | |
203 | } | |
204 | ||
f9f354fc | 205 | fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { |
29967ef6 XL |
206 | // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed. |
207 | if self.tcx.sess.opts.debugging_opts.validate_mir { | |
208 | // `Operand::Copy` is only supposed to be used with `Copy` types. | |
209 | if let Operand::Copy(place) = operand { | |
210 | let ty = place.ty(&self.body.local_decls, self.tcx).ty; | |
211 | let span = self.body.source_info(location).span; | |
f9f354fc | 212 | |
29967ef6 XL |
213 | if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) { |
214 | self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty)); | |
215 | } | |
f9f354fc XL |
216 | } |
217 | } | |
218 | ||
219 | self.super_operand(operand, location); | |
220 | } | |
221 | ||
17df50a5 XL |
222 | fn visit_projection_elem( |
223 | &mut self, | |
224 | local: Local, | |
225 | proj_base: &[PlaceElem<'tcx>], | |
226 | elem: PlaceElem<'tcx>, | |
227 | context: PlaceContext, | |
228 | location: Location, | |
229 | ) { | |
230 | if let ProjectionElem::Index(index) = elem { | |
231 | let index_ty = self.body.local_decls[index].ty; | |
232 | if index_ty != self.tcx.types.usize { | |
233 | self.fail(location, format!("bad index ({:?} != usize)", index_ty)) | |
234 | } | |
235 | } | |
236 | self.super_projection_elem(local, proj_base, elem, context, location); | |
237 | } | |
238 | ||
f9f354fc | 239 | fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { |
f035d41b XL |
240 | match &statement.kind { |
241 | StatementKind::Assign(box (dest, rvalue)) => { | |
242 | // LHS and RHS of the assignment must have the same type. | |
243 | let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty; | |
244 | let right_ty = rvalue.ty(&self.body.local_decls, self.tcx); | |
245 | if !self.mir_assign_valid_types(right_ty, left_ty) { | |
246 | self.fail( | |
247 | location, | |
248 | format!( | |
3dfed10e | 249 | "encountered `{:?}` with incompatible types:\n\ |
f035d41b XL |
250 | left-hand side has type: {}\n\ |
251 | right-hand side has type: {}", | |
3dfed10e | 252 | statement.kind, left_ty, right_ty, |
f035d41b XL |
253 | ), |
254 | ); | |
255 | } | |
f035d41b | 256 | match rvalue { |
3dfed10e XL |
257 | // The sides of an assignment must not alias. Currently this just checks whether the places |
258 | // are identical. | |
f035d41b XL |
259 | Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) => { |
260 | if dest == src { | |
261 | self.fail( | |
262 | location, | |
263 | "encountered `Assign` statement with overlapping memory", | |
264 | ); | |
265 | } | |
f9f354fc | 266 | } |
3dfed10e XL |
267 | // The deaggregator currently does not deaggreagate arrays. |
268 | // So for now, we ignore them here. | |
269 | Rvalue::Aggregate(box AggregateKind::Array { .. }, _) => {} | |
270 | // All other aggregates must be gone after some phases. | |
271 | Rvalue::Aggregate(box kind, _) => { | |
272 | if self.mir_phase > MirPhase::DropLowering | |
273 | && !matches!(kind, AggregateKind::Generator(..)) | |
274 | { | |
275 | // Generators persist until the state machine transformation, but all | |
276 | // other aggregates must have been lowered. | |
277 | self.fail( | |
278 | location, | |
279 | format!("{:?} have been lowered to field assignments", rvalue), | |
280 | ) | |
281 | } else if self.mir_phase > MirPhase::GeneratorLowering { | |
282 | // No more aggregates after drop and generator lowering. | |
283 | self.fail( | |
284 | location, | |
285 | format!("{:?} have been lowered to field assignments", rvalue), | |
286 | ) | |
287 | } | |
288 | } | |
1b1a35ee XL |
289 | Rvalue::Ref(_, BorrowKind::Shallow, _) => { |
290 | if self.mir_phase > MirPhase::DropLowering { | |
291 | self.fail( | |
292 | location, | |
293 | "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase", | |
294 | ); | |
295 | } | |
296 | } | |
f035d41b XL |
297 | _ => {} |
298 | } | |
299 | } | |
1b1a35ee XL |
300 | StatementKind::AscribeUserType(..) => { |
301 | if self.mir_phase > MirPhase::DropLowering { | |
302 | self.fail( | |
303 | location, | |
304 | "`AscribeUserType` should have been removed after drop lowering phase", | |
305 | ); | |
306 | } | |
307 | } | |
308 | StatementKind::FakeRead(..) => { | |
309 | if self.mir_phase > MirPhase::DropLowering { | |
310 | self.fail( | |
311 | location, | |
312 | "`FakeRead` should have been removed after drop lowering phase", | |
313 | ); | |
314 | } | |
315 | } | |
6a06907d XL |
316 | StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping { |
317 | ref src, | |
318 | ref dst, | |
319 | ref count, | |
320 | }) => { | |
321 | let src_ty = src.ty(&self.body.local_decls, self.tcx); | |
322 | let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) { | |
323 | src_deref.ty | |
324 | } else { | |
325 | self.fail( | |
326 | location, | |
327 | format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty), | |
328 | ); | |
329 | return; | |
330 | }; | |
331 | let dst_ty = dst.ty(&self.body.local_decls, self.tcx); | |
332 | let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) { | |
333 | dst_deref.ty | |
334 | } else { | |
335 | self.fail( | |
336 | location, | |
337 | format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty), | |
338 | ); | |
339 | return; | |
340 | }; | |
341 | // since CopyNonOverlapping is parametrized by 1 type, | |
342 | // we only need to check that they are equal and not keep an extra parameter. | |
343 | if op_src_ty != op_dst_ty { | |
344 | self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty)); | |
345 | } | |
346 | ||
347 | let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx); | |
348 | if op_cnt_ty != self.tcx.types.usize { | |
349 | self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty)) | |
350 | } | |
351 | } | |
352 | StatementKind::SetDiscriminant { .. } | |
353 | | StatementKind::StorageLive(..) | |
354 | | StatementKind::StorageDead(..) | |
355 | | StatementKind::LlvmInlineAsm(..) | |
356 | | StatementKind::Retag(_, _) | |
357 | | StatementKind::Coverage(_) | |
358 | | StatementKind::Nop => {} | |
f035d41b | 359 | } |
29967ef6 XL |
360 | |
361 | self.super_statement(statement, location); | |
f035d41b XL |
362 | } |
363 | ||
364 | fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { | |
365 | match &terminator.kind { | |
366 | TerminatorKind::Goto { target } => { | |
367 | self.check_edge(location, *target, EdgeKind::Normal); | |
368 | } | |
29967ef6 | 369 | TerminatorKind::SwitchInt { targets, switch_ty, discr } => { |
f035d41b XL |
370 | let ty = discr.ty(&self.body.local_decls, self.tcx); |
371 | if ty != *switch_ty { | |
372 | self.fail( | |
373 | location, | |
374 | format!( | |
375 | "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}", | |
376 | ty, switch_ty, | |
377 | ), | |
378 | ); | |
379 | } | |
29967ef6 XL |
380 | |
381 | let target_width = self.tcx.sess.target.pointer_width; | |
382 | ||
383 | let size = Size::from_bits(match switch_ty.kind() { | |
384 | ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(), | |
385 | ty::Int(int) => int.normalize(target_width).bit_width().unwrap(), | |
386 | ty::Char => 32, | |
387 | ty::Bool => 1, | |
388 | other => bug!("unhandled type: {:?}", other), | |
389 | }); | |
390 | ||
391 | for (value, target) in targets.iter() { | |
392 | if Scalar::<()>::try_from_uint(value, size).is_none() { | |
393 | self.fail( | |
394 | location, | |
395 | format!("the value {:#x} is not a proper {:?}", value, switch_ty), | |
396 | ) | |
397 | } | |
398 | ||
399 | self.check_edge(location, target, EdgeKind::Normal); | |
f035d41b | 400 | } |
29967ef6 | 401 | self.check_edge(location, targets.otherwise(), EdgeKind::Normal); |
f035d41b XL |
402 | } |
403 | TerminatorKind::Drop { target, unwind, .. } => { | |
404 | self.check_edge(location, *target, EdgeKind::Normal); | |
405 | if let Some(unwind) = unwind { | |
406 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
407 | } | |
408 | } | |
409 | TerminatorKind::DropAndReplace { target, unwind, .. } => { | |
3dfed10e XL |
410 | if self.mir_phase > MirPhase::DropLowering { |
411 | self.fail( | |
412 | location, | |
413 | "`DropAndReplace` is not permitted to exist after drop elaboration", | |
414 | ); | |
415 | } | |
f035d41b XL |
416 | self.check_edge(location, *target, EdgeKind::Normal); |
417 | if let Some(unwind) = unwind { | |
418 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
419 | } | |
420 | } | |
29967ef6 | 421 | TerminatorKind::Call { func, args, destination, cleanup, .. } => { |
f035d41b | 422 | let func_ty = func.ty(&self.body.local_decls, self.tcx); |
1b1a35ee | 423 | match func_ty.kind() { |
f035d41b XL |
424 | ty::FnPtr(..) | ty::FnDef(..) => {} |
425 | _ => self.fail( | |
426 | location, | |
427 | format!("encountered non-callable type {} in `Call` terminator", func_ty), | |
428 | ), | |
429 | } | |
430 | if let Some((_, target)) = destination { | |
431 | self.check_edge(location, *target, EdgeKind::Normal); | |
432 | } | |
433 | if let Some(cleanup) = cleanup { | |
434 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
435 | } | |
29967ef6 XL |
436 | |
437 | // The call destination place and Operand::Move place used as an argument might be | |
438 | // passed by a reference to the callee. Consequently they must be non-overlapping. | |
439 | // Currently this simply checks for duplicate places. | |
440 | self.place_cache.clear(); | |
441 | if let Some((destination, _)) = destination { | |
442 | self.place_cache.push(destination.as_ref()); | |
443 | } | |
444 | for arg in args { | |
445 | if let Operand::Move(place) = arg { | |
446 | self.place_cache.push(place.as_ref()); | |
447 | } | |
448 | } | |
449 | let all_len = self.place_cache.len(); | |
450 | self.place_cache.sort_unstable(); | |
451 | self.place_cache.dedup(); | |
452 | let has_duplicates = all_len != self.place_cache.len(); | |
453 | if has_duplicates { | |
454 | self.fail( | |
455 | location, | |
456 | format!( | |
457 | "encountered overlapping memory in `Call` terminator: {:?}", | |
458 | terminator.kind, | |
459 | ), | |
460 | ); | |
461 | } | |
f035d41b XL |
462 | } |
463 | TerminatorKind::Assert { cond, target, cleanup, .. } => { | |
464 | let cond_ty = cond.ty(&self.body.local_decls, self.tcx); | |
465 | if cond_ty != self.tcx.types.bool { | |
466 | self.fail( | |
467 | location, | |
468 | format!( | |
469 | "encountered non-boolean condition of type {} in `Assert` terminator", | |
470 | cond_ty | |
471 | ), | |
472 | ); | |
473 | } | |
474 | self.check_edge(location, *target, EdgeKind::Normal); | |
475 | if let Some(cleanup) = cleanup { | |
476 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
477 | } | |
478 | } | |
479 | TerminatorKind::Yield { resume, drop, .. } => { | |
3dfed10e XL |
480 | if self.mir_phase > MirPhase::GeneratorLowering { |
481 | self.fail(location, "`Yield` should have been replaced by generator lowering"); | |
482 | } | |
f035d41b XL |
483 | self.check_edge(location, *resume, EdgeKind::Normal); |
484 | if let Some(drop) = drop { | |
485 | self.check_edge(location, *drop, EdgeKind::Normal); | |
486 | } | |
487 | } | |
488 | TerminatorKind::FalseEdge { real_target, imaginary_target } => { | |
489 | self.check_edge(location, *real_target, EdgeKind::Normal); | |
490 | self.check_edge(location, *imaginary_target, EdgeKind::Normal); | |
491 | } | |
492 | TerminatorKind::FalseUnwind { real_target, unwind } => { | |
493 | self.check_edge(location, *real_target, EdgeKind::Normal); | |
494 | if let Some(unwind) = unwind { | |
495 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
496 | } | |
497 | } | |
a2a8927a | 498 | TerminatorKind::InlineAsm { destination, cleanup, .. } => { |
f035d41b XL |
499 | if let Some(destination) = destination { |
500 | self.check_edge(location, *destination, EdgeKind::Normal); | |
f9f354fc | 501 | } |
a2a8927a XL |
502 | if let Some(cleanup) = cleanup { |
503 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
504 | } | |
f9f354fc | 505 | } |
f035d41b XL |
506 | // Nothing to validate for these. |
507 | TerminatorKind::Resume | |
508 | | TerminatorKind::Abort | |
509 | | TerminatorKind::Return | |
510 | | TerminatorKind::Unreachable | |
511 | | TerminatorKind::GeneratorDrop => {} | |
f9f354fc | 512 | } |
29967ef6 XL |
513 | |
514 | self.super_terminator(terminator, location); | |
515 | } | |
516 | ||
517 | fn visit_source_scope(&mut self, scope: &SourceScope) { | |
518 | if self.body.source_scopes.get(*scope).is_none() { | |
519 | self.tcx.sess.diagnostic().delay_span_bug( | |
520 | self.body.span, | |
521 | &format!( | |
522 | "broken MIR in {:?} ({}):\ninvalid source scope {:?}", | |
523 | self.body.source.instance, self.when, scope, | |
524 | ), | |
525 | ); | |
526 | } | |
f9f354fc XL |
527 | } |
528 | } |