]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_mir/src/transform/validate.rs
New upstream version 1.54.0+dfsg1
[rustc.git] / compiler / rustc_mir / src / transform / validate.rs
1 //! Validates the MIR to ensure that invariants are upheld.
2
3 use crate::dataflow::impls::MaybeStorageLive;
4 use crate::dataflow::{Analysis, ResultsCursor};
5 use crate::util::storage::AlwaysLiveLocals;
6
7 use super::MirPass;
8 use rustc_index::bit_set::BitSet;
9 use rustc_infer::infer::TyCtxtInferExt;
10 use rustc_middle::mir::interpret::Scalar;
11 use rustc_middle::mir::traversal;
12 use rustc_middle::mir::visit::{PlaceContext, Visitor};
13 use rustc_middle::mir::{
14 AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, PlaceElem,
15 PlaceRef, ProjectionElem, Rvalue, SourceScope, Statement, StatementKind, Terminator,
16 TerminatorKind,
17 };
18 use rustc_middle::ty::fold::BottomUpFolder;
19 use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeFoldable};
20 use rustc_target::abi::Size;
21
/// Classifies a control-flow edge for unwind-invariant checking: edges taken
/// during unwinding must obey different cleanup-block rules than normal edges.
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    /// An edge followed only when unwinding (e.g. a terminator's `unwind`/`cleanup` target).
    Unwind,
    /// An ordinary (non-unwind) control-flow edge.
    Normal,
}
27
/// A MIR pass that checks a body against the invariants of its current dialect,
/// delaying a span bug for every violation it finds.
pub struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    pub when: String,
    /// The phase for which we are upholding the dialect. If the given phase forbids a specific
    /// element, this validator will emit errors if that specific element is encountered.
    /// Note that phases that change the dialect cause all *following* phases to check the
    /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
    /// itself.
    pub mir_phase: MirPhase,
}
38
39 impl<'tcx> MirPass<'tcx> for Validator {
40 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
41 let def_id = body.source.def_id();
42 let param_env = tcx.param_env(def_id);
43 let mir_phase = self.mir_phase;
44
45 let always_live_locals = AlwaysLiveLocals::new(body);
46 let storage_liveness = MaybeStorageLive::new(always_live_locals)
47 .into_engine(tcx, body)
48 .iterate_to_fixpoint()
49 .into_results_cursor(body);
50
51 TypeChecker {
52 when: &self.when,
53 body,
54 tcx,
55 param_env,
56 mir_phase,
57 reachable_blocks: traversal::reachable_as_bitset(body),
58 storage_liveness,
59 place_cache: Vec::new(),
60 }
61 .visit_body(body);
62 }
63 }
64
65 /// Returns whether the two types are equal up to lifetimes.
66 /// All lifetimes, including higher-ranked ones, get ignored for this comparison.
67 /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.)
68 ///
69 /// The point of this function is to approximate "equal up to subtyping". However,
70 /// the approximation is incorrect as variance is ignored.
71 pub fn equal_up_to_regions(
72 tcx: TyCtxt<'tcx>,
73 param_env: ParamEnv<'tcx>,
74 src: Ty<'tcx>,
75 dest: Ty<'tcx>,
76 ) -> bool {
77 // Fast path.
78 if src == dest {
79 return true;
80 }
81
82 // Normalize lifetimes away on both sides, then compare.
83 let param_env = param_env.with_reveal_all_normalized(tcx);
84 let normalize = |ty: Ty<'tcx>| {
85 tcx.normalize_erasing_regions(
86 param_env,
87 ty.fold_with(&mut BottomUpFolder {
88 tcx,
89 // FIXME: We erase all late-bound lifetimes, but this is not fully correct.
90 // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`,
91 // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`,
92 // since one may have an `impl SomeTrait for fn(&32)` and
93 // `impl SomeTrait for fn(&'static u32)` at the same time which
94 // specify distinct values for Assoc. (See also #56105)
95 lt_op: |_| tcx.lifetimes.re_erased,
96 // Leave consts and types unchanged.
97 ct_op: |ct| ct,
98 ty_op: |ty| ty,
99 }),
100 )
101 };
102 tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok())
103 }
104
/// Visitor that walks a MIR body and reports every violated invariant via
/// delayed span bugs.
struct TypeChecker<'a, 'tcx> {
    // Pipeline-stage description, included in every error message.
    when: &'a str,
    // The body being validated (read-only).
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    // Dialect whose invariants are enforced (see `Validator::mir_phase`).
    mir_phase: MirPhase,
    // Blocks reachable from the entry block; storage-liveness checks in
    // `visit_local` are limited to these.
    reachable_blocks: BitSet<BasicBlock>,
    // Cursor over the storage-liveness dataflow results, used to check that
    // locals are only used while their storage is live.
    storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
    // Scratch buffer reused across `Call` terminators to detect overlapping
    // destination/argument places.
    place_cache: Vec<PlaceRef<'tcx>>,
}
115
116 impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
117 fn fail(&self, location: Location, msg: impl AsRef<str>) {
118 let span = self.body.source_info(location).span;
119 // We use `delay_span_bug` as we might see broken MIR when other errors have already
120 // occurred.
121 self.tcx.sess.diagnostic().delay_span_bug(
122 span,
123 &format!(
124 "broken MIR in {:?} ({}) at {:?}:\n{}",
125 self.body.source.instance,
126 self.when,
127 location,
128 msg.as_ref()
129 ),
130 );
131 }
132
133 fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
134 if let Some(bb) = self.body.basic_blocks().get(bb) {
135 let src = self.body.basic_blocks().get(location.block).unwrap();
136 match (src.is_cleanup, bb.is_cleanup, edge_kind) {
137 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
138 (false, false, EdgeKind::Normal)
139 // Non-cleanup blocks can jump to cleanup blocks along unwind edges
140 | (false, true, EdgeKind::Unwind)
141 // Cleanup blocks can jump to cleanup blocks along non-unwind edges
142 | (true, true, EdgeKind::Normal) => {}
143 // All other jumps are invalid
144 _ => {
145 self.fail(
146 location,
147 format!(
148 "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
149 edge_kind,
150 bb,
151 src.is_cleanup,
152 bb.is_cleanup,
153 )
154 )
155 }
156 }
157 } else {
158 self.fail(location, format!("encountered jump to invalid basic block {:?}", bb))
159 }
160 }
161
162 /// Check if src can be assigned into dest.
163 /// This is not precise, it will accept some incorrect assignments.
164 fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
165 // Fast path before we normalize.
166 if src == dest {
167 // Equal types, all is good.
168 return true;
169 }
170 // Normalize projections and things like that.
171 // FIXME: We need to reveal_all, as some optimizations change types in ways
172 // that require unfolding opaque types.
173 let param_env = self.param_env.with_reveal_all_normalized(self.tcx);
174 let src = self.tcx.normalize_erasing_regions(param_env, src);
175 let dest = self.tcx.normalize_erasing_regions(param_env, dest);
176
177 // Type-changing assignments can happen when subtyping is used. While
178 // all normal lifetimes are erased, higher-ranked types with their
179 // late-bound lifetimes are still around and can lead to type
180 // differences. So we compare ignoring lifetimes.
181 equal_up_to_regions(self.tcx, param_env, src, dest)
182 }
183 }
184
impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    /// Every mentioned local must be declared, and (in reachable blocks) only
    /// be used while its storage is live.
    fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) {
        if self.body.local_decls.get(*local).is_none() {
            self.fail(
                location,
                format!("local {:?} has no corresponding declaration in `body.local_decls`", local),
            );
        }

        if self.reachable_blocks.contains(location.block) && context.is_use() {
            // Uses of locals must occur while the local's storage is allocated.
            self.storage_liveness.seek_after_primary_effect(location);
            let locals_with_storage = self.storage_liveness.get();
            if !locals_with_storage.contains(*local) {
                self.fail(location, format!("use of local {:?}, which has no storage here", local));
            }
        }
    }

    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
        if self.tcx.sess.opts.debugging_opts.validate_mir {
            // `Operand::Copy` is only supposed to be used with `Copy` types.
            if let Operand::Copy(place) = operand {
                let ty = place.ty(&self.body.local_decls, self.tcx).ty;
                let span = self.body.source_info(location).span;

                if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) {
                    self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty));
                }
            }
        }

        self.super_operand(operand, location);
    }

    /// `Index` projections must index with a `usize`-typed local.
    fn visit_projection_elem(
        &mut self,
        local: Local,
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        if let ProjectionElem::Index(index) = elem {
            let index_ty = self.body.local_decls[index].ty;
            if index_ty != self.tcx.types.usize {
                self.fail(location, format!("bad index ({:?} != usize)", index_ty))
            }
        }
        self.super_projection_elem(local, proj_base, elem, context, location);
    }

    /// Per-statement checks: assignment type compatibility and aliasing,
    /// phase-restricted statement kinds, and `CopyNonOverlapping` operand types.
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (dest, rvalue)) => {
                // LHS and RHS of the assignment must have the same type.
                let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
                let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
                if !self.mir_assign_valid_types(right_ty, left_ty) {
                    self.fail(
                        location,
                        format!(
                            "encountered `{:?}` with incompatible types:\n\
                            left-hand side has type: {}\n\
                            right-hand side has type: {}",
                            statement.kind, left_ty, right_ty,
                        ),
                    );
                }
                match rvalue {
                    // The sides of an assignment must not alias. Currently this just checks whether the places
                    // are identical.
                    Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) => {
                        if dest == src {
                            self.fail(
                                location,
                                "encountered `Assign` statement with overlapping memory",
                            );
                        }
                    }
                    // The deaggregator currently does not deaggregate arrays.
                    // So for now, we ignore them here.
                    Rvalue::Aggregate(box AggregateKind::Array { .. }, _) => {}
                    // All other aggregates must be gone after some phases.
                    Rvalue::Aggregate(box kind, _) => {
                        if self.mir_phase > MirPhase::DropLowering
                            && !matches!(kind, AggregateKind::Generator(..))
                        {
                            // Generators persist until the state machine transformation, but all
                            // other aggregates must have been lowered.
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        } else if self.mir_phase > MirPhase::GeneratorLowering {
                            // No more aggregates after drop and generator lowering.
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        }
                    }
                    // `Shallow` borrows only exist to support closure capture analysis
                    // and must be gone after drop lowering.
                    Rvalue::Ref(_, BorrowKind::Shallow, _) => {
                        if self.mir_phase > MirPhase::DropLowering {
                            self.fail(
                                location,
                                "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase",
                            );
                        }
                    }
                    _ => {}
                }
            }
            StatementKind::AscribeUserType(..) => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping {
                ref src,
                ref dst,
                ref count,
            }) => {
                // Both `src` and `dst` must be (raw or safe) pointers; check
                // their pointee types match.
                let src_ty = src.ty(&self.body.local_decls, self.tcx);
                let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
                    src_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty),
                    );
                    return;
                };
                let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
                let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
                    dst_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty),
                    );
                    return;
                };
                // since CopyNonOverlapping is parametrized by 1 type,
                // we only need to check that they are equal and not keep an extra parameter.
                if op_src_ty != op_dst_ty {
                    self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty));
                }

                // The element count must be a `usize`.
                let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
                if op_cnt_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty))
                }
            }
            // These statement kinds are valid in every phase; nothing to check.
            StatementKind::SetDiscriminant { .. }
            | StatementKind::StorageLive(..)
            | StatementKind::StorageDead(..)
            | StatementKind::LlvmInlineAsm(..)
            | StatementKind::Retag(_, _)
            | StatementKind::Coverage(_)
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

    /// Per-terminator checks: every successor edge must be valid (and respect
    /// unwind invariants), plus kind-specific type and phase checks.
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, switch_ty, discr } => {
                // The discriminant operand must have exactly the recorded switch type.
                let ty = discr.ty(&self.body.local_decls, self.tcx);
                if ty != *switch_ty {
                    self.fail(
                        location,
                        format!(
                            "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}",
                            ty, switch_ty,
                        ),
                    );
                }

                let target_width = self.tcx.sess.target.pointer_width;

                // Compute the bit-width of the switch type so we can check that
                // every branch value actually fits in it.
                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });

                for (value, target) in targets.iter() {
                    if Scalar::<()>::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {:#x} is not a proper {:?}", value, switch_ty),
                        )
                    }

                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);
            }
            TerminatorKind::Drop { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::DropAndReplace { target, unwind, .. } => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`DropAndReplace` is not permitted to exist after drop elaboration",
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Call { func, args, destination, cleanup, .. } => {
                // The callee must have a callable (fn pointer or fn item) type.
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!("encountered non-callable type {} in `Call` terminator", func_ty),
                    ),
                }
                if let Some((_, target)) = destination {
                    self.check_edge(location, *target, EdgeKind::Normal);
                }
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }

                // The call destination place and Operand::Move place used as an argument might be
                // passed by a reference to the callee. Consequently they must be non-overlapping.
                // Currently this simply checks for duplicate places.
                self.place_cache.clear();
                if let Some((destination, _)) = destination {
                    self.place_cache.push(destination.as_ref());
                }
                for arg in args {
                    if let Operand::Move(place) = arg {
                        self.place_cache.push(place.as_ref());
                    }
                }
                // Sort + dedup: any shrinkage means at least one place appeared twice.
                let all_len = self.place_cache.len();
                self.place_cache.sort_unstable();
                self.place_cache.dedup();
                let has_duplicates = all_len != self.place_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "encountered overlapping memory in `Call` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Assert { cond, target, cleanup, .. } => {
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {} in `Assert` terminator",
                            cond_ty
                        ),
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.mir_phase > MirPhase::GeneratorLowering {
                    self.fail(location, "`Yield` should have been replaced by generator lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                self.check_edge(location, *real_target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::InlineAsm { destination, .. } => {
                if let Some(destination) = destination {
                    self.check_edge(location, *destination, EdgeKind::Normal);
                }
            }
            // Nothing to validate for these.
            TerminatorKind::Resume
            | TerminatorKind::Abort
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::GeneratorDrop => {}
        }

        self.super_terminator(terminator, location);
    }

    /// Every referenced source scope must exist in `body.source_scopes`.
    fn visit_source_scope(&mut self, scope: &SourceScope) {
        if self.body.source_scopes.get(*scope).is_none() {
            self.tcx.sess.diagnostic().delay_span_bug(
                self.body.span,
                &format!(
                    "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
                    self.body.source.instance, self.when, scope,
                ),
            );
        }
    }
}