1 //! Validates the MIR to ensure that invariants are upheld.
use crate::dataflow::impls::MaybeStorageLive;
use crate::dataflow::{Analysis, ResultsCursor};
use crate::util::storage::AlwaysLiveLocals;

use super::MirPass;
use rustc_index::bit_set::BitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::traversal;
use rustc_middle::mir::visit::{PlaceContext, Visitor};
use rustc_middle::mir::{
    AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, PlaceElem,
    PlaceRef, ProjectionElem, Rvalue, SourceScope, Statement, StatementKind, Terminator,
    TerminatorKind,
};
use rustc_middle::ty::fold::BottomUpFolder;
use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeFoldable};
use rustc_target::abi::Size;
/// The kind of a control-flow edge between two basic blocks, as classified by
/// `TypeChecker::check_edge` when enforcing unwind invariants.
// NOTE(review): the enum body was lost in extraction; the two variants are
// reconstructed from their uses (`EdgeKind::Normal` / `EdgeKind::Unwind`) in
// `check_edge` and `visit_terminator` — confirm against upstream.
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    /// An edge taken only during unwinding (e.g. the `unwind`/`cleanup` target).
    Unwind,
    /// Any ordinary (non-unwinding) edge.
    Normal,
}
28 pub struct Validator
{
29 /// Describes at which point in the pipeline this validation is happening.
31 /// The phase for which we are upholding the dialect. If the given phase forbids a specific
32 /// element, this validator will now emit errors if that specific element is encountered.
33 /// Note that phases that change the dialect cause all *following* phases to check the
34 /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
36 pub mir_phase
: MirPhase
,
39 impl<'tcx
> MirPass
<'tcx
> for Validator
{
40 fn run_pass(&self, tcx
: TyCtxt
<'tcx
>, body
: &mut Body
<'tcx
>) {
41 let def_id
= body
.source
.def_id();
42 let param_env
= tcx
.param_env(def_id
);
43 let mir_phase
= self.mir_phase
;
45 let always_live_locals
= AlwaysLiveLocals
::new(body
);
46 let storage_liveness
= MaybeStorageLive
::new(always_live_locals
)
47 .into_engine(tcx
, body
)
48 .iterate_to_fixpoint()
49 .into_results_cursor(body
);
57 reachable_blocks
: traversal
::reachable_as_bitset(body
),
59 place_cache
: Vec
::new(),
65 /// Returns whether the two types are equal up to lifetimes.
66 /// All lifetimes, including higher-ranked ones, get ignored for this comparison.
67 /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.)
69 /// The point of this function is to approximate "equal up to subtyping". However,
70 /// the approximation is incorrect as variance is ignored.
71 pub fn equal_up_to_regions(
73 param_env
: ParamEnv
<'tcx
>,
82 // Normalize lifetimes away on both sides, then compare.
83 let param_env
= param_env
.with_reveal_all_normalized(tcx
);
84 let normalize
= |ty
: Ty
<'tcx
>| {
85 tcx
.normalize_erasing_regions(
87 ty
.fold_with(&mut BottomUpFolder
{
89 // FIXME: We erase all late-bound lifetimes, but this is not fully correct.
90 // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`,
91 // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`,
92 // since one may have an `impl SomeTrait for fn(&32)` and
93 // `impl SomeTrait for fn(&'static u32)` at the same time which
94 // specify distinct values for Assoc. (See also #56105)
95 lt_op
: |_
| tcx
.lifetimes
.re_erased
,
96 // Leave consts and types unchanged.
102 tcx
.infer_ctxt().enter(|infcx
| infcx
.can_eq(param_env
, normalize(src
), normalize(dest
)).is_ok())
105 struct TypeChecker
<'a
, 'tcx
> {
107 body
: &'a Body
<'tcx
>,
109 param_env
: ParamEnv
<'tcx
>,
111 reachable_blocks
: BitSet
<BasicBlock
>,
112 storage_liveness
: ResultsCursor
<'a
, 'tcx
, MaybeStorageLive
>,
113 place_cache
: Vec
<PlaceRef
<'tcx
>>,
116 impl<'a
, 'tcx
> TypeChecker
<'a
, 'tcx
> {
117 fn fail(&self, location
: Location
, msg
: impl AsRef
<str>) {
118 let span
= self.body
.source_info(location
).span
;
119 // We use `delay_span_bug` as we might see broken MIR when other errors have already
121 self.tcx
.sess
.diagnostic().delay_span_bug(
124 "broken MIR in {:?} ({}) at {:?}:\n{}",
125 self.body
.source
.instance
,
133 fn check_edge(&self, location
: Location
, bb
: BasicBlock
, edge_kind
: EdgeKind
) {
134 if let Some(bb
) = self.body
.basic_blocks().get(bb
) {
135 let src
= self.body
.basic_blocks().get(location
.block
).unwrap();
136 match (src
.is_cleanup
, bb
.is_cleanup
, edge_kind
) {
137 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
138 (false, false, EdgeKind
::Normal
)
139 // Non-cleanup blocks can jump to cleanup blocks along unwind edges
140 | (false, true, EdgeKind
::Unwind
)
141 // Cleanup blocks can jump to cleanup blocks along non-unwind edges
142 | (true, true, EdgeKind
::Normal
) => {}
143 // All other jumps are invalid
148 "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
158 self.fail(location
, format
!("encountered jump to invalid basic block {:?}", bb
))
162 /// Check if src can be assigned into dest.
163 /// This is not precise, it will accept some incorrect assignments.
164 fn mir_assign_valid_types(&self, src
: Ty
<'tcx
>, dest
: Ty
<'tcx
>) -> bool
{
165 // Fast path before we normalize.
167 // Equal types, all is good.
170 // Normalize projections and things like that.
171 // FIXME: We need to reveal_all, as some optimizations change types in ways
172 // that require unfolding opaque types.
173 let param_env
= self.param_env
.with_reveal_all_normalized(self.tcx
);
174 let src
= self.tcx
.normalize_erasing_regions(param_env
, src
);
175 let dest
= self.tcx
.normalize_erasing_regions(param_env
, dest
);
177 // Type-changing assignments can happen when subtyping is used. While
178 // all normal lifetimes are erased, higher-ranked types with their
179 // late-bound lifetimes are still around and can lead to type
180 // differences. So we compare ignoring lifetimes.
181 equal_up_to_regions(self.tcx
, param_env
, src
, dest
)
impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    /// Validates a single use of a local: it must be declared, and (in
    /// reachable blocks) its storage must be live at the point of use.
    fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) {
        if self.body.local_decls.get(*local).is_none() {
            self.fail(
                location,
                format!("local {:?} has no corresponding declaration in `body.local_decls`", local),
            );
        }

        if self.reachable_blocks.contains(location.block) && context.is_use() {
            // Uses of locals must occur while the local's storage is allocated.
            self.storage_liveness.seek_after_primary_effect(location);
            let locals_with_storage = self.storage_liveness.get();
            if !locals_with_storage.contains(*local) {
                self.fail(location, format!("use of local {:?}, which has no storage here", local));
            }
        }
    }
204 fn visit_operand(&mut self, operand
: &Operand
<'tcx
>, location
: Location
) {
205 // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
206 if self.tcx
.sess
.opts
.debugging_opts
.validate_mir
{
207 // `Operand::Copy` is only supposed to be used with `Copy` types.
208 if let Operand
::Copy(place
) = operand
{
209 let ty
= place
.ty(&self.body
.local_decls
, self.tcx
).ty
;
210 let span
= self.body
.source_info(location
).span
;
212 if !ty
.is_copy_modulo_regions(self.tcx
.at(span
), self.param_env
) {
213 self.fail(location
, format
!("`Operand::Copy` with non-`Copy` type {}", ty
));
218 self.super_operand(operand
, location
);
221 fn visit_projection_elem(
224 proj_base
: &[PlaceElem
<'tcx
>],
225 elem
: PlaceElem
<'tcx
>,
226 context
: PlaceContext
,
229 if let ProjectionElem
::Index(index
) = elem
{
230 let index_ty
= self.body
.local_decls
[index
].ty
;
231 if index_ty
!= self.tcx
.types
.usize {
232 self.fail(location
, format
!("bad index ({:?} != usize)", index_ty
))
235 self.super_projection_elem(local
, proj_base
, elem
, context
, location
);
238 fn visit_statement(&mut self, statement
: &Statement
<'tcx
>, location
: Location
) {
239 match &statement
.kind
{
240 StatementKind
::Assign(box (dest
, rvalue
)) => {
241 // LHS and RHS of the assignment must have the same type.
242 let left_ty
= dest
.ty(&self.body
.local_decls
, self.tcx
).ty
;
243 let right_ty
= rvalue
.ty(&self.body
.local_decls
, self.tcx
);
244 if !self.mir_assign_valid_types(right_ty
, left_ty
) {
248 "encountered `{:?}` with incompatible types:\n\
249 left-hand side has type: {}\n\
250 right-hand side has type: {}",
251 statement
.kind
, left_ty
, right_ty
,
256 // The sides of an assignment must not alias. Currently this just checks whether the places
258 Rvalue
::Use(Operand
::Copy(src
) | Operand
::Move(src
)) => {
262 "encountered `Assign` statement with overlapping memory",
266 // The deaggregator currently does not deaggreagate arrays.
267 // So for now, we ignore them here.
268 Rvalue
::Aggregate(box AggregateKind
::Array { .. }
, _
) => {}
269 // All other aggregates must be gone after some phases.
270 Rvalue
::Aggregate(box kind
, _
) => {
271 if self.mir_phase
> MirPhase
::DropLowering
272 && !matches
!(kind
, AggregateKind
::Generator(..))
274 // Generators persist until the state machine transformation, but all
275 // other aggregates must have been lowered.
278 format
!("{:?} have been lowered to field assignments", rvalue
),
280 } else if self.mir_phase
> MirPhase
::GeneratorLowering
{
281 // No more aggregates after drop and generator lowering.
284 format
!("{:?} have been lowered to field assignments", rvalue
),
288 Rvalue
::Ref(_
, BorrowKind
::Shallow
, _
) => {
289 if self.mir_phase
> MirPhase
::DropLowering
{
292 "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase",
299 StatementKind
::AscribeUserType(..) => {
300 if self.mir_phase
> MirPhase
::DropLowering
{
303 "`AscribeUserType` should have been removed after drop lowering phase",
307 StatementKind
::FakeRead(..) => {
308 if self.mir_phase
> MirPhase
::DropLowering
{
311 "`FakeRead` should have been removed after drop lowering phase",
315 StatementKind
::CopyNonOverlapping(box rustc_middle
::mir
::CopyNonOverlapping
{
320 let src_ty
= src
.ty(&self.body
.local_decls
, self.tcx
);
321 let op_src_ty
= if let Some(src_deref
) = src_ty
.builtin_deref(true) {
326 format
!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty
),
330 let dst_ty
= dst
.ty(&self.body
.local_decls
, self.tcx
);
331 let op_dst_ty
= if let Some(dst_deref
) = dst_ty
.builtin_deref(true) {
336 format
!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty
),
340 // since CopyNonOverlapping is parametrized by 1 type,
341 // we only need to check that they are equal and not keep an extra parameter.
342 if op_src_ty
!= op_dst_ty
{
343 self.fail(location
, format
!("bad arg ({:?} != {:?})", op_src_ty
, op_dst_ty
));
346 let op_cnt_ty
= count
.ty(&self.body
.local_decls
, self.tcx
);
347 if op_cnt_ty
!= self.tcx
.types
.usize {
348 self.fail(location
, format
!("bad arg ({:?} != usize)", op_cnt_ty
))
351 StatementKind
::SetDiscriminant { .. }
352 | StatementKind
::StorageLive(..)
353 | StatementKind
::StorageDead(..)
354 | StatementKind
::LlvmInlineAsm(..)
355 | StatementKind
::Retag(_
, _
)
356 | StatementKind
::Coverage(_
)
357 | StatementKind
::Nop
=> {}
360 self.super_statement(statement
, location
);
363 fn visit_terminator(&mut self, terminator
: &Terminator
<'tcx
>, location
: Location
) {
364 match &terminator
.kind
{
365 TerminatorKind
::Goto { target }
=> {
366 self.check_edge(location
, *target
, EdgeKind
::Normal
);
368 TerminatorKind
::SwitchInt { targets, switch_ty, discr }
=> {
369 let ty
= discr
.ty(&self.body
.local_decls
, self.tcx
);
370 if ty
!= *switch_ty
{
374 "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}",
380 let target_width
= self.tcx
.sess
.target
.pointer_width
;
382 let size
= Size
::from_bits(match switch_ty
.kind() {
383 ty
::Uint(uint
) => uint
.normalize(target_width
).bit_width().unwrap(),
384 ty
::Int(int
) => int
.normalize(target_width
).bit_width().unwrap(),
387 other
=> bug
!("unhandled type: {:?}", other
),
390 for (value
, target
) in targets
.iter() {
391 if Scalar
::<()>::try_from_uint(value
, size
).is_none() {
394 format
!("the value {:#x} is not a proper {:?}", value
, switch_ty
),
398 self.check_edge(location
, target
, EdgeKind
::Normal
);
400 self.check_edge(location
, targets
.otherwise(), EdgeKind
::Normal
);
402 TerminatorKind
::Drop { target, unwind, .. }
=> {
403 self.check_edge(location
, *target
, EdgeKind
::Normal
);
404 if let Some(unwind
) = unwind
{
405 self.check_edge(location
, *unwind
, EdgeKind
::Unwind
);
408 TerminatorKind
::DropAndReplace { target, unwind, .. }
=> {
409 if self.mir_phase
> MirPhase
::DropLowering
{
412 "`DropAndReplace` is not permitted to exist after drop elaboration",
415 self.check_edge(location
, *target
, EdgeKind
::Normal
);
416 if let Some(unwind
) = unwind
{
417 self.check_edge(location
, *unwind
, EdgeKind
::Unwind
);
420 TerminatorKind
::Call { func, args, destination, cleanup, .. }
=> {
421 let func_ty
= func
.ty(&self.body
.local_decls
, self.tcx
);
422 match func_ty
.kind() {
423 ty
::FnPtr(..) | ty
::FnDef(..) => {}
426 format
!("encountered non-callable type {} in `Call` terminator", func_ty
),
429 if let Some((_
, target
)) = destination
{
430 self.check_edge(location
, *target
, EdgeKind
::Normal
);
432 if let Some(cleanup
) = cleanup
{
433 self.check_edge(location
, *cleanup
, EdgeKind
::Unwind
);
436 // The call destination place and Operand::Move place used as an argument might be
437 // passed by a reference to the callee. Consequently they must be non-overlapping.
438 // Currently this simply checks for duplicate places.
439 self.place_cache
.clear();
440 if let Some((destination
, _
)) = destination
{
441 self.place_cache
.push(destination
.as_ref());
444 if let Operand
::Move(place
) = arg
{
445 self.place_cache
.push(place
.as_ref());
448 let all_len
= self.place_cache
.len();
449 self.place_cache
.sort_unstable();
450 self.place_cache
.dedup();
451 let has_duplicates
= all_len
!= self.place_cache
.len();
456 "encountered overlapping memory in `Call` terminator: {:?}",
462 TerminatorKind
::Assert { cond, target, cleanup, .. }
=> {
463 let cond_ty
= cond
.ty(&self.body
.local_decls
, self.tcx
);
464 if cond_ty
!= self.tcx
.types
.bool
{
468 "encountered non-boolean condition of type {} in `Assert` terminator",
473 self.check_edge(location
, *target
, EdgeKind
::Normal
);
474 if let Some(cleanup
) = cleanup
{
475 self.check_edge(location
, *cleanup
, EdgeKind
::Unwind
);
478 TerminatorKind
::Yield { resume, drop, .. }
=> {
479 if self.mir_phase
> MirPhase
::GeneratorLowering
{
480 self.fail(location
, "`Yield` should have been replaced by generator lowering");
482 self.check_edge(location
, *resume
, EdgeKind
::Normal
);
483 if let Some(drop
) = drop
{
484 self.check_edge(location
, *drop
, EdgeKind
::Normal
);
487 TerminatorKind
::FalseEdge { real_target, imaginary_target }
=> {
488 self.check_edge(location
, *real_target
, EdgeKind
::Normal
);
489 self.check_edge(location
, *imaginary_target
, EdgeKind
::Normal
);
491 TerminatorKind
::FalseUnwind { real_target, unwind }
=> {
492 self.check_edge(location
, *real_target
, EdgeKind
::Normal
);
493 if let Some(unwind
) = unwind
{
494 self.check_edge(location
, *unwind
, EdgeKind
::Unwind
);
497 TerminatorKind
::InlineAsm { destination, .. }
=> {
498 if let Some(destination
) = destination
{
499 self.check_edge(location
, *destination
, EdgeKind
::Normal
);
502 // Nothing to validate for these.
503 TerminatorKind
::Resume
504 | TerminatorKind
::Abort
505 | TerminatorKind
::Return
506 | TerminatorKind
::Unreachable
507 | TerminatorKind
::GeneratorDrop
=> {}
510 self.super_terminator(terminator
, location
);
513 fn visit_source_scope(&mut self, scope
: &SourceScope
) {
514 if self.body
.source_scopes
.get(*scope
).is_none() {
515 self.tcx
.sess
.diagnostic().delay_span_bug(
518 "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
519 self.body
.source
.instance
, self.when
, scope
,