//! Validates the MIR to ensure that invariants are upheld.

use rustc_index::bit_set::BitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::traversal;
use rustc_middle::mir::visit::{PlaceContext, Visitor};
use rustc_middle::mir::{
    AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPass, MirPhase, Operand,
    PlaceElem, PlaceRef, ProjectionElem, Rvalue, SourceScope, Statement, StatementKind, Terminator,
    TerminatorKind, START_BLOCK,
};
use rustc_middle::ty::fold::BottomUpFolder;
use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeFoldable};
use rustc_mir_dataflow::impls::MaybeStorageLive;
use rustc_mir_dataflow::storage::AlwaysLiveLocals;
use rustc_mir_dataflow::{Analysis, ResultsCursor};
use rustc_target::abi::Size;
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    Normal,
    Unwind,
}

pub struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    pub when: String,
    /// The phase for which we are upholding the dialect. If the given phase forbids a specific
    /// element, this validator will now emit errors if that specific element is encountered.
    /// Note that phases that change the dialect cause all *following* phases to check the
    /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
    /// itself.
    pub mir_phase: MirPhase,
}
impl<'tcx> MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        let def_id = body.source.def_id();
        let param_env = tcx.param_env(def_id);
        let mir_phase = self.mir_phase;

        let always_live_locals = AlwaysLiveLocals::new(body);
        let storage_liveness = MaybeStorageLive::new(always_live_locals)
            .into_engine(tcx, body)
            .iterate_to_fixpoint()
            .into_results_cursor(body);

        TypeChecker {
            when: &self.when,
            body,
            tcx,
            param_env,
            mir_phase,
            reachable_blocks: traversal::reachable_as_bitset(body),
            storage_liveness,
            place_cache: Vec::new(),
            value_cache: Vec::new(),
        }
        .visit_body(body);
    }
}
/// Returns whether the two types are equal up to lifetimes.
/// All lifetimes, including higher-ranked ones, get ignored for this comparison.
/// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.)
///
/// The point of this function is to approximate "equal up to subtyping". However,
/// the approximation is incorrect as variance is ignored.
pub fn equal_up_to_regions<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    src: Ty<'tcx>,
    dest: Ty<'tcx>,
) -> bool {
    // Fast path.
    if src == dest {
        return true;
    }

    // Normalize lifetimes away on both sides, then compare.
    let param_env = param_env.with_reveal_all_normalized(tcx);
    let normalize = |ty: Ty<'tcx>| {
        tcx.normalize_erasing_regions(
            param_env,
            ty.fold_with(&mut BottomUpFolder {
                tcx,
                // FIXME: We erase all late-bound lifetimes, but this is not fully correct.
                // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`,
                // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`,
                // since one may have an `impl SomeTrait for fn(&u32)` and
                // `impl SomeTrait for fn(&'static u32)` at the same time which
                // specify distinct values for Assoc. (See also #56105)
                lt_op: |_| tcx.lifetimes.re_erased,
                // Leave consts and types unchanged.
                ct_op: |ct| ct,
                ty_op: |ty| ty,
            }),
        )
    };
    tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok())
}
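
// For example, this comparison treats `for<'a> fn(&'a u32)` and `fn(&'static u32)` as
// equal: `lt_op` above maps every lifetime, late-bound ones included, to `re_erased`
// before `can_eq` runs, which is exactly the approximation the FIXME above warns about.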
struct TypeChecker<'a, 'tcx> {
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    mir_phase: MirPhase,
    reachable_blocks: BitSet<BasicBlock>,
    storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
    place_cache: Vec<PlaceRef<'tcx>>,
    value_cache: Vec<u128>,
}
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        let span = self.body.source_info(location).span;
        // We use `delay_span_bug` as we might see broken MIR when other errors have already
        // been emitted.
        self.tcx.sess.diagnostic().delay_span_bug(
            span,
            &format!(
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref()
            ),
        );
    }
    fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks().get(bb) {
            let src = self.body.basic_blocks().get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
                (false, false, EdgeKind::Normal)
                // Non-cleanup blocks can jump to cleanup blocks along unwind edges
                | (false, true, EdgeKind::Unwind)
                // Cleanup blocks can jump to cleanup blocks along non-unwind edges
                | (true, true, EdgeKind::Normal) => {}
                // All other jumps are invalid
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind, bb, src.is_cleanup, bb.is_cleanup,
                        ),
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {:?}", bb))
        }
    }
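
    // Put differently, the match above rejects exactly: unwind edges whose target is not
    // a cleanup block, unwind edges that originate in a cleanup block, normal edges from
    // a cleanup block back into non-cleanup code, and normal edges from non-cleanup code
    // into a cleanup block.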
    /// Check if src can be assigned into dest.
    /// This is not precise, it will accept some incorrect assignments.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path before we normalize.
        if src == dest {
            // Equal types, all is good.
            return true;
        }
        // Normalize projections and things like that.
        // FIXME: We need to reveal_all, as some optimizations change types in ways
        // that require unfolding opaque types.
        let param_env = self.param_env.with_reveal_all_normalized(self.tcx);
        let src = self.tcx.normalize_erasing_regions(param_env, src);
        let dest = self.tcx.normalize_erasing_regions(param_env, dest);

        // Type-changing assignments can happen when subtyping is used. While
        // all normal lifetimes are erased, higher-ranked types with their
        // late-bound lifetimes are still around and can lead to type
        // differences. So we compare ignoring lifetimes.
        equal_up_to_regions(self.tcx, param_env, src, dest)
    }
}
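
// Illustrative case (hypothetical MIR, not taken from the compiler's tests): assigning a
// value of type `for<'a> fn(&'a u32)` to a place typed `fn(&'static u32)` is accepted by
// `mir_assign_valid_types` above. Erasing regions leaves the late-bound `'a` in place, so
// the two sides are not literally equal, but `equal_up_to_regions` ignores the remaining
// lifetime difference, which is exactly the subtyping situation described in its comment.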
impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) {
        if self.body.local_decls.get(*local).is_none() {
            self.fail(
                location,
                format!("local {:?} has no corresponding declaration in `body.local_decls`", local),
            );
        }

        if self.reachable_blocks.contains(location.block) && context.is_use() {
            // Uses of locals must occur while the local's storage is allocated.
            self.storage_liveness.seek_after_primary_effect(location);
            let locals_with_storage = self.storage_liveness.get();
            if !locals_with_storage.contains(*local) {
                self.fail(location, format!("use of local {:?}, which has no storage here", local));
            }
        }
    }
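
    // For example (hypothetical MIR): a use of `_3` after `StorageDead(_3)` and before
    // the next `StorageLive(_3)` falls at a point where the `MaybeStorageLive` results
    // no longer contain `_3`, so the storage check above reports it.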
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
        if self.tcx.sess.opts.debugging_opts.validate_mir {
            // `Operand::Copy` is only supposed to be used with `Copy` types.
            if let Operand::Copy(place) = operand {
                let ty = place.ty(&self.body.local_decls, self.tcx).ty;
                let span = self.body.source_info(location).span;

                if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) {
                    self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty));
                }
            }
        }

        self.super_operand(operand, location);
    }
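
    // For example, `Operand::Copy` of a place whose type is `String` is rejected by the
    // check above, since `String` does not implement `Copy`; note the whole check only
    // runs when `-Zvalidate-mir` is passed.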
    fn visit_projection_elem(
        &mut self,
        local: Local,
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        if let ProjectionElem::Index(index) = elem {
            let index_ty = self.body.local_decls[index].ty;
            if index_ty != self.tcx.types.usize {
                self.fail(location, format!("bad index ({:?} != usize)", index_ty))
            }
        }
        self.super_projection_elem(local, proj_base, elem, context, location);
    }
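
    // For example (hypothetical MIR): the place `_1[_2]` is only well-formed when the
    // index local `_2` is declared as `usize`; an `i32`-typed index would trip the
    // check above.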
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (dest, rvalue)) => {
                // LHS and RHS of the assignment must have the same type.
                let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
                let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
                if !self.mir_assign_valid_types(right_ty, left_ty) {
                    self.fail(
                        location,
                        format!(
                            "encountered `{:?}` with incompatible types:\n\
                            left-hand side has type: {}\n\
                            right-hand side has type: {}",
                            statement.kind, left_ty, right_ty,
                        ),
                    );
                }
                // The sides of an assignment must not alias. Currently this just checks whether
                // the places are identical.
                match rvalue {
                    Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) => {
                        if dest == src {
                            self.fail(
                                location,
                                "encountered `Assign` statement with overlapping memory",
                            );
                        }
                    }
                    // The deaggregator currently does not deaggregate arrays.
                    // So for now, we ignore them here.
                    Rvalue::Aggregate(box AggregateKind::Array { .. }, _) => {}
                    // All other aggregates must be gone after some phases.
                    Rvalue::Aggregate(box kind, _) => {
                        if self.mir_phase > MirPhase::DropLowering
                            && !matches!(kind, AggregateKind::Generator(..))
                        {
                            // Generators persist until the state machine transformation, but all
                            // other aggregates must have been lowered.
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        } else if self.mir_phase > MirPhase::GeneratorLowering {
                            // No more aggregates after drop and generator lowering.
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        }
                    }
                    Rvalue::Ref(_, BorrowKind::Shallow, _) => {
                        if self.mir_phase > MirPhase::DropLowering {
                            self.fail(
                                location,
                                "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase",
                            );
                        }
                    }
                    _ => {}
                }
            }
            StatementKind::AscribeUserType(..) => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping {
                ref src,
                ref dst,
                ref count,
            }) => {
                let src_ty = src.ty(&self.body.local_decls, self.tcx);
                let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
                    src_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty),
                    );
                    return;
                };
                let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
                let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
                    dst_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty),
                    );
                    return;
                };
                // since CopyNonOverlapping is parametrized by 1 type,
                // we only need to check that they are equal and not keep an extra parameter.
                if op_src_ty != op_dst_ty {
                    self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty));
                }

                let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
                if op_cnt_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty))
                }
            }
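            // Sketch of what the `CopyNonOverlapping` checks above require (argument types
            // are illustrative): `src: *const T` and `dst: *mut T` for the same `T` (both
            // operands must `builtin_deref` and their pointee types must match), plus
            // `count: usize`.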
            StatementKind::SetDiscriminant { .. }
            | StatementKind::StorageLive(..)
            | StatementKind::StorageDead(..)
            | StatementKind::Retag(_, _)
            | StatementKind::Coverage(_)
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, switch_ty, discr } => {
                let ty = discr.ty(&self.body.local_decls, self.tcx);
                if ty != *switch_ty {
                    self.fail(
                        location,
                        format!(
                            "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}",
                            ty, switch_ty,
                        ),
                    );
                }

                let target_width = self.tcx.sess.target.pointer_width;

                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });

                for (value, target) in targets.iter() {
                    if Scalar::<()>::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {:#x} is not a proper {:?}", value, switch_ty),
                        )
                    }

                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);

                self.value_cache.clear();
                self.value_cache.extend(targets.iter().map(|(value, _)| value));
                let all_len = self.value_cache.len();
                self.value_cache.sort_unstable();
                self.value_cache.dedup();
                let has_duplicates = all_len != self.value_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "duplicated values in `SwitchInt` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Drop { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::DropAndReplace { target, unwind, .. } => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`DropAndReplace` is not permitted to exist after drop elaboration",
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Call { func, args, destination, cleanup, .. } => {
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!("encountered non-callable type {} in `Call` terminator", func_ty),
                    ),
                }
                if let Some((_, target)) = destination {
                    self.check_edge(location, *target, EdgeKind::Normal);
                }
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }

                // The call destination place and Operand::Move place used as an argument might be
                // passed by a reference to the callee. Consequently they must be non-overlapping.
                // Currently this simply checks for duplicate places.
                self.place_cache.clear();
                if let Some((destination, _)) = destination {
                    self.place_cache.push(destination.as_ref());
                }
                for arg in args {
                    if let Operand::Move(place) = arg {
                        self.place_cache.push(place.as_ref());
                    }
                }
                let all_len = self.place_cache.len();
                self.place_cache.sort_unstable();
                self.place_cache.dedup();
                let has_duplicates = all_len != self.place_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "encountered overlapping memory in `Call` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Assert { cond, target, cleanup, .. } => {
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {} in `Assert` terminator",
                            cond_ty,
                        ),
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.mir_phase > MirPhase::GeneratorLowering {
                    self.fail(location, "`Yield` should have been replaced by generator lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                self.check_edge(location, *real_target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::InlineAsm { destination, cleanup, .. } => {
                if let Some(destination) = destination {
                    self.check_edge(location, *destination, EdgeKind::Normal);
                }
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }
            }
            // Nothing to validate for these.
            TerminatorKind::Resume
            | TerminatorKind::Abort
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::GeneratorDrop => {}
        }

        self.super_terminator(terminator, location);
    }
    fn visit_source_scope(&mut self, scope: &SourceScope) {
        if self.body.source_scopes.get(*scope).is_none() {
            self.tcx.sess.diagnostic().delay_span_bug(
                self.body.span,
                &format!(
                    "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
                    self.body.source.instance, self.when, scope,
                ),
            );
        }
    }
}