]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_const_eval/src/transform/validate.rs
New upstream version 1.60.0+dfsg1
[rustc.git] / compiler / rustc_const_eval / src / transform / validate.rs
1 //! Validates the MIR to ensure that invariants are upheld.
2
3 use rustc_index::bit_set::BitSet;
4 use rustc_infer::infer::TyCtxtInferExt;
5 use rustc_middle::mir::interpret::Scalar;
6 use rustc_middle::mir::traversal;
7 use rustc_middle::mir::visit::{PlaceContext, Visitor};
8 use rustc_middle::mir::{
9 AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPass, MirPhase, Operand,
10 PlaceElem, PlaceRef, ProjectionElem, Rvalue, SourceScope, Statement, StatementKind, Terminator,
11 TerminatorKind, START_BLOCK,
12 };
13 use rustc_middle::ty::fold::BottomUpFolder;
14 use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeFoldable};
15 use rustc_mir_dataflow::impls::MaybeStorageLive;
16 use rustc_mir_dataflow::storage::AlwaysLiveLocals;
17 use rustc_mir_dataflow::{Analysis, ResultsCursor};
18 use rustc_target::abi::Size;
19
/// Classifies a control-flow edge for the purpose of checking the
/// cleanup/unwind invariants in `TypeChecker::check_edge`.
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    /// An edge taken when unwinding (e.g. the `unwind`/`cleanup` target of a
    /// `Drop`, `Call`, or `Assert` terminator).
    Unwind,
    /// An ordinary (non-unwinding) control-flow edge.
    Normal,
}
25
/// A MIR pass that checks the body for broken invariants and reports any
/// violation as a delayed span bug (see `TypeChecker::fail`).
pub struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    pub when: String,
    /// The phase for which we are upholding the dialect. If the given phase forbids a specific
    /// element, this validator will now emit errors if that specific element is encountered.
    /// Note that phases that change the dialect cause all *following* phases to check the
    /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
    /// itself.
    pub mir_phase: MirPhase,
}
36
37 impl<'tcx> MirPass<'tcx> for Validator {
38 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
39 let def_id = body.source.def_id();
40 let param_env = tcx.param_env(def_id);
41 let mir_phase = self.mir_phase;
42
43 let always_live_locals = AlwaysLiveLocals::new(body);
44 let storage_liveness = MaybeStorageLive::new(always_live_locals)
45 .into_engine(tcx, body)
46 .iterate_to_fixpoint()
47 .into_results_cursor(body);
48
49 TypeChecker {
50 when: &self.when,
51 body,
52 tcx,
53 param_env,
54 mir_phase,
55 reachable_blocks: traversal::reachable_as_bitset(body),
56 storage_liveness,
57 place_cache: Vec::new(),
58 value_cache: Vec::new(),
59 }
60 .visit_body(body);
61 }
62 }
63
64 /// Returns whether the two types are equal up to lifetimes.
65 /// All lifetimes, including higher-ranked ones, get ignored for this comparison.
66 /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.)
67 ///
68 /// The point of this function is to approximate "equal up to subtyping". However,
69 /// the approximation is incorrect as variance is ignored.
70 pub fn equal_up_to_regions<'tcx>(
71 tcx: TyCtxt<'tcx>,
72 param_env: ParamEnv<'tcx>,
73 src: Ty<'tcx>,
74 dest: Ty<'tcx>,
75 ) -> bool {
76 // Fast path.
77 if src == dest {
78 return true;
79 }
80
81 // Normalize lifetimes away on both sides, then compare.
82 let param_env = param_env.with_reveal_all_normalized(tcx);
83 let normalize = |ty: Ty<'tcx>| {
84 tcx.normalize_erasing_regions(
85 param_env,
86 ty.fold_with(&mut BottomUpFolder {
87 tcx,
88 // FIXME: We erase all late-bound lifetimes, but this is not fully correct.
89 // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`,
90 // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`,
91 // since one may have an `impl SomeTrait for fn(&32)` and
92 // `impl SomeTrait for fn(&'static u32)` at the same time which
93 // specify distinct values for Assoc. (See also #56105)
94 lt_op: |_| tcx.lifetimes.re_erased,
95 // Leave consts and types unchanged.
96 ct_op: |ct| ct,
97 ty_op: |ty| ty,
98 }),
99 )
100 };
101 tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok())
102 }
103
/// Walks a MIR body and reports (delayed) bugs for every dialect violation it
/// finds. Constructed and driven by `Validator::run_pass`.
struct TypeChecker<'a, 'tcx> {
    // Human-readable description of the pipeline point at which we validate;
    // included in every error message.
    when: &'a str,
    // The body being validated (read-only).
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    // Dialect whose invariants we enforce; see `Validator::mir_phase`.
    mir_phase: MirPhase,
    // Blocks reachable from `START_BLOCK`; storage-liveness checks in
    // `visit_local` apply only to these.
    reachable_blocks: BitSet<BasicBlock>,
    // Cursor over the storage-liveness dataflow results.
    storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
    // Scratch buffer reused when checking `Call` terminators for overlapping
    // destination/argument places.
    place_cache: Vec<PlaceRef<'tcx>>,
    // Scratch buffer reused when checking `SwitchInt` targets for duplicates.
    value_cache: Vec<u128>,
}
115
116 impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
117 fn fail(&self, location: Location, msg: impl AsRef<str>) {
118 let span = self.body.source_info(location).span;
119 // We use `delay_span_bug` as we might see broken MIR when other errors have already
120 // occurred.
121 self.tcx.sess.diagnostic().delay_span_bug(
122 span,
123 &format!(
124 "broken MIR in {:?} ({}) at {:?}:\n{}",
125 self.body.source.instance,
126 self.when,
127 location,
128 msg.as_ref()
129 ),
130 );
131 }
132
133 fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
134 if bb == START_BLOCK {
135 self.fail(location, "start block must not have predecessors")
136 }
137 if let Some(bb) = self.body.basic_blocks().get(bb) {
138 let src = self.body.basic_blocks().get(location.block).unwrap();
139 match (src.is_cleanup, bb.is_cleanup, edge_kind) {
140 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
141 (false, false, EdgeKind::Normal)
142 // Non-cleanup blocks can jump to cleanup blocks along unwind edges
143 | (false, true, EdgeKind::Unwind)
144 // Cleanup blocks can jump to cleanup blocks along non-unwind edges
145 | (true, true, EdgeKind::Normal) => {}
146 // All other jumps are invalid
147 _ => {
148 self.fail(
149 location,
150 format!(
151 "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
152 edge_kind,
153 bb,
154 src.is_cleanup,
155 bb.is_cleanup,
156 )
157 )
158 }
159 }
160 } else {
161 self.fail(location, format!("encountered jump to invalid basic block {:?}", bb))
162 }
163 }
164
165 /// Check if src can be assigned into dest.
166 /// This is not precise, it will accept some incorrect assignments.
167 fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
168 // Fast path before we normalize.
169 if src == dest {
170 // Equal types, all is good.
171 return true;
172 }
173 // Normalize projections and things like that.
174 // FIXME: We need to reveal_all, as some optimizations change types in ways
175 // that require unfolding opaque types.
176 let param_env = self.param_env.with_reveal_all_normalized(self.tcx);
177 let src = self.tcx.normalize_erasing_regions(param_env, src);
178 let dest = self.tcx.normalize_erasing_regions(param_env, dest);
179
180 // Type-changing assignments can happen when subtyping is used. While
181 // all normal lifetimes are erased, higher-ranked types with their
182 // late-bound lifetimes are still around and can lead to type
183 // differences. So we compare ignoring lifetimes.
184 equal_up_to_regions(self.tcx, param_env, src, dest)
185 }
186 }
187
impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    /// Checks that every mentioned local has a declaration and, in reachable
    /// code, that its storage is live at the point of use.
    fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) {
        if self.body.local_decls.get(*local).is_none() {
            self.fail(
                location,
                format!("local {:?} has no corresponding declaration in `body.local_decls`", local),
            );
        }

        if self.reachable_blocks.contains(location.block) && context.is_use() {
            // Uses of locals must occur while the local's storage is allocated.
            self.storage_liveness.seek_after_primary_effect(location);
            let locals_with_storage = self.storage_liveness.get();
            if !locals_with_storage.contains(*local) {
                self.fail(location, format!("use of local {:?}, which has no storage here", local));
            }
        }
    }

    /// Checks that `Operand::Copy` is only applied to `Copy` types.
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
        if self.tcx.sess.opts.debugging_opts.validate_mir {
            // `Operand::Copy` is only supposed to be used with `Copy` types.
            if let Operand::Copy(place) = operand {
                let ty = place.ty(&self.body.local_decls, self.tcx).ty;
                let span = self.body.source_info(location).span;

                if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) {
                    self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty));
                }
            }
        }

        self.super_operand(operand, location);
    }

    /// Checks projection-specific invariants; currently only that the operand
    /// of an `Index` projection has type `usize`.
    fn visit_projection_elem(
        &mut self,
        local: Local,
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        if let ProjectionElem::Index(index) = elem {
            let index_ty = self.body.local_decls[index].ty;
            if index_ty != self.tcx.types.usize {
                self.fail(location, format!("bad index ({:?} != usize)", index_ty))
            }
        }
        self.super_projection_elem(local, proj_base, elem, context, location);
    }

    /// Validates a single statement: type agreement of assignments, aliasing
    /// rules, phase-dependent restrictions (e.g. no aggregates or `FakeRead`
    /// after drop lowering), and `CopyNonOverlapping` operand types.
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (dest, rvalue)) => {
                // LHS and RHS of the assignment must have the same type.
                let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
                let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);
                if !self.mir_assign_valid_types(right_ty, left_ty) {
                    self.fail(
                        location,
                        format!(
                            "encountered `{:?}` with incompatible types:\n\
                            left-hand side has type: {}\n\
                            right-hand side has type: {}",
                            statement.kind, left_ty, right_ty,
                        ),
                    );
                }
                match rvalue {
                    // The sides of an assignment must not alias. Currently this just checks whether the places
                    // are identical.
                    Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) => {
                        if dest == src {
                            self.fail(
                                location,
                                "encountered `Assign` statement with overlapping memory",
                            );
                        }
                    }
                    // The deaggregator currently does not deaggreagate arrays.
                    // So for now, we ignore them here.
                    Rvalue::Aggregate(box AggregateKind::Array { .. }, _) => {}
                    // All other aggregates must be gone after some phases.
                    Rvalue::Aggregate(box kind, _) => {
                        if self.mir_phase > MirPhase::DropLowering
                            && !matches!(kind, AggregateKind::Generator(..))
                        {
                            // Generators persist until the state machine transformation, but all
                            // other aggregates must have been lowered.
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        } else if self.mir_phase > MirPhase::GeneratorLowering {
                            // No more aggregates after drop and generator lowering.
                            self.fail(
                                location,
                                format!("{:?} have been lowered to field assignments", rvalue),
                            )
                        }
                    }
                    Rvalue::Ref(_, BorrowKind::Shallow, _) => {
                        if self.mir_phase > MirPhase::DropLowering {
                            self.fail(
                                location,
                                "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase",
                            );
                        }
                    }
                    _ => {}
                }
            }
            StatementKind::AscribeUserType(..) => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping {
                ref src,
                ref dst,
                ref count,
            }) => {
                // `src` and `dst` must both be pointers (with the same pointee
                // type, checked below), and `count` must be a `usize`.
                let src_ty = src.ty(&self.body.local_decls, self.tcx);
                let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
                    src_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty),
                    );
                    return;
                };
                let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
                let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
                    dst_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty),
                    );
                    return;
                };
                // since CopyNonOverlapping is parametrized by 1 type,
                // we only need to check that they are equal and not keep an extra parameter.
                if op_src_ty != op_dst_ty {
                    self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty));
                }

                let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
                if op_cnt_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty))
                }
            }
            // These statement kinds have no invariants to check here.
            StatementKind::SetDiscriminant { .. }
            | StatementKind::StorageLive(..)
            | StatementKind::StorageDead(..)
            | StatementKind::Retag(_, _)
            | StatementKind::Coverage(_)
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

    /// Validates a terminator: discriminant/condition types, phase-dependent
    /// restrictions, CFG edge invariants (via `check_edge`), duplicate
    /// `SwitchInt` values, and non-overlap of call arguments with the call
    /// destination.
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, switch_ty, discr } => {
                let ty = discr.ty(&self.body.local_decls, self.tcx);
                if ty != *switch_ty {
                    self.fail(
                        location,
                        format!(
                            "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}",
                            ty, switch_ty,
                        ),
                    );
                }

                let target_width = self.tcx.sess.target.pointer_width;

                // Size of the switched-on type, used below to check that every
                // branch value actually fits in that type.
                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });

                for (value, target) in targets.iter() {
                    if Scalar::<()>::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {:#x} is not a proper {:?}", value, switch_ty),
                        )
                    }

                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);

                // Detect duplicate branch values via sort + dedup on a reused
                // scratch buffer (cheaper than building a hash set each time).
                self.value_cache.clear();
                self.value_cache.extend(targets.iter().map(|(value, _)| value));
                let all_len = self.value_cache.len();
                self.value_cache.sort_unstable();
                self.value_cache.dedup();
                let has_duplicates = all_len != self.value_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "duplicated values in `SwitchInt` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Drop { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::DropAndReplace { target, unwind, .. } => {
                if self.mir_phase > MirPhase::DropLowering {
                    self.fail(
                        location,
                        "`DropAndReplace` is not permitted to exist after drop elaboration",
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Call { func, args, destination, cleanup, .. } => {
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!("encountered non-callable type {} in `Call` terminator", func_ty),
                    ),
                }
                if let Some((_, target)) = destination {
                    self.check_edge(location, *target, EdgeKind::Normal);
                }
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }

                // The call destination place and Operand::Move place used as an argument might be
                // passed by a reference to the callee. Consequently they must be non-overlapping.
                // Currently this simply checks for duplicate places.
                self.place_cache.clear();
                if let Some((destination, _)) = destination {
                    self.place_cache.push(destination.as_ref());
                }
                for arg in args {
                    if let Operand::Move(place) = arg {
                        self.place_cache.push(place.as_ref());
                    }
                }
                let all_len = self.place_cache.len();
                self.place_cache.sort_unstable();
                self.place_cache.dedup();
                let has_duplicates = all_len != self.place_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "encountered overlapping memory in `Call` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Assert { cond, target, cleanup, .. } => {
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {} in `Assert` terminator",
                            cond_ty
                        ),
                    );
                }
                self.check_edge(location, *target, EdgeKind::Normal);
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.mir_phase > MirPhase::GeneratorLowering {
                    self.fail(location, "`Yield` should have been replaced by generator lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                self.check_edge(location, *real_target, EdgeKind::Normal);
                if let Some(unwind) = unwind {
                    self.check_edge(location, *unwind, EdgeKind::Unwind);
                }
            }
            TerminatorKind::InlineAsm { destination, cleanup, .. } => {
                if let Some(destination) = destination {
                    self.check_edge(location, *destination, EdgeKind::Normal);
                }
                if let Some(cleanup) = cleanup {
                    self.check_edge(location, *cleanup, EdgeKind::Unwind);
                }
            }
            // Nothing to validate for these.
            TerminatorKind::Resume
            | TerminatorKind::Abort
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::GeneratorDrop => {}
        }

        self.super_terminator(terminator, location);
    }

    /// Checks that every referenced source scope exists in the body.
    /// There is no `Location` available here, so the delayed bug is emitted
    /// directly against the body's span instead of going through `fail`.
    fn visit_source_scope(&mut self, scope: &SourceScope) {
        if self.body.source_scopes.get(*scope).is_none() {
            self.tcx.sess.diagnostic().delay_span_bug(
                self.body.span,
                &format!(
                    "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
                    self.body.source.instance, self.when, scope,
                ),
            );
        }
    }
}