]>
Commit | Line | Data |
---|---|---|
f9f354fc XL |
1 | //! Validates the MIR to ensure that invariants are upheld. |
2 | ||
9c376795 | 3 | use rustc_data_structures::fx::{FxHashMap, FxHashSet}; |
29967ef6 | 4 | use rustc_index::bit_set::BitSet; |
9c376795 | 5 | use rustc_index::vec::IndexVec; |
487cf647 | 6 | use rustc_infer::traits::Reveal; |
29967ef6 | 7 | use rustc_middle::mir::interpret::Scalar; |
923072b8 | 8 | use rustc_middle::mir::visit::NonUseContext::VarDebugInfo; |
1b1a35ee XL |
9 | use rustc_middle::mir::visit::{PlaceContext, Visitor}; |
10 | use rustc_middle::mir::{ | |
9ffffee4 FG |
11 | traversal, BasicBlock, BinOp, Body, BorrowKind, CastKind, CopyNonOverlapping, Local, Location, |
12 | MirPass, MirPhase, NonDivergingIntrinsic, Operand, Place, PlaceElem, PlaceRef, ProjectionElem, | |
13 | RetagKind, RuntimePhase, Rvalue, SourceScope, Statement, StatementKind, Terminator, | |
14 | TerminatorKind, UnOp, START_BLOCK, | |
f9f354fc | 15 | }; |
9ffffee4 | 16 | use rustc_middle::ty::{self, InstanceDef, ParamEnv, Ty, TyCtxt, TypeVisitableExt}; |
c295e0f8 | 17 | use rustc_mir_dataflow::impls::MaybeStorageLive; |
064997fb | 18 | use rustc_mir_dataflow::storage::always_storage_live_locals; |
c295e0f8 | 19 | use rustc_mir_dataflow::{Analysis, ResultsCursor}; |
04454e1e | 20 | use rustc_target::abi::{Size, VariantIdx}; |
f035d41b | 21 | |
/// Classifies a control-flow edge between two basic blocks for the purpose of
/// validating cleanup invariants in `TypeChecker::check_edge`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    /// An edge taken only while unwinding (e.g. a terminator's `unwind` target).
    Unwind,
    /// An ordinary, non-unwinding control-flow edge.
    Normal,
}
f9f354fc XL |
27 | |
/// A MIR pass that validates a body, checking that it upholds the invariants
/// of the dialect given by `mir_phase` (see the module docs).
pub struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    pub when: String,
    /// The phase for which we are upholding the dialect. If the given phase forbids a specific
    /// element, this validator will now emit errors if that specific element is encountered.
    /// Note that phases that change the dialect cause all *following* phases to check the
    /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
    /// itself.
    pub mir_phase: MirPhase,
}
38 | ||
impl<'tcx> MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // FIXME(JakobDegen): These bodies are never instantiated in codegen anyway, so it's not
        // terribly important that they pass the validator. However, I think other passes might
        // still see them, in which case they might be surprised. It would probably be better if we
        // didn't put this through the MIR pipeline at all.
        if matches!(body.source.instance, InstanceDef::Intrinsic(..) | InstanceDef::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let mir_phase = self.mir_phase;
        // Select the param-env that matches the phase's opaque-type reveal mode.
        let param_env = match mir_phase.reveal() {
            Reveal::UserFacing => tcx.param_env(def_id),
            Reveal::All => tcx.param_env_reveal_all_normalized(def_id),
        };

        // Run the storage-liveness dataflow analysis up front; the checker's
        // cursor queries it when validating uses of locals.
        let always_live_locals = always_storage_live_locals(body);
        let storage_liveness = MaybeStorageLive::new(std::borrow::Cow::Owned(always_live_locals))
            .into_engine(tcx, body)
            .iterate_to_fixpoint()
            .into_results_cursor(body);

        let mut checker = TypeChecker {
            when: &self.when,
            body,
            tcx,
            param_env,
            mir_phase,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            storage_liveness,
            place_cache: Vec::new(),
            value_cache: Vec::new(),
        };
        checker.visit_body(body);
        // The cleanup-forest check needs the unwind edges counted during the
        // body visit, so it runs afterwards.
        checker.check_cleanup_control_flow();
    }
}
77 | ||
/// The visitor that performs the actual per-statement / per-terminator validation.
struct TypeChecker<'a, 'tcx> {
    /// Human-readable description of when in the pipeline validation runs
    /// (borrowed from `Validator::when`); included in failure messages.
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    /// The dialect whose invariants are being checked; see `Validator::mir_phase`.
    mir_phase: MirPhase,
    /// Number of unwind edges seen so far; `check_cleanup_control_flow` bails
    /// out early when there is at most one.
    unwind_edge_count: usize,
    /// Blocks reachable from the start block; storage-liveness checks only
    /// apply inside these.
    reachable_blocks: BitSet<BasicBlock>,
    /// Dataflow cursor used to check that locals have storage where they are used.
    storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive<'static>>,
    place_cache: Vec<PlaceRef<'tcx>>,
    /// Scratch buffer used to detect duplicate values in `SwitchInt` targets.
    value_cache: Vec<u128>,
}
90 | ||
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// Reports a validation failure at `location`, tagged with the body,
    /// pipeline point, and location for context.
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        let span = self.body.source_info(location).span;
        // We use `delay_span_bug` as we might see broken MIR when other errors have already
        // occurred.
        self.tcx.sess.diagnostic().delay_span_bug(
            span,
            &format!(
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref()
            ),
        );
    }

    /// Checks that the edge from `location`'s block to `bb` (of kind
    /// `edge_kind`) respects the cleanup-block invariants, and counts unwind
    /// edges for `check_cleanup_control_flow`.
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
                (false, false, EdgeKind::Normal)
                // Cleanup blocks can jump to cleanup blocks along non-unwind edges
                | (true, true, EdgeKind::Normal) => {}
                // Non-cleanup blocks can jump to cleanup blocks along unwind edges
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                // All other jumps are invalid
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {:?}", bb))
        }
    }

    /// Checks that cleanup control flow is well-formed: after contracting
    /// dominator chains of cleanup blocks, each contracted node must have at
    /// most one successor ("parent"), and following parents must not cycle.
    fn check_cleanup_control_flow(&self) {
        // A single unwind edge cannot violate the invariants; skip the work.
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Reusing the allocation across invocations of the closure
        let mut dom_path = vec![];
        // Maps `bb` to the topmost cleanup block in its chain of immediate
        // dominators, memoizing results for every block on the walked path.
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb);
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        // Record, for each contracted cleanup node, its single outgoing
        // contracted successor; two distinct successors is a violation.
        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Check for cycles
        let mut stack = FxHashSet::default();
        for i in 0..parent.len() {
            let mut bb = BasicBlock::from_usize(i);
            stack.clear();
            stack.insert(bb);
            loop {
                // `take` marks the edge as visited so each edge is walked at
                // most once across all outer iterations.
                let Some(parent) = parent[bb].take() else {
                    break
                };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {:?} -> {:?}",
                            bb, parent,
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

    /// Check if src can be assigned into dest.
    /// This is not precise, it will accept some incorrect assignments.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path before we normalize.
        if src == dest {
            // Equal types, all is good.
            return true;
        }

        // We sometimes have to use `defining_opaque_types` for subtyping
        // to succeed here and figuring out how exactly that should work
        // is annoying. It is harmless enough to just not validate anything
        // in that case. We still check this after analysis as all opaque
        // types have been revealed at this point.
        if (src, dest).has_opaque_types() {
            return true;
        }

        crate::util::is_subtype(self.tcx, self.param_env, src, dest)
    }
}
246 | ||
247 | impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { | |
064997fb FG |
248 | fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) { |
249 | if self.body.local_decls.get(local).is_none() { | |
fc512014 XL |
250 | self.fail( |
251 | location, | |
252 | format!("local {:?} has no corresponding declaration in `body.local_decls`", local), | |
253 | ); | |
254 | } | |
255 | ||
29967ef6 | 256 | if self.reachable_blocks.contains(location.block) && context.is_use() { |
064997fb FG |
257 | // We check that the local is live whenever it is used. Technically, violating this |
258 | // restriction is only UB and not actually indicative of not well-formed MIR. This means | |
259 | // that an optimization which turns MIR that already has UB into MIR that fails this | |
260 | // check is not necessarily wrong. However, we have no such optimizations at the moment, | |
261 | // and so we include this check anyway to help us catch bugs. If you happen to write an | |
262 | // optimization that might cause this to incorrectly fire, feel free to remove this | |
263 | // check. | |
1b1a35ee XL |
264 | self.storage_liveness.seek_after_primary_effect(location); |
265 | let locals_with_storage = self.storage_liveness.get(); | |
064997fb | 266 | if !locals_with_storage.contains(local) { |
1b1a35ee XL |
267 | self.fail(location, format!("use of local {:?}, which has no storage here", local)); |
268 | } | |
269 | } | |
270 | } | |
271 | ||
f9f354fc | 272 | fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { |
29967ef6 | 273 | // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed. |
f2b60f7d FG |
274 | if self.tcx.sess.opts.unstable_opts.validate_mir |
275 | && self.mir_phase < MirPhase::Runtime(RuntimePhase::Initial) | |
04454e1e | 276 | { |
29967ef6 XL |
277 | // `Operand::Copy` is only supposed to be used with `Copy` types. |
278 | if let Operand::Copy(place) = operand { | |
279 | let ty = place.ty(&self.body.local_decls, self.tcx).ty; | |
f9f354fc | 280 | |
2b03887a | 281 | if !ty.is_copy_modulo_regions(self.tcx, self.param_env) { |
29967ef6 XL |
282 | self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty)); |
283 | } | |
f9f354fc XL |
284 | } |
285 | } | |
286 | ||
287 | self.super_operand(operand, location); | |
288 | } | |
289 | ||
17df50a5 XL |
    /// Validates a single place-projection element: index locals must be
    /// `usize`, `Box` derefs must be gone after `ElaborateBoxDerefs`, and
    /// `Field` projections must record the field's actual type.
    fn visit_projection_elem(
        &mut self,
        local: Local,
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        match elem {
            ProjectionElem::Index(index) => {
                let index_ty = self.body.local_decls[index].ty;
                if index_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad index ({:?} != usize)", index_ty))
                }
            }
            ProjectionElem::Deref
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::PostCleanup) =>
            {
                let base_ty = Place::ty_from(local, proj_base, &self.body.local_decls, self.tcx).ty;

                if base_ty.is_box() {
                    self.fail(
                        location,
                        format!("{:?} dereferenced after ElaborateBoxDerefs", base_ty),
                    )
                }
            }
            ProjectionElem::Field(f, ty) => {
                let parent = Place { local, projection: self.tcx.mk_place_elems(proj_base) };
                let parent_ty = parent.ty(&self.body.local_decls, self.tcx);
                let fail_out_of_bounds = |this: &Self, location| {
                    this.fail(location, format!("Out of bounds field {:?} for {:?}", f, parent_ty));
                };
                // Compares the type recorded in the projection against the
                // field's actual type, via the imprecise `mir_assign_valid_types`.
                let check_equal = |this: &Self, location, f_ty| {
                    if !this.mir_assign_valid_types(ty, f_ty) {
                        this.fail(
                            location,
                            format!(
                                "Field projection `{:?}.{:?}` specified type `{:?}`, but actual type is `{:?}`",
                                parent, f, ty, f_ty
                            )
                        )
                    }
                };

                // Peel one layer of opaque type so field projections through a
                // revealed opaque are checked against the underlying type.
                let kind = match parent_ty.ty.kind() {
                    &ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => {
                        self.tcx.type_of(def_id).subst(self.tcx, substs).kind()
                    }
                    kind => kind,
                };

                match kind {
                    ty::Tuple(fields) => {
                        let Some(f_ty) = fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, *f_ty);
                    }
                    ty::Adt(adt_def, substs) => {
                        // With no recorded variant index, the ADT is treated as
                        // its first (only, for structs) variant.
                        let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0));
                        let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, field.ty(self.tcx, substs));
                    }
                    ty::Closure(_, substs) => {
                        let substs = substs.as_closure();
                        let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    &ty::Generator(def_id, substs, _) => {
                        let f_ty = if let Some(var) = parent_ty.variant_index {
                            // Variant-indexed projections look the field up in
                            // the generator layout, which only exists on the
                            // generator's own (possibly optimized) body.
                            let gen_body = if def_id == self.body.source.def_id() {
                                self.body
                            } else {
                                self.tcx.optimized_mir(def_id)
                            };

                            let Some(layout) = gen_body.generator_layout() else {
                                self.fail(location, format!("No generator layout for {:?}", parent_ty));
                                return;
                            };

                            let Some(&local) = layout.variant_fields[var].get(f) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            let Some(f_ty) = layout.field_tys.get(local) else {
                                self.fail(location, format!("Out of bounds local {:?} for {:?}", local, parent_ty));
                                return;
                            };

                            f_ty.ty
                        } else {
                            // Without a variant index, only the upvar prefix
                            // types are addressable.
                            let Some(f_ty) = substs.as_generator().prefix_tys().nth(f.index()) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            f_ty
                        };

                        check_equal(self, location, f_ty);
                    }
                    _ => {
                        self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
                    }
                }
            }
            _ => {}
        }
        self.super_projection_elem(local, proj_base, elem, context, location);
    }
410 | ||
923072b8 | 411 | fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) { |
04454e1e FG |
412 | // Set off any `bug!`s in the type computation code |
413 | let _ = place.ty(&self.body.local_decls, self.tcx); | |
923072b8 | 414 | |
f2b60f7d | 415 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) |
923072b8 FG |
416 | && place.projection.len() > 1 |
417 | && cntxt != PlaceContext::NonUse(VarDebugInfo) | |
418 | && place.projection[1..].contains(&ProjectionElem::Deref) | |
419 | { | |
420 | self.fail(location, format!("{:?}, has deref at the wrong place", place)); | |
421 | } | |
064997fb FG |
422 | |
423 | self.super_place(place, cntxt, location); | |
04454e1e FG |
424 | } |
425 | ||
426 | fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { | |
427 | macro_rules! check_kinds { | |
428 | ($t:expr, $text:literal, $($patterns:tt)*) => { | |
429 | if !matches!(($t).kind(), $($patterns)*) { | |
430 | self.fail(location, format!($text, $t)); | |
431 | } | |
432 | }; | |
433 | } | |
434 | match rvalue { | |
9ffffee4 | 435 | Rvalue::Use(_) | Rvalue::CopyForDeref(_) | Rvalue::Aggregate(..) => {} |
04454e1e | 436 | Rvalue::Ref(_, BorrowKind::Shallow, _) => { |
f2b60f7d | 437 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { |
04454e1e FG |
438 | self.fail( |
439 | location, | |
f2b60f7d | 440 | "`Assign` statement with a `Shallow` borrow should have been removed in runtime MIR", |
f035d41b XL |
441 | ); |
442 | } | |
04454e1e | 443 | } |
064997fb | 444 | Rvalue::Ref(..) => {} |
04454e1e FG |
445 | Rvalue::Len(p) => { |
446 | let pty = p.ty(&self.body.local_decls, self.tcx).ty; | |
447 | check_kinds!( | |
448 | pty, | |
449 | "Cannot compute length of non-array type {:?}", | |
450 | ty::Array(..) | ty::Slice(..) | |
451 | ); | |
452 | } | |
453 | Rvalue::BinaryOp(op, vals) => { | |
454 | use BinOp::*; | |
455 | let a = vals.0.ty(&self.body.local_decls, self.tcx); | |
456 | let b = vals.1.ty(&self.body.local_decls, self.tcx); | |
457 | match op { | |
458 | Offset => { | |
459 | check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..)); | |
460 | if b != self.tcx.types.isize && b != self.tcx.types.usize { | |
461 | self.fail(location, format!("Cannot offset by non-isize type {:?}", b)); | |
462 | } | |
463 | } | |
464 | Eq | Lt | Le | Ne | Ge | Gt => { | |
465 | for x in [a, b] { | |
466 | check_kinds!( | |
467 | x, | |
468 | "Cannot compare type {:?}", | |
469 | ty::Bool | |
470 | | ty::Char | |
471 | | ty::Int(..) | |
472 | | ty::Uint(..) | |
473 | | ty::Float(..) | |
474 | | ty::RawPtr(..) | |
475 | | ty::FnPtr(..) | |
476 | ) | |
477 | } | |
478 | // The function pointer types can have lifetimes | |
479 | if !self.mir_assign_valid_types(a, b) { | |
f035d41b XL |
480 | self.fail( |
481 | location, | |
04454e1e | 482 | format!("Cannot compare unequal types {:?} and {:?}", a, b), |
f035d41b XL |
483 | ); |
484 | } | |
f9f354fc | 485 | } |
04454e1e FG |
486 | Shl | Shr => { |
487 | for x in [a, b] { | |
488 | check_kinds!( | |
489 | x, | |
490 | "Cannot shift non-integer type {:?}", | |
491 | ty::Uint(..) | ty::Int(..) | |
492 | ) | |
493 | } | |
494 | } | |
495 | BitAnd | BitOr | BitXor => { | |
496 | for x in [a, b] { | |
497 | check_kinds!( | |
498 | x, | |
499 | "Cannot perform bitwise op on type {:?}", | |
500 | ty::Uint(..) | ty::Int(..) | ty::Bool | |
501 | ) | |
502 | } | |
503 | if a != b { | |
3dfed10e XL |
504 | self.fail( |
505 | location, | |
04454e1e FG |
506 | format!( |
507 | "Cannot perform bitwise op on unequal types {:?} and {:?}", | |
508 | a, b | |
509 | ), | |
510 | ); | |
511 | } | |
512 | } | |
513 | Add | Sub | Mul | Div | Rem => { | |
514 | for x in [a, b] { | |
515 | check_kinds!( | |
516 | x, | |
517 | "Cannot perform arithmetic on type {:?}", | |
518 | ty::Uint(..) | ty::Int(..) | ty::Float(..) | |
3dfed10e XL |
519 | ) |
520 | } | |
04454e1e FG |
521 | if a != b { |
522 | self.fail( | |
523 | location, | |
524 | format!( | |
525 | "Cannot perform arithmetic on unequal types {:?} and {:?}", | |
526 | a, b | |
527 | ), | |
528 | ); | |
529 | } | |
3dfed10e | 530 | } |
04454e1e FG |
531 | } |
532 | } | |
533 | Rvalue::CheckedBinaryOp(op, vals) => { | |
534 | use BinOp::*; | |
535 | let a = vals.0.ty(&self.body.local_decls, self.tcx); | |
536 | let b = vals.1.ty(&self.body.local_decls, self.tcx); | |
537 | match op { | |
538 | Add | Sub | Mul => { | |
539 | for x in [a, b] { | |
540 | check_kinds!( | |
541 | x, | |
542 | "Cannot perform checked arithmetic on type {:?}", | |
543 | ty::Uint(..) | ty::Int(..) | |
544 | ) | |
545 | } | |
546 | if a != b { | |
1b1a35ee XL |
547 | self.fail( |
548 | location, | |
04454e1e FG |
549 | format!( |
550 | "Cannot perform checked arithmetic on unequal types {:?} and {:?}", | |
551 | a, b | |
552 | ), | |
1b1a35ee XL |
553 | ); |
554 | } | |
555 | } | |
04454e1e FG |
556 | Shl | Shr => { |
557 | for x in [a, b] { | |
558 | check_kinds!( | |
559 | x, | |
560 | "Cannot perform checked shift on non-integer type {:?}", | |
561 | ty::Uint(..) | ty::Int(..) | |
562 | ) | |
563 | } | |
564 | } | |
565 | _ => self.fail(location, format!("There is no checked version of {:?}", op)), | |
566 | } | |
567 | } | |
568 | Rvalue::UnaryOp(op, operand) => { | |
569 | let a = operand.ty(&self.body.local_decls, self.tcx); | |
570 | match op { | |
571 | UnOp::Neg => { | |
572 | check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..)) | |
573 | } | |
574 | UnOp::Not => { | |
575 | check_kinds!( | |
576 | a, | |
577 | "Cannot binary not type {:?}", | |
578 | ty::Int(..) | ty::Uint(..) | ty::Bool | |
579 | ); | |
580 | } | |
581 | } | |
582 | } | |
583 | Rvalue::ShallowInitBox(operand, _) => { | |
584 | let a = operand.ty(&self.body.local_decls, self.tcx); | |
585 | check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..)); | |
586 | } | |
064997fb | 587 | Rvalue::Cast(kind, operand, target_type) => { |
2b03887a | 588 | let op_ty = operand.ty(self.body, self.tcx); |
064997fb | 589 | match kind { |
2b03887a FG |
590 | CastKind::DynStar => { |
591 | // FIXME(dyn-star): make sure nothing needs to be done here. | |
592 | } | |
593 | // FIXME: Add Checks for these | |
594 | CastKind::PointerFromExposedAddress | |
595 | | CastKind::PointerExposeAddress | |
596 | | CastKind::Pointer(_) => {} | |
597 | CastKind::IntToInt | CastKind::IntToFloat => { | |
598 | let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool(); | |
599 | let target_valid = target_type.is_numeric() || target_type.is_char(); | |
600 | if !input_valid || !target_valid { | |
601 | self.fail( | |
602 | location, | |
603 | format!("Wrong cast kind {kind:?} for the type {op_ty}",), | |
604 | ); | |
605 | } | |
606 | } | |
607 | CastKind::FnPtrToPtr | CastKind::PtrToPtr => { | |
608 | if !(op_ty.is_any_ptr() && target_type.is_unsafe_ptr()) { | |
609 | self.fail(location, "Can't cast {op_ty} into 'Ptr'"); | |
610 | } | |
611 | } | |
612 | CastKind::FloatToFloat | CastKind::FloatToInt => { | |
613 | if !op_ty.is_floating_point() || !target_type.is_numeric() { | |
064997fb FG |
614 | self.fail( |
615 | location, | |
616 | format!( | |
2b03887a | 617 | "Trying to cast non 'Float' as {kind:?} into {target_type:?}" |
064997fb FG |
618 | ), |
619 | ); | |
620 | } | |
621 | } | |
064997fb FG |
622 | } |
623 | } | |
624 | Rvalue::Repeat(_, _) | |
625 | | Rvalue::ThreadLocalRef(_) | |
626 | | Rvalue::AddressOf(_, _) | |
627 | | Rvalue::NullaryOp(_, _) | |
628 | | Rvalue::Discriminant(_) => {} | |
04454e1e FG |
629 | } |
630 | self.super_rvalue(rvalue, location); | |
631 | } | |
632 | ||
633 | fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { | |
634 | match &statement.kind { | |
635 | StatementKind::Assign(box (dest, rvalue)) => { | |
636 | // LHS and RHS of the assignment must have the same type. | |
637 | let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty; | |
638 | let right_ty = rvalue.ty(&self.body.local_decls, self.tcx); | |
639 | if !self.mir_assign_valid_types(right_ty, left_ty) { | |
640 | self.fail( | |
641 | location, | |
642 | format!( | |
643 | "encountered `{:?}` with incompatible types:\n\ | |
644 | left-hand side has type: {}\n\ | |
645 | right-hand side has type: {}", | |
646 | statement.kind, left_ty, right_ty, | |
647 | ), | |
648 | ); | |
649 | } | |
064997fb FG |
650 | if let Rvalue::CopyForDeref(place) = rvalue { |
651 | if !place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_some() | |
652 | { | |
653 | self.fail( | |
654 | location, | |
655 | "`CopyForDeref` should only be used for dereferenceable types", | |
656 | ) | |
657 | } | |
658 | } | |
04454e1e FG |
659 | // FIXME(JakobDegen): Check this for all rvalues, not just this one. |
660 | if let Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) = rvalue { | |
661 | // The sides of an assignment must not alias. Currently this just checks whether | |
662 | // the places are identical. | |
663 | if dest == src { | |
664 | self.fail( | |
665 | location, | |
666 | "encountered `Assign` statement with overlapping memory", | |
667 | ); | |
668 | } | |
f035d41b XL |
669 | } |
670 | } | |
1b1a35ee | 671 | StatementKind::AscribeUserType(..) => { |
f2b60f7d | 672 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { |
1b1a35ee XL |
673 | self.fail( |
674 | location, | |
675 | "`AscribeUserType` should have been removed after drop lowering phase", | |
676 | ); | |
677 | } | |
678 | } | |
679 | StatementKind::FakeRead(..) => { | |
f2b60f7d | 680 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { |
1b1a35ee XL |
681 | self.fail( |
682 | location, | |
683 | "`FakeRead` should have been removed after drop lowering phase", | |
684 | ); | |
685 | } | |
686 | } | |
f2b60f7d FG |
687 | StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => { |
688 | let ty = op.ty(&self.body.local_decls, self.tcx); | |
689 | if !ty.is_bool() { | |
690 | self.fail( | |
691 | location, | |
692 | format!("`assume` argument must be `bool`, but got: `{}`", ty), | |
693 | ); | |
694 | } | |
695 | } | |
696 | StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping( | |
697 | CopyNonOverlapping { src, dst, count }, | |
698 | )) => { | |
6a06907d XL |
699 | let src_ty = src.ty(&self.body.local_decls, self.tcx); |
700 | let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) { | |
701 | src_deref.ty | |
702 | } else { | |
703 | self.fail( | |
704 | location, | |
705 | format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty), | |
706 | ); | |
707 | return; | |
708 | }; | |
709 | let dst_ty = dst.ty(&self.body.local_decls, self.tcx); | |
710 | let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) { | |
711 | dst_deref.ty | |
712 | } else { | |
713 | self.fail( | |
714 | location, | |
715 | format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty), | |
716 | ); | |
717 | return; | |
718 | }; | |
719 | // since CopyNonOverlapping is parametrized by 1 type, | |
720 | // we only need to check that they are equal and not keep an extra parameter. | |
04454e1e | 721 | if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) { |
6a06907d XL |
722 | self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty)); |
723 | } | |
724 | ||
725 | let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx); | |
726 | if op_cnt_ty != self.tcx.types.usize { | |
727 | self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty)) | |
728 | } | |
729 | } | |
04454e1e | 730 | StatementKind::SetDiscriminant { place, .. } => { |
f2b60f7d | 731 | if self.mir_phase < MirPhase::Runtime(RuntimePhase::Initial) { |
04454e1e FG |
732 | self.fail(location, "`SetDiscriminant`is not allowed until deaggregation"); |
733 | } | |
734 | let pty = place.ty(&self.body.local_decls, self.tcx).ty.kind(); | |
9c376795 | 735 | if !matches!(pty, ty::Adt(..) | ty::Generator(..) | ty::Alias(ty::Opaque, ..)) { |
04454e1e FG |
736 | self.fail( |
737 | location, | |
738 | format!( | |
739 | "`SetDiscriminant` is only allowed on ADTs and generators, not {:?}", | |
740 | pty | |
741 | ), | |
742 | ); | |
743 | } | |
744 | } | |
745 | StatementKind::Deinit(..) => { | |
f2b60f7d | 746 | if self.mir_phase < MirPhase::Runtime(RuntimePhase::Initial) { |
04454e1e | 747 | self.fail(location, "`Deinit`is not allowed until deaggregation"); |
5e7ed085 FG |
748 | } |
749 | } | |
9c376795 | 750 | StatementKind::Retag(kind, _) => { |
5e7ed085 FG |
751 | // FIXME(JakobDegen) The validator should check that `self.mir_phase < |
752 | // DropsLowered`. However, this causes ICEs with generation of drop shims, which | |
753 | // seem to fail to set their `MirPhase` correctly. | |
9ffffee4 | 754 | if matches!(kind, RetagKind::Raw | RetagKind::TwoPhase) { |
9c376795 FG |
755 | self.fail(location, format!("explicit `{:?}` is forbidden", kind)); |
756 | } | |
5e7ed085 | 757 | } |
9ffffee4 FG |
758 | StatementKind::StorageLive(local) => { |
759 | // We check that the local is not live when entering a `StorageLive` for it. | |
760 | // Technically, violating this restriction is only UB and not actually indicative | |
761 | // of not well-formed MIR. This means that an optimization which turns MIR that | |
762 | // already has UB into MIR that fails this check is not necessarily wrong. However, | |
763 | // we have no such optimizations at the moment, and so we include this check anyway | |
764 | // to help us catch bugs. If you happen to write an optimization that might cause | |
765 | // this to incorrectly fire, feel free to remove this check. | |
766 | if self.reachable_blocks.contains(location.block) { | |
767 | self.storage_liveness.seek_before_primary_effect(location); | |
768 | let locals_with_storage = self.storage_liveness.get(); | |
769 | if locals_with_storage.contains(*local) { | |
770 | self.fail( | |
771 | location, | |
772 | format!("StorageLive({local:?}) which already has storage here"), | |
773 | ); | |
774 | } | |
775 | } | |
776 | } | |
777 | StatementKind::StorageDead(_) | |
6a06907d | 778 | | StatementKind::Coverage(_) |
9ffffee4 | 779 | | StatementKind::ConstEvalCounter |
6a06907d | 780 | | StatementKind::Nop => {} |
f035d41b | 781 | } |
29967ef6 XL |
782 | |
783 | self.super_statement(statement, location); | |
f035d41b XL |
784 | } |
785 | ||
786 | fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { | |
787 | match &terminator.kind { | |
788 | TerminatorKind::Goto { target } => { | |
789 | self.check_edge(location, *target, EdgeKind::Normal); | |
790 | } | |
9c376795 FG |
791 | TerminatorKind::SwitchInt { targets, discr } => { |
792 | let switch_ty = discr.ty(&self.body.local_decls, self.tcx); | |
29967ef6 XL |
793 | |
794 | let target_width = self.tcx.sess.target.pointer_width; | |
795 | ||
796 | let size = Size::from_bits(match switch_ty.kind() { | |
797 | ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(), | |
798 | ty::Int(int) => int.normalize(target_width).bit_width().unwrap(), | |
799 | ty::Char => 32, | |
800 | ty::Bool => 1, | |
801 | other => bug!("unhandled type: {:?}", other), | |
802 | }); | |
803 | ||
804 | for (value, target) in targets.iter() { | |
805 | if Scalar::<()>::try_from_uint(value, size).is_none() { | |
806 | self.fail( | |
807 | location, | |
808 | format!("the value {:#x} is not a proper {:?}", value, switch_ty), | |
809 | ) | |
810 | } | |
811 | ||
812 | self.check_edge(location, target, EdgeKind::Normal); | |
f035d41b | 813 | } |
29967ef6 | 814 | self.check_edge(location, targets.otherwise(), EdgeKind::Normal); |
5099ac24 FG |
815 | |
816 | self.value_cache.clear(); | |
817 | self.value_cache.extend(targets.iter().map(|(value, _)| value)); | |
818 | let all_len = self.value_cache.len(); | |
819 | self.value_cache.sort_unstable(); | |
820 | self.value_cache.dedup(); | |
821 | let has_duplicates = all_len != self.value_cache.len(); | |
822 | if has_duplicates { | |
823 | self.fail( | |
824 | location, | |
825 | format!( | |
826 | "duplicated values in `SwitchInt` terminator: {:?}", | |
827 | terminator.kind, | |
828 | ), | |
829 | ); | |
830 | } | |
f035d41b XL |
831 | } |
832 | TerminatorKind::Drop { target, unwind, .. } => { | |
833 | self.check_edge(location, *target, EdgeKind::Normal); | |
834 | if let Some(unwind) = unwind { | |
835 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
836 | } | |
837 | } | |
838 | TerminatorKind::DropAndReplace { target, unwind, .. } => { | |
f2b60f7d | 839 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { |
3dfed10e XL |
840 | self.fail( |
841 | location, | |
5e7ed085 | 842 | "`DropAndReplace` should have been removed during drop elaboration", |
3dfed10e XL |
843 | ); |
844 | } | |
f035d41b XL |
845 | self.check_edge(location, *target, EdgeKind::Normal); |
846 | if let Some(unwind) = unwind { | |
847 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
848 | } | |
849 | } | |
923072b8 | 850 | TerminatorKind::Call { func, args, destination, target, cleanup, .. } => { |
f035d41b | 851 | let func_ty = func.ty(&self.body.local_decls, self.tcx); |
1b1a35ee | 852 | match func_ty.kind() { |
f035d41b XL |
853 | ty::FnPtr(..) | ty::FnDef(..) => {} |
854 | _ => self.fail( | |
855 | location, | |
856 | format!("encountered non-callable type {} in `Call` terminator", func_ty), | |
857 | ), | |
858 | } | |
923072b8 | 859 | if let Some(target) = target { |
f035d41b XL |
860 | self.check_edge(location, *target, EdgeKind::Normal); |
861 | } | |
862 | if let Some(cleanup) = cleanup { | |
863 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
864 | } | |
29967ef6 XL |
865 | |
866 | // The call destination place and Operand::Move place used as an argument might be | |
867 | // passed by a reference to the callee. Consequently they must be non-overlapping. | |
868 | // Currently this simply checks for duplicate places. | |
869 | self.place_cache.clear(); | |
923072b8 | 870 | self.place_cache.push(destination.as_ref()); |
29967ef6 XL |
871 | for arg in args { |
872 | if let Operand::Move(place) = arg { | |
873 | self.place_cache.push(place.as_ref()); | |
874 | } | |
875 | } | |
876 | let all_len = self.place_cache.len(); | |
04454e1e FG |
877 | let mut dedup = FxHashSet::default(); |
878 | self.place_cache.retain(|p| dedup.insert(*p)); | |
29967ef6 XL |
879 | let has_duplicates = all_len != self.place_cache.len(); |
880 | if has_duplicates { | |
881 | self.fail( | |
882 | location, | |
883 | format!( | |
884 | "encountered overlapping memory in `Call` terminator: {:?}", | |
885 | terminator.kind, | |
886 | ), | |
887 | ); | |
888 | } | |
f035d41b XL |
889 | } |
890 | TerminatorKind::Assert { cond, target, cleanup, .. } => { | |
891 | let cond_ty = cond.ty(&self.body.local_decls, self.tcx); | |
892 | if cond_ty != self.tcx.types.bool { | |
893 | self.fail( | |
894 | location, | |
895 | format!( | |
896 | "encountered non-boolean condition of type {} in `Assert` terminator", | |
897 | cond_ty | |
898 | ), | |
899 | ); | |
900 | } | |
901 | self.check_edge(location, *target, EdgeKind::Normal); | |
902 | if let Some(cleanup) = cleanup { | |
903 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
904 | } | |
905 | } | |
906 | TerminatorKind::Yield { resume, drop, .. } => { | |
04454e1e FG |
907 | if self.body.generator.is_none() { |
908 | self.fail(location, "`Yield` cannot appear outside generator bodies"); | |
909 | } | |
f2b60f7d | 910 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { |
3dfed10e XL |
911 | self.fail(location, "`Yield` should have been replaced by generator lowering"); |
912 | } | |
f035d41b XL |
913 | self.check_edge(location, *resume, EdgeKind::Normal); |
914 | if let Some(drop) = drop { | |
915 | self.check_edge(location, *drop, EdgeKind::Normal); | |
916 | } | |
917 | } | |
918 | TerminatorKind::FalseEdge { real_target, imaginary_target } => { | |
f2b60f7d | 919 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { |
5e7ed085 FG |
920 | self.fail( |
921 | location, | |
922 | "`FalseEdge` should have been removed after drop elaboration", | |
923 | ); | |
924 | } | |
f035d41b XL |
925 | self.check_edge(location, *real_target, EdgeKind::Normal); |
926 | self.check_edge(location, *imaginary_target, EdgeKind::Normal); | |
927 | } | |
928 | TerminatorKind::FalseUnwind { real_target, unwind } => { | |
f2b60f7d | 929 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { |
5e7ed085 FG |
930 | self.fail( |
931 | location, | |
932 | "`FalseUnwind` should have been removed after drop elaboration", | |
933 | ); | |
934 | } | |
f035d41b XL |
935 | self.check_edge(location, *real_target, EdgeKind::Normal); |
936 | if let Some(unwind) = unwind { | |
937 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
938 | } | |
939 | } | |
a2a8927a | 940 | TerminatorKind::InlineAsm { destination, cleanup, .. } => { |
f035d41b XL |
941 | if let Some(destination) = destination { |
942 | self.check_edge(location, *destination, EdgeKind::Normal); | |
f9f354fc | 943 | } |
a2a8927a XL |
944 | if let Some(cleanup) = cleanup { |
945 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
946 | } | |
f9f354fc | 947 | } |
5e7ed085 | 948 | TerminatorKind::GeneratorDrop => { |
04454e1e FG |
949 | if self.body.generator.is_none() { |
950 | self.fail(location, "`GeneratorDrop` cannot appear outside generator bodies"); | |
951 | } | |
f2b60f7d | 952 | if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { |
5e7ed085 FG |
953 | self.fail( |
954 | location, | |
955 | "`GeneratorDrop` should have been replaced by generator lowering", | |
956 | ); | |
957 | } | |
958 | } | |
04454e1e FG |
959 | TerminatorKind::Resume | TerminatorKind::Abort => { |
960 | let bb = location.block; | |
f2b60f7d | 961 | if !self.body.basic_blocks[bb].is_cleanup { |
04454e1e FG |
962 | self.fail(location, "Cannot `Resume` or `Abort` from non-cleanup basic block") |
963 | } | |
964 | } | |
965 | TerminatorKind::Return => { | |
966 | let bb = location.block; | |
f2b60f7d | 967 | if self.body.basic_blocks[bb].is_cleanup { |
04454e1e FG |
968 | self.fail(location, "Cannot `Return` from cleanup basic block") |
969 | } | |
970 | } | |
971 | TerminatorKind::Unreachable => {} | |
f9f354fc | 972 | } |
29967ef6 XL |
973 | |
974 | self.super_terminator(terminator, location); | |
975 | } | |
976 | ||
064997fb FG |
977 | fn visit_source_scope(&mut self, scope: SourceScope) { |
978 | if self.body.source_scopes.get(scope).is_none() { | |
29967ef6 XL |
979 | self.tcx.sess.diagnostic().delay_span_bug( |
980 | self.body.span, | |
981 | &format!( | |
982 | "broken MIR in {:?} ({}):\ninvalid source scope {:?}", | |
983 | self.body.source.instance, self.when, scope, | |
984 | ), | |
985 | ); | |
986 | } | |
f9f354fc XL |
987 | } |
988 | } |