]>
Commit | Line | Data |
---|---|---|
f9f354fc XL |
1 | //! Validates the MIR to ensure that invariants are upheld. |
2 | ||
04454e1e | 3 | use rustc_data_structures::fx::FxHashSet; |
29967ef6 | 4 | use rustc_index::bit_set::BitSet; |
1b1a35ee | 5 | use rustc_infer::infer::TyCtxtInferExt; |
29967ef6 | 6 | use rustc_middle::mir::interpret::Scalar; |
923072b8 | 7 | use rustc_middle::mir::visit::NonUseContext::VarDebugInfo; |
1b1a35ee XL |
8 | use rustc_middle::mir::visit::{PlaceContext, Visitor}; |
9 | use rustc_middle::mir::{ | |
064997fb FG |
10 | traversal, AggregateKind, BasicBlock, BinOp, Body, BorrowKind, CastKind, Local, Location, |
11 | MirPass, MirPhase, Operand, Place, PlaceElem, PlaceRef, ProjectionElem, Rvalue, SourceScope, | |
12 | Statement, StatementKind, Terminator, TerminatorKind, UnOp, START_BLOCK, | |
f9f354fc | 13 | }; |
1b1a35ee | 14 | use rustc_middle::ty::fold::BottomUpFolder; |
064997fb FG |
15 | use rustc_middle::ty::subst::Subst; |
16 | use rustc_middle::ty::{self, InstanceDef, ParamEnv, Ty, TyCtxt, TypeFoldable, TypeVisitable}; | |
c295e0f8 | 17 | use rustc_mir_dataflow::impls::MaybeStorageLive; |
064997fb | 18 | use rustc_mir_dataflow::storage::always_storage_live_locals; |
c295e0f8 | 19 | use rustc_mir_dataflow::{Analysis, ResultsCursor}; |
04454e1e | 20 | use rustc_target::abi::{Size, VariantIdx}; |
f035d41b XL |
21 | |
/// Classifies a control-flow edge for the purpose of checking the cleanup/unwind
/// invariants in `TypeChecker::check_edge`.
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    /// An edge taken while unwinding (e.g. the `unwind`/`cleanup` target of a terminator).
    Unwind,
    /// Any ordinary (non-unwinding) control-flow edge.
    Normal,
}
f9f354fc XL |
27 | |
/// A MIR pass that checks a body against the invariants of its dialect; see `run_pass`.
pub struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    pub when: String,
    /// The phase for which we are upholding the dialect. If the given phase forbids a specific
    /// element, this validator will now emit errors if that specific element is encountered.
    /// Note that phases that change the dialect cause all *following* phases to check the
    /// invariants of the new dialect. A phase that changes dialects never checks the new invariants
    /// itself.
    pub mir_phase: MirPhase,
}
38 | ||
39 | impl<'tcx> MirPass<'tcx> for Validator { | |
29967ef6 | 40 | fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { |
04454e1e FG |
41 | // FIXME(JakobDegen): These bodies never instantiated in codegend anyway, so it's not |
42 | // terribly important that they pass the validator. However, I think other passes might | |
43 | // still see them, in which case they might be surprised. It would probably be better if we | |
44 | // didn't put this through the MIR pipeline at all. | |
45 | if matches!(body.source.instance, InstanceDef::Intrinsic(..) | InstanceDef::Virtual(..)) { | |
46 | return; | |
47 | } | |
29967ef6 | 48 | let def_id = body.source.def_id(); |
1b1a35ee | 49 | let param_env = tcx.param_env(def_id); |
3dfed10e | 50 | let mir_phase = self.mir_phase; |
1b1a35ee | 51 | |
064997fb | 52 | let always_live_locals = always_storage_live_locals(body); |
1b1a35ee | 53 | let storage_liveness = MaybeStorageLive::new(always_live_locals) |
29967ef6 | 54 | .into_engine(tcx, body) |
1b1a35ee XL |
55 | .iterate_to_fixpoint() |
56 | .into_results_cursor(body); | |
57 | ||
29967ef6 XL |
58 | TypeChecker { |
59 | when: &self.when, | |
60 | body, | |
61 | tcx, | |
62 | param_env, | |
63 | mir_phase, | |
64 | reachable_blocks: traversal::reachable_as_bitset(body), | |
65 | storage_liveness, | |
66 | place_cache: Vec::new(), | |
5099ac24 | 67 | value_cache: Vec::new(), |
29967ef6 XL |
68 | } |
69 | .visit_body(body); | |
f9f354fc XL |
70 | } |
71 | } | |
72 | ||
f035d41b XL |
73 | /// Returns whether the two types are equal up to lifetimes. |
74 | /// All lifetimes, including higher-ranked ones, get ignored for this comparison. | |
75 | /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.) | |
76 | /// | |
77 | /// The point of this function is to approximate "equal up to subtyping". However, | |
78 | /// the approximation is incorrect as variance is ignored. | |
a2a8927a | 79 | pub fn equal_up_to_regions<'tcx>( |
f035d41b XL |
80 | tcx: TyCtxt<'tcx>, |
81 | param_env: ParamEnv<'tcx>, | |
82 | src: Ty<'tcx>, | |
83 | dest: Ty<'tcx>, | |
84 | ) -> bool { | |
85 | // Fast path. | |
86 | if src == dest { | |
87 | return true; | |
88 | } | |
89 | ||
1b1a35ee | 90 | // Normalize lifetimes away on both sides, then compare. |
1b1a35ee XL |
91 | let normalize = |ty: Ty<'tcx>| { |
92 | tcx.normalize_erasing_regions( | |
93 | param_env, | |
94 | ty.fold_with(&mut BottomUpFolder { | |
95 | tcx, | |
29967ef6 XL |
96 | // FIXME: We erase all late-bound lifetimes, but this is not fully correct. |
97 | // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`, | |
98 | // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`, | |
99 | // since one may have an `impl SomeTrait for fn(&32)` and | |
100 | // `impl SomeTrait for fn(&'static u32)` at the same time which | |
101 | // specify distinct values for Assoc. (See also #56105) | |
1b1a35ee XL |
102 | lt_op: |_| tcx.lifetimes.re_erased, |
103 | // Leave consts and types unchanged. | |
104 | ct_op: |ct| ct, | |
105 | ty_op: |ty| ty, | |
106 | }), | |
107 | ) | |
108 | }; | |
109 | tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok()) | |
f035d41b XL |
110 | } |
111 | ||
f9f354fc XL |
/// Visitor that walks a MIR body and reports (delayed) errors for every violated
/// invariant it encounters.
struct TypeChecker<'a, 'tcx> {
    /// Human-readable description of when this validation runs (from `Validator::when`).
    when: &'a str,
    /// The body being validated.
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    /// The dialect whose invariants are checked (from `Validator::mir_phase`).
    mir_phase: MirPhase,
    /// Blocks reachable from the start block; storage liveness of locals is only
    /// checked inside reachable blocks.
    reachable_blocks: BitSet<BasicBlock>,
    /// Dataflow cursor used to check that locals are only used while their storage is live.
    storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
    /// Scratch buffer for detecting overlapping places in `Call` terminators.
    place_cache: Vec<PlaceRef<'tcx>>,
    /// Scratch buffer for detecting duplicate values in `SwitchInt` terminators.
    value_cache: Vec<u128>,
}
123 | ||
124 | impl<'a, 'tcx> TypeChecker<'a, 'tcx> { | |
f035d41b XL |
125 | fn fail(&self, location: Location, msg: impl AsRef<str>) { |
126 | let span = self.body.source_info(location).span; | |
f9f354fc XL |
127 | // We use `delay_span_bug` as we might see broken MIR when other errors have already |
128 | // occurred. | |
129 | self.tcx.sess.diagnostic().delay_span_bug( | |
130 | span, | |
f035d41b XL |
131 | &format!( |
132 | "broken MIR in {:?} ({}) at {:?}:\n{}", | |
29967ef6 | 133 | self.body.source.instance, |
f035d41b XL |
134 | self.when, |
135 | location, | |
136 | msg.as_ref() | |
137 | ), | |
f9f354fc XL |
138 | ); |
139 | } | |
f035d41b XL |
140 | |
141 | fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) { | |
c295e0f8 XL |
142 | if bb == START_BLOCK { |
143 | self.fail(location, "start block must not have predecessors") | |
144 | } | |
f035d41b XL |
145 | if let Some(bb) = self.body.basic_blocks().get(bb) { |
146 | let src = self.body.basic_blocks().get(location.block).unwrap(); | |
147 | match (src.is_cleanup, bb.is_cleanup, edge_kind) { | |
148 | // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges | |
149 | (false, false, EdgeKind::Normal) | |
150 | // Non-cleanup blocks can jump to cleanup blocks along unwind edges | |
151 | | (false, true, EdgeKind::Unwind) | |
152 | // Cleanup blocks can jump to cleanup blocks along non-unwind edges | |
153 | | (true, true, EdgeKind::Normal) => {} | |
154 | // All other jumps are invalid | |
155 | _ => { | |
156 | self.fail( | |
157 | location, | |
158 | format!( | |
159 | "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})", | |
160 | edge_kind, | |
161 | bb, | |
162 | src.is_cleanup, | |
163 | bb.is_cleanup, | |
164 | ) | |
165 | ) | |
166 | } | |
167 | } | |
168 | } else { | |
169 | self.fail(location, format!("encountered jump to invalid basic block {:?}", bb)) | |
170 | } | |
171 | } | |
172 | ||
173 | /// Check if src can be assigned into dest. | |
174 | /// This is not precise, it will accept some incorrect assignments. | |
175 | fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool { | |
176 | // Fast path before we normalize. | |
177 | if src == dest { | |
178 | // Equal types, all is good. | |
179 | return true; | |
180 | } | |
5e7ed085 FG |
181 | // Normalization reveals opaque types, but we may be validating MIR while computing |
182 | // said opaque types, causing cycles. | |
183 | if (src, dest).has_opaque_types() { | |
184 | return true; | |
185 | } | |
f035d41b | 186 | // Normalize projections and things like that. |
3dfed10e | 187 | let param_env = self.param_env.with_reveal_all_normalized(self.tcx); |
f035d41b XL |
188 | let src = self.tcx.normalize_erasing_regions(param_env, src); |
189 | let dest = self.tcx.normalize_erasing_regions(param_env, dest); | |
190 | ||
191 | // Type-changing assignments can happen when subtyping is used. While | |
192 | // all normal lifetimes are erased, higher-ranked types with their | |
193 | // late-bound lifetimes are still around and can lead to type | |
194 | // differences. So we compare ignoring lifetimes. | |
195 | equal_up_to_regions(self.tcx, param_env, src, dest) | |
196 | } | |
f9f354fc XL |
197 | } |
198 | ||
199 | impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { | |
064997fb FG |
200 | fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) { |
201 | if self.body.local_decls.get(local).is_none() { | |
fc512014 XL |
202 | self.fail( |
203 | location, | |
204 | format!("local {:?} has no corresponding declaration in `body.local_decls`", local), | |
205 | ); | |
206 | } | |
207 | ||
29967ef6 | 208 | if self.reachable_blocks.contains(location.block) && context.is_use() { |
064997fb FG |
209 | // We check that the local is live whenever it is used. Technically, violating this |
210 | // restriction is only UB and not actually indicative of not well-formed MIR. This means | |
211 | // that an optimization which turns MIR that already has UB into MIR that fails this | |
212 | // check is not necessarily wrong. However, we have no such optimizations at the moment, | |
213 | // and so we include this check anyway to help us catch bugs. If you happen to write an | |
214 | // optimization that might cause this to incorrectly fire, feel free to remove this | |
215 | // check. | |
1b1a35ee XL |
216 | self.storage_liveness.seek_after_primary_effect(location); |
217 | let locals_with_storage = self.storage_liveness.get(); | |
064997fb | 218 | if !locals_with_storage.contains(local) { |
1b1a35ee XL |
219 | self.fail(location, format!("use of local {:?}, which has no storage here", local)); |
220 | } | |
221 | } | |
222 | } | |
223 | ||
f9f354fc | 224 | fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { |
29967ef6 | 225 | // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed. |
064997fb | 226 | if self.tcx.sess.opts.unstable_opts.validate_mir && self.mir_phase < MirPhase::DropsLowered |
04454e1e | 227 | { |
29967ef6 XL |
228 | // `Operand::Copy` is only supposed to be used with `Copy` types. |
229 | if let Operand::Copy(place) = operand { | |
230 | let ty = place.ty(&self.body.local_decls, self.tcx).ty; | |
231 | let span = self.body.source_info(location).span; | |
f9f354fc | 232 | |
29967ef6 XL |
233 | if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) { |
234 | self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty)); | |
235 | } | |
f9f354fc XL |
236 | } |
237 | } | |
238 | ||
239 | self.super_operand(operand, location); | |
240 | } | |
241 | ||
17df50a5 XL |
    /// Validates a single projection element: index operands must be `usize`,
    /// `Box` must not be dereferenced after `ElaborateBoxDerefs`, and field
    /// projections must be in bounds and carry the field's actual type.
    fn visit_projection_elem(
        &mut self,
        local: Local,
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        match elem {
            ProjectionElem::Index(index) => {
                // The local used as an index must have type `usize`.
                let index_ty = self.body.local_decls[index].ty;
                if index_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad index ({:?} != usize)", index_ty))
                }
            }
            ProjectionElem::Deref if self.mir_phase >= MirPhase::GeneratorsLowered => {
                let base_ty = Place::ty_from(local, proj_base, &self.body.local_decls, self.tcx).ty;

                // After `ElaborateBoxDerefs`, `Box` derefs must have been expanded away.
                if base_ty.is_box() {
                    self.fail(
                        location,
                        format!("{:?} dereferenced after ElaborateBoxDerefs", base_ty),
                    )
                }
            }
            ProjectionElem::Field(f, ty) => {
                let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) };
                let parent_ty = parent.ty(&self.body.local_decls, self.tcx);
                // Shared error helpers; the field index and the type recorded in the
                // projection are captured from the enclosing match arm.
                let fail_out_of_bounds = |this: &Self, location| {
                    this.fail(location, format!("Out of bounds field {:?} for {:?}", f, parent_ty));
                };
                let check_equal = |this: &Self, location, f_ty| {
                    if !this.mir_assign_valid_types(ty, f_ty) {
                        this.fail(
                            location,
                            format!(
                                "Field projection `{:?}.{:?}` specified type `{:?}`, but actual type is {:?}",
                                parent, f, ty, f_ty
                            )
                        )
                    }
                };

                // Peel one layer of opaque type so fields of the hidden type can be checked.
                let kind = match parent_ty.ty.kind() {
                    &ty::Opaque(def_id, substs) => {
                        self.tcx.bound_type_of(def_id).subst(self.tcx, substs).kind()
                    }
                    kind => kind,
                };

                match kind {
                    ty::Tuple(fields) => {
                        let Some(f_ty) = fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, *f_ty);
                    }
                    ty::Adt(adt_def, substs) => {
                        // Without a variant index, the projection refers to variant 0
                        // (e.g. a struct).
                        let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0));
                        let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, field.ty(self.tcx, substs));
                    }
                    ty::Closure(_, substs) => {
                        // Closure fields are its captured upvars.
                        let substs = substs.as_closure();
                        let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    &ty::Generator(def_id, substs, _) => {
                        let f_ty = if let Some(var) = parent_ty.variant_index {
                            // Variant-indexed projection: resolve the field through the
                            // generator layout. For a different generator, its optimized
                            // MIR carries the layout.
                            let gen_body = if def_id == self.body.source.def_id() {
                                self.body
                            } else {
                                self.tcx.optimized_mir(def_id)
                            };

                            let Some(layout) = gen_body.generator_layout() else {
                                self.fail(location, format!("No generator layout for {:?}", parent_ty));
                                return;
                            };

                            let Some(&local) = layout.variant_fields[var].get(f) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            let Some(&f_ty) = layout.field_tys.get(local) else {
                                self.fail(location, format!("Out of bounds local {:?} for {:?}", local, parent_ty));
                                return;
                            };

                            f_ty
                        } else {
                            // No variant index: the projection refers to the generator's
                            // prefix (upvars and such).
                            let Some(f_ty) = substs.as_generator().prefix_tys().nth(f.index()) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            f_ty
                        };

                        check_equal(self, location, f_ty);
                    }
                    _ => {
                        self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
                    }
                }
            }
            _ => {}
        }
        self.super_projection_elem(local, proj_base, elem, context, location);
    }
360 | ||
923072b8 | 361 | fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) { |
04454e1e FG |
362 | // Set off any `bug!`s in the type computation code |
363 | let _ = place.ty(&self.body.local_decls, self.tcx); | |
923072b8 FG |
364 | |
365 | if self.mir_phase >= MirPhase::Derefered | |
366 | && place.projection.len() > 1 | |
367 | && cntxt != PlaceContext::NonUse(VarDebugInfo) | |
368 | && place.projection[1..].contains(&ProjectionElem::Deref) | |
369 | { | |
370 | self.fail(location, format!("{:?}, has deref at the wrong place", place)); | |
371 | } | |
064997fb FG |
372 | |
373 | self.super_place(place, cntxt, location); | |
04454e1e FG |
374 | } |
375 | ||
376 | fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { | |
377 | macro_rules! check_kinds { | |
378 | ($t:expr, $text:literal, $($patterns:tt)*) => { | |
379 | if !matches!(($t).kind(), $($patterns)*) { | |
380 | self.fail(location, format!($text, $t)); | |
381 | } | |
382 | }; | |
383 | } | |
384 | match rvalue { | |
064997fb | 385 | Rvalue::Use(_) | Rvalue::CopyForDeref(_) => {} |
04454e1e FG |
386 | Rvalue::Aggregate(agg_kind, _) => { |
387 | let disallowed = match **agg_kind { | |
388 | AggregateKind::Array(..) => false, | |
389 | AggregateKind::Generator(..) => self.mir_phase >= MirPhase::GeneratorsLowered, | |
390 | _ => self.mir_phase >= MirPhase::Deaggregated, | |
391 | }; | |
392 | if disallowed { | |
f035d41b XL |
393 | self.fail( |
394 | location, | |
04454e1e FG |
395 | format!("{:?} have been lowered to field assignments", rvalue), |
396 | ) | |
397 | } | |
398 | } | |
399 | Rvalue::Ref(_, BorrowKind::Shallow, _) => { | |
400 | if self.mir_phase >= MirPhase::DropsLowered { | |
401 | self.fail( | |
402 | location, | |
403 | "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase", | |
f035d41b XL |
404 | ); |
405 | } | |
04454e1e | 406 | } |
064997fb | 407 | Rvalue::Ref(..) => {} |
04454e1e FG |
408 | Rvalue::Len(p) => { |
409 | let pty = p.ty(&self.body.local_decls, self.tcx).ty; | |
410 | check_kinds!( | |
411 | pty, | |
412 | "Cannot compute length of non-array type {:?}", | |
413 | ty::Array(..) | ty::Slice(..) | |
414 | ); | |
415 | } | |
416 | Rvalue::BinaryOp(op, vals) => { | |
417 | use BinOp::*; | |
418 | let a = vals.0.ty(&self.body.local_decls, self.tcx); | |
419 | let b = vals.1.ty(&self.body.local_decls, self.tcx); | |
420 | match op { | |
421 | Offset => { | |
422 | check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..)); | |
423 | if b != self.tcx.types.isize && b != self.tcx.types.usize { | |
424 | self.fail(location, format!("Cannot offset by non-isize type {:?}", b)); | |
425 | } | |
426 | } | |
427 | Eq | Lt | Le | Ne | Ge | Gt => { | |
428 | for x in [a, b] { | |
429 | check_kinds!( | |
430 | x, | |
431 | "Cannot compare type {:?}", | |
432 | ty::Bool | |
433 | | ty::Char | |
434 | | ty::Int(..) | |
435 | | ty::Uint(..) | |
436 | | ty::Float(..) | |
437 | | ty::RawPtr(..) | |
438 | | ty::FnPtr(..) | |
439 | ) | |
440 | } | |
441 | // The function pointer types can have lifetimes | |
442 | if !self.mir_assign_valid_types(a, b) { | |
f035d41b XL |
443 | self.fail( |
444 | location, | |
04454e1e | 445 | format!("Cannot compare unequal types {:?} and {:?}", a, b), |
f035d41b XL |
446 | ); |
447 | } | |
f9f354fc | 448 | } |
04454e1e FG |
449 | Shl | Shr => { |
450 | for x in [a, b] { | |
451 | check_kinds!( | |
452 | x, | |
453 | "Cannot shift non-integer type {:?}", | |
454 | ty::Uint(..) | ty::Int(..) | |
455 | ) | |
456 | } | |
457 | } | |
458 | BitAnd | BitOr | BitXor => { | |
459 | for x in [a, b] { | |
460 | check_kinds!( | |
461 | x, | |
462 | "Cannot perform bitwise op on type {:?}", | |
463 | ty::Uint(..) | ty::Int(..) | ty::Bool | |
464 | ) | |
465 | } | |
466 | if a != b { | |
3dfed10e XL |
467 | self.fail( |
468 | location, | |
04454e1e FG |
469 | format!( |
470 | "Cannot perform bitwise op on unequal types {:?} and {:?}", | |
471 | a, b | |
472 | ), | |
473 | ); | |
474 | } | |
475 | } | |
476 | Add | Sub | Mul | Div | Rem => { | |
477 | for x in [a, b] { | |
478 | check_kinds!( | |
479 | x, | |
480 | "Cannot perform arithmetic on type {:?}", | |
481 | ty::Uint(..) | ty::Int(..) | ty::Float(..) | |
3dfed10e XL |
482 | ) |
483 | } | |
04454e1e FG |
484 | if a != b { |
485 | self.fail( | |
486 | location, | |
487 | format!( | |
488 | "Cannot perform arithmetic on unequal types {:?} and {:?}", | |
489 | a, b | |
490 | ), | |
491 | ); | |
492 | } | |
3dfed10e | 493 | } |
04454e1e FG |
494 | } |
495 | } | |
496 | Rvalue::CheckedBinaryOp(op, vals) => { | |
497 | use BinOp::*; | |
498 | let a = vals.0.ty(&self.body.local_decls, self.tcx); | |
499 | let b = vals.1.ty(&self.body.local_decls, self.tcx); | |
500 | match op { | |
501 | Add | Sub | Mul => { | |
502 | for x in [a, b] { | |
503 | check_kinds!( | |
504 | x, | |
505 | "Cannot perform checked arithmetic on type {:?}", | |
506 | ty::Uint(..) | ty::Int(..) | |
507 | ) | |
508 | } | |
509 | if a != b { | |
1b1a35ee XL |
510 | self.fail( |
511 | location, | |
04454e1e FG |
512 | format!( |
513 | "Cannot perform checked arithmetic on unequal types {:?} and {:?}", | |
514 | a, b | |
515 | ), | |
1b1a35ee XL |
516 | ); |
517 | } | |
518 | } | |
04454e1e FG |
519 | Shl | Shr => { |
520 | for x in [a, b] { | |
521 | check_kinds!( | |
522 | x, | |
523 | "Cannot perform checked shift on non-integer type {:?}", | |
524 | ty::Uint(..) | ty::Int(..) | |
525 | ) | |
526 | } | |
527 | } | |
528 | _ => self.fail(location, format!("There is no checked version of {:?}", op)), | |
529 | } | |
530 | } | |
531 | Rvalue::UnaryOp(op, operand) => { | |
532 | let a = operand.ty(&self.body.local_decls, self.tcx); | |
533 | match op { | |
534 | UnOp::Neg => { | |
535 | check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..)) | |
536 | } | |
537 | UnOp::Not => { | |
538 | check_kinds!( | |
539 | a, | |
540 | "Cannot binary not type {:?}", | |
541 | ty::Int(..) | ty::Uint(..) | ty::Bool | |
542 | ); | |
543 | } | |
544 | } | |
545 | } | |
546 | Rvalue::ShallowInitBox(operand, _) => { | |
547 | let a = operand.ty(&self.body.local_decls, self.tcx); | |
548 | check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..)); | |
549 | } | |
064997fb FG |
550 | Rvalue::Cast(kind, operand, target_type) => { |
551 | match kind { | |
552 | CastKind::Misc => { | |
553 | let op_ty = operand.ty(self.body, self.tcx); | |
554 | if op_ty.is_enum() { | |
555 | self.fail( | |
556 | location, | |
557 | format!( | |
558 | "enum -> int casts should go through `Rvalue::Discriminant`: {operand:?}:{op_ty} as {target_type}", | |
559 | ), | |
560 | ); | |
561 | } | |
562 | } | |
563 | // Nothing to check here | |
564 | CastKind::PointerFromExposedAddress | |
565 | | CastKind::PointerExposeAddress | |
566 | | CastKind::Pointer(_) => {} | |
567 | } | |
568 | } | |
569 | Rvalue::Repeat(_, _) | |
570 | | Rvalue::ThreadLocalRef(_) | |
571 | | Rvalue::AddressOf(_, _) | |
572 | | Rvalue::NullaryOp(_, _) | |
573 | | Rvalue::Discriminant(_) => {} | |
04454e1e FG |
574 | } |
575 | self.super_rvalue(rvalue, location); | |
576 | } | |
577 | ||
578 | fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { | |
579 | match &statement.kind { | |
580 | StatementKind::Assign(box (dest, rvalue)) => { | |
581 | // LHS and RHS of the assignment must have the same type. | |
582 | let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty; | |
583 | let right_ty = rvalue.ty(&self.body.local_decls, self.tcx); | |
584 | if !self.mir_assign_valid_types(right_ty, left_ty) { | |
585 | self.fail( | |
586 | location, | |
587 | format!( | |
588 | "encountered `{:?}` with incompatible types:\n\ | |
589 | left-hand side has type: {}\n\ | |
590 | right-hand side has type: {}", | |
591 | statement.kind, left_ty, right_ty, | |
592 | ), | |
593 | ); | |
594 | } | |
064997fb FG |
595 | if let Rvalue::CopyForDeref(place) = rvalue { |
596 | if !place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_some() | |
597 | { | |
598 | self.fail( | |
599 | location, | |
600 | "`CopyForDeref` should only be used for dereferenceable types", | |
601 | ) | |
602 | } | |
603 | } | |
04454e1e FG |
604 | // FIXME(JakobDegen): Check this for all rvalues, not just this one. |
605 | if let Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) = rvalue { | |
606 | // The sides of an assignment must not alias. Currently this just checks whether | |
607 | // the places are identical. | |
608 | if dest == src { | |
609 | self.fail( | |
610 | location, | |
611 | "encountered `Assign` statement with overlapping memory", | |
612 | ); | |
613 | } | |
f035d41b XL |
614 | } |
615 | } | |
1b1a35ee | 616 | StatementKind::AscribeUserType(..) => { |
5e7ed085 | 617 | if self.mir_phase >= MirPhase::DropsLowered { |
1b1a35ee XL |
618 | self.fail( |
619 | location, | |
620 | "`AscribeUserType` should have been removed after drop lowering phase", | |
621 | ); | |
622 | } | |
623 | } | |
624 | StatementKind::FakeRead(..) => { | |
5e7ed085 | 625 | if self.mir_phase >= MirPhase::DropsLowered { |
1b1a35ee XL |
626 | self.fail( |
627 | location, | |
628 | "`FakeRead` should have been removed after drop lowering phase", | |
629 | ); | |
630 | } | |
631 | } | |
6a06907d XL |
632 | StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping { |
633 | ref src, | |
634 | ref dst, | |
635 | ref count, | |
636 | }) => { | |
637 | let src_ty = src.ty(&self.body.local_decls, self.tcx); | |
638 | let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) { | |
639 | src_deref.ty | |
640 | } else { | |
641 | self.fail( | |
642 | location, | |
643 | format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty), | |
644 | ); | |
645 | return; | |
646 | }; | |
647 | let dst_ty = dst.ty(&self.body.local_decls, self.tcx); | |
648 | let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) { | |
649 | dst_deref.ty | |
650 | } else { | |
651 | self.fail( | |
652 | location, | |
653 | format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty), | |
654 | ); | |
655 | return; | |
656 | }; | |
657 | // since CopyNonOverlapping is parametrized by 1 type, | |
658 | // we only need to check that they are equal and not keep an extra parameter. | |
04454e1e | 659 | if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) { |
6a06907d XL |
660 | self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty)); |
661 | } | |
662 | ||
663 | let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx); | |
664 | if op_cnt_ty != self.tcx.types.usize { | |
665 | self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty)) | |
666 | } | |
667 | } | |
04454e1e FG |
668 | StatementKind::SetDiscriminant { place, .. } => { |
669 | if self.mir_phase < MirPhase::Deaggregated { | |
670 | self.fail(location, "`SetDiscriminant`is not allowed until deaggregation"); | |
671 | } | |
672 | let pty = place.ty(&self.body.local_decls, self.tcx).ty.kind(); | |
673 | if !matches!(pty, ty::Adt(..) | ty::Generator(..) | ty::Opaque(..)) { | |
674 | self.fail( | |
675 | location, | |
676 | format!( | |
677 | "`SetDiscriminant` is only allowed on ADTs and generators, not {:?}", | |
678 | pty | |
679 | ), | |
680 | ); | |
681 | } | |
682 | } | |
683 | StatementKind::Deinit(..) => { | |
684 | if self.mir_phase < MirPhase::Deaggregated { | |
685 | self.fail(location, "`Deinit`is not allowed until deaggregation"); | |
5e7ed085 FG |
686 | } |
687 | } | |
688 | StatementKind::Retag(_, _) => { | |
689 | // FIXME(JakobDegen) The validator should check that `self.mir_phase < | |
690 | // DropsLowered`. However, this causes ICEs with generation of drop shims, which | |
691 | // seem to fail to set their `MirPhase` correctly. | |
692 | } | |
693 | StatementKind::StorageLive(..) | |
6a06907d | 694 | | StatementKind::StorageDead(..) |
6a06907d XL |
695 | | StatementKind::Coverage(_) |
696 | | StatementKind::Nop => {} | |
f035d41b | 697 | } |
29967ef6 XL |
698 | |
699 | self.super_statement(statement, location); | |
f035d41b XL |
700 | } |
701 | ||
702 | fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { | |
703 | match &terminator.kind { | |
704 | TerminatorKind::Goto { target } => { | |
705 | self.check_edge(location, *target, EdgeKind::Normal); | |
706 | } | |
29967ef6 | 707 | TerminatorKind::SwitchInt { targets, switch_ty, discr } => { |
f035d41b XL |
708 | let ty = discr.ty(&self.body.local_decls, self.tcx); |
709 | if ty != *switch_ty { | |
710 | self.fail( | |
711 | location, | |
712 | format!( | |
713 | "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}", | |
714 | ty, switch_ty, | |
715 | ), | |
716 | ); | |
717 | } | |
29967ef6 XL |
718 | |
719 | let target_width = self.tcx.sess.target.pointer_width; | |
720 | ||
721 | let size = Size::from_bits(match switch_ty.kind() { | |
722 | ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(), | |
723 | ty::Int(int) => int.normalize(target_width).bit_width().unwrap(), | |
724 | ty::Char => 32, | |
725 | ty::Bool => 1, | |
726 | other => bug!("unhandled type: {:?}", other), | |
727 | }); | |
728 | ||
729 | for (value, target) in targets.iter() { | |
730 | if Scalar::<()>::try_from_uint(value, size).is_none() { | |
731 | self.fail( | |
732 | location, | |
733 | format!("the value {:#x} is not a proper {:?}", value, switch_ty), | |
734 | ) | |
735 | } | |
736 | ||
737 | self.check_edge(location, target, EdgeKind::Normal); | |
f035d41b | 738 | } |
29967ef6 | 739 | self.check_edge(location, targets.otherwise(), EdgeKind::Normal); |
5099ac24 FG |
740 | |
741 | self.value_cache.clear(); | |
742 | self.value_cache.extend(targets.iter().map(|(value, _)| value)); | |
743 | let all_len = self.value_cache.len(); | |
744 | self.value_cache.sort_unstable(); | |
745 | self.value_cache.dedup(); | |
746 | let has_duplicates = all_len != self.value_cache.len(); | |
747 | if has_duplicates { | |
748 | self.fail( | |
749 | location, | |
750 | format!( | |
751 | "duplicated values in `SwitchInt` terminator: {:?}", | |
752 | terminator.kind, | |
753 | ), | |
754 | ); | |
755 | } | |
f035d41b XL |
756 | } |
757 | TerminatorKind::Drop { target, unwind, .. } => { | |
758 | self.check_edge(location, *target, EdgeKind::Normal); | |
759 | if let Some(unwind) = unwind { | |
760 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
761 | } | |
762 | } | |
763 | TerminatorKind::DropAndReplace { target, unwind, .. } => { | |
5e7ed085 | 764 | if self.mir_phase >= MirPhase::DropsLowered { |
3dfed10e XL |
765 | self.fail( |
766 | location, | |
5e7ed085 | 767 | "`DropAndReplace` should have been removed during drop elaboration", |
3dfed10e XL |
768 | ); |
769 | } | |
f035d41b XL |
770 | self.check_edge(location, *target, EdgeKind::Normal); |
771 | if let Some(unwind) = unwind { | |
772 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
773 | } | |
774 | } | |
923072b8 | 775 | TerminatorKind::Call { func, args, destination, target, cleanup, .. } => { |
f035d41b | 776 | let func_ty = func.ty(&self.body.local_decls, self.tcx); |
1b1a35ee | 777 | match func_ty.kind() { |
f035d41b XL |
778 | ty::FnPtr(..) | ty::FnDef(..) => {} |
779 | _ => self.fail( | |
780 | location, | |
781 | format!("encountered non-callable type {} in `Call` terminator", func_ty), | |
782 | ), | |
783 | } | |
923072b8 | 784 | if let Some(target) = target { |
f035d41b XL |
785 | self.check_edge(location, *target, EdgeKind::Normal); |
786 | } | |
787 | if let Some(cleanup) = cleanup { | |
788 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
789 | } | |
29967ef6 XL |
790 | |
791 | // The call destination place and Operand::Move place used as an argument might be | |
792 | // passed by a reference to the callee. Consequently they must be non-overlapping. | |
793 | // Currently this simply checks for duplicate places. | |
794 | self.place_cache.clear(); | |
923072b8 | 795 | self.place_cache.push(destination.as_ref()); |
29967ef6 XL |
796 | for arg in args { |
797 | if let Operand::Move(place) = arg { | |
798 | self.place_cache.push(place.as_ref()); | |
799 | } | |
800 | } | |
801 | let all_len = self.place_cache.len(); | |
04454e1e FG |
802 | let mut dedup = FxHashSet::default(); |
803 | self.place_cache.retain(|p| dedup.insert(*p)); | |
29967ef6 XL |
804 | let has_duplicates = all_len != self.place_cache.len(); |
805 | if has_duplicates { | |
806 | self.fail( | |
807 | location, | |
808 | format!( | |
809 | "encountered overlapping memory in `Call` terminator: {:?}", | |
810 | terminator.kind, | |
811 | ), | |
812 | ); | |
813 | } | |
f035d41b XL |
814 | } |
815 | TerminatorKind::Assert { cond, target, cleanup, .. } => { | |
816 | let cond_ty = cond.ty(&self.body.local_decls, self.tcx); | |
817 | if cond_ty != self.tcx.types.bool { | |
818 | self.fail( | |
819 | location, | |
820 | format!( | |
821 | "encountered non-boolean condition of type {} in `Assert` terminator", | |
822 | cond_ty | |
823 | ), | |
824 | ); | |
825 | } | |
826 | self.check_edge(location, *target, EdgeKind::Normal); | |
827 | if let Some(cleanup) = cleanup { | |
828 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
829 | } | |
830 | } | |
831 | TerminatorKind::Yield { resume, drop, .. } => { | |
04454e1e FG |
832 | if self.body.generator.is_none() { |
833 | self.fail(location, "`Yield` cannot appear outside generator bodies"); | |
834 | } | |
5e7ed085 | 835 | if self.mir_phase >= MirPhase::GeneratorsLowered { |
3dfed10e XL |
836 | self.fail(location, "`Yield` should have been replaced by generator lowering"); |
837 | } | |
f035d41b XL |
838 | self.check_edge(location, *resume, EdgeKind::Normal); |
839 | if let Some(drop) = drop { | |
840 | self.check_edge(location, *drop, EdgeKind::Normal); | |
841 | } | |
842 | } | |
843 | TerminatorKind::FalseEdge { real_target, imaginary_target } => { | |
5e7ed085 FG |
844 | if self.mir_phase >= MirPhase::DropsLowered { |
845 | self.fail( | |
846 | location, | |
847 | "`FalseEdge` should have been removed after drop elaboration", | |
848 | ); | |
849 | } | |
f035d41b XL |
850 | self.check_edge(location, *real_target, EdgeKind::Normal); |
851 | self.check_edge(location, *imaginary_target, EdgeKind::Normal); | |
852 | } | |
853 | TerminatorKind::FalseUnwind { real_target, unwind } => { | |
5e7ed085 FG |
854 | if self.mir_phase >= MirPhase::DropsLowered { |
855 | self.fail( | |
856 | location, | |
857 | "`FalseUnwind` should have been removed after drop elaboration", | |
858 | ); | |
859 | } | |
f035d41b XL |
860 | self.check_edge(location, *real_target, EdgeKind::Normal); |
861 | if let Some(unwind) = unwind { | |
862 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
863 | } | |
864 | } | |
a2a8927a | 865 | TerminatorKind::InlineAsm { destination, cleanup, .. } => { |
f035d41b XL |
866 | if let Some(destination) = destination { |
867 | self.check_edge(location, *destination, EdgeKind::Normal); | |
f9f354fc | 868 | } |
a2a8927a XL |
869 | if let Some(cleanup) = cleanup { |
870 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
871 | } | |
f9f354fc | 872 | } |
5e7ed085 | 873 | TerminatorKind::GeneratorDrop => { |
04454e1e FG |
874 | if self.body.generator.is_none() { |
875 | self.fail(location, "`GeneratorDrop` cannot appear outside generator bodies"); | |
876 | } | |
5e7ed085 FG |
877 | if self.mir_phase >= MirPhase::GeneratorsLowered { |
878 | self.fail( | |
879 | location, | |
880 | "`GeneratorDrop` should have been replaced by generator lowering", | |
881 | ); | |
882 | } | |
883 | } | |
04454e1e FG |
884 | TerminatorKind::Resume | TerminatorKind::Abort => { |
885 | let bb = location.block; | |
886 | if !self.body.basic_blocks()[bb].is_cleanup { | |
887 | self.fail(location, "Cannot `Resume` or `Abort` from non-cleanup basic block") | |
888 | } | |
889 | } | |
890 | TerminatorKind::Return => { | |
891 | let bb = location.block; | |
892 | if self.body.basic_blocks()[bb].is_cleanup { | |
893 | self.fail(location, "Cannot `Return` from cleanup basic block") | |
894 | } | |
895 | } | |
896 | TerminatorKind::Unreachable => {} | |
f9f354fc | 897 | } |
29967ef6 XL |
898 | |
899 | self.super_terminator(terminator, location); | |
900 | } | |
901 | ||
064997fb FG |
902 | fn visit_source_scope(&mut self, scope: SourceScope) { |
903 | if self.body.source_scopes.get(scope).is_none() { | |
29967ef6 XL |
904 | self.tcx.sess.diagnostic().delay_span_bug( |
905 | self.body.span, | |
906 | &format!( | |
907 | "broken MIR in {:?} ({}):\ninvalid source scope {:?}", | |
908 | self.body.source.instance, self.when, scope, | |
909 | ), | |
910 | ); | |
911 | } | |
f9f354fc XL |
912 | } |
913 | } |