]>
Commit | Line | Data |
---|---|---|
f9f354fc XL |
1 | //! Validates the MIR to ensure that invariants are upheld. |
2 | ||
04454e1e | 3 | use rustc_data_structures::fx::FxHashSet; |
29967ef6 | 4 | use rustc_index::bit_set::BitSet; |
1b1a35ee | 5 | use rustc_infer::infer::TyCtxtInferExt; |
29967ef6 | 6 | use rustc_middle::mir::interpret::Scalar; |
923072b8 | 7 | use rustc_middle::mir::visit::NonUseContext::VarDebugInfo; |
1b1a35ee XL |
8 | use rustc_middle::mir::visit::{PlaceContext, Visitor}; |
9 | use rustc_middle::mir::{ | |
04454e1e FG |
10 | traversal, AggregateKind, BasicBlock, BinOp, Body, BorrowKind, Local, Location, MirPass, |
11 | MirPhase, Operand, Place, PlaceElem, PlaceRef, ProjectionElem, Rvalue, SourceScope, Statement, | |
12 | StatementKind, Terminator, TerminatorKind, UnOp, START_BLOCK, | |
f9f354fc | 13 | }; |
1b1a35ee | 14 | use rustc_middle::ty::fold::BottomUpFolder; |
04454e1e | 15 | use rustc_middle::ty::{self, InstanceDef, ParamEnv, Ty, TyCtxt, TypeFoldable}; |
c295e0f8 | 16 | use rustc_mir_dataflow::impls::MaybeStorageLive; |
923072b8 | 17 | use rustc_mir_dataflow::storage::always_live_locals; |
c295e0f8 | 18 | use rustc_mir_dataflow::{Analysis, ResultsCursor}; |
04454e1e | 19 | use rustc_target::abi::{Size, VariantIdx}; |
f035d41b XL |
20 | |
/// Classifies a control-flow edge for the purpose of checking unwind
/// invariants: jumps into cleanup blocks must only happen along `Unwind`
/// edges (or cleanup-to-cleanup `Normal` edges).
#[derive(Copy, Clone, Debug)]
enum EdgeKind {
    /// An edge taken when unwinding out of the source block.
    Unwind,
    /// An ordinary (non-unwinding) successor edge.
    Normal,
}
f9f354fc XL |
26 | |
27 | pub struct Validator { | |
28 | /// Describes at which point in the pipeline this validation is happening. | |
29 | pub when: String, | |
3dfed10e XL |
30 | /// The phase for which we are upholding the dialect. If the given phase forbids a specific |
31 | /// element, this validator will now emit errors if that specific element is encountered. | |
32 | /// Note that phases that change the dialect cause all *following* phases to check the | |
33 | /// invariants of the new dialect. A phase that changes dialects never checks the new invariants | |
34 | /// itself. | |
35 | pub mir_phase: MirPhase, | |
f9f354fc XL |
36 | } |
37 | ||
38 | impl<'tcx> MirPass<'tcx> for Validator { | |
29967ef6 | 39 | fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { |
04454e1e FG |
40 | // FIXME(JakobDegen): These bodies never instantiated in codegend anyway, so it's not |
41 | // terribly important that they pass the validator. However, I think other passes might | |
42 | // still see them, in which case they might be surprised. It would probably be better if we | |
43 | // didn't put this through the MIR pipeline at all. | |
44 | if matches!(body.source.instance, InstanceDef::Intrinsic(..) | InstanceDef::Virtual(..)) { | |
45 | return; | |
46 | } | |
29967ef6 | 47 | let def_id = body.source.def_id(); |
1b1a35ee | 48 | let param_env = tcx.param_env(def_id); |
3dfed10e | 49 | let mir_phase = self.mir_phase; |
1b1a35ee | 50 | |
923072b8 | 51 | let always_live_locals = always_live_locals(body); |
1b1a35ee | 52 | let storage_liveness = MaybeStorageLive::new(always_live_locals) |
29967ef6 | 53 | .into_engine(tcx, body) |
1b1a35ee XL |
54 | .iterate_to_fixpoint() |
55 | .into_results_cursor(body); | |
56 | ||
29967ef6 XL |
57 | TypeChecker { |
58 | when: &self.when, | |
59 | body, | |
60 | tcx, | |
61 | param_env, | |
62 | mir_phase, | |
63 | reachable_blocks: traversal::reachable_as_bitset(body), | |
64 | storage_liveness, | |
65 | place_cache: Vec::new(), | |
5099ac24 | 66 | value_cache: Vec::new(), |
29967ef6 XL |
67 | } |
68 | .visit_body(body); | |
f9f354fc XL |
69 | } |
70 | } | |
71 | ||
f035d41b XL |
72 | /// Returns whether the two types are equal up to lifetimes. |
73 | /// All lifetimes, including higher-ranked ones, get ignored for this comparison. | |
74 | /// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.) | |
75 | /// | |
76 | /// The point of this function is to approximate "equal up to subtyping". However, | |
77 | /// the approximation is incorrect as variance is ignored. | |
a2a8927a | 78 | pub fn equal_up_to_regions<'tcx>( |
f035d41b XL |
79 | tcx: TyCtxt<'tcx>, |
80 | param_env: ParamEnv<'tcx>, | |
81 | src: Ty<'tcx>, | |
82 | dest: Ty<'tcx>, | |
83 | ) -> bool { | |
84 | // Fast path. | |
85 | if src == dest { | |
86 | return true; | |
87 | } | |
88 | ||
1b1a35ee | 89 | // Normalize lifetimes away on both sides, then compare. |
1b1a35ee XL |
90 | let normalize = |ty: Ty<'tcx>| { |
91 | tcx.normalize_erasing_regions( | |
92 | param_env, | |
93 | ty.fold_with(&mut BottomUpFolder { | |
94 | tcx, | |
29967ef6 XL |
95 | // FIXME: We erase all late-bound lifetimes, but this is not fully correct. |
96 | // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`, | |
97 | // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`, | |
98 | // since one may have an `impl SomeTrait for fn(&32)` and | |
99 | // `impl SomeTrait for fn(&'static u32)` at the same time which | |
100 | // specify distinct values for Assoc. (See also #56105) | |
1b1a35ee XL |
101 | lt_op: |_| tcx.lifetimes.re_erased, |
102 | // Leave consts and types unchanged. | |
103 | ct_op: |ct| ct, | |
104 | ty_op: |ty| ty, | |
105 | }), | |
106 | ) | |
107 | }; | |
108 | tcx.infer_ctxt().enter(|infcx| infcx.can_eq(param_env, normalize(src), normalize(dest)).is_ok()) | |
f035d41b XL |
109 | } |
110 | ||
f9f354fc XL |
111 | struct TypeChecker<'a, 'tcx> { |
112 | when: &'a str, | |
f9f354fc XL |
113 | body: &'a Body<'tcx>, |
114 | tcx: TyCtxt<'tcx>, | |
115 | param_env: ParamEnv<'tcx>, | |
3dfed10e | 116 | mir_phase: MirPhase, |
29967ef6 | 117 | reachable_blocks: BitSet<BasicBlock>, |
1b1a35ee | 118 | storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>, |
29967ef6 | 119 | place_cache: Vec<PlaceRef<'tcx>>, |
5099ac24 | 120 | value_cache: Vec<u128>, |
f9f354fc XL |
121 | } |
122 | ||
123 | impl<'a, 'tcx> TypeChecker<'a, 'tcx> { | |
f035d41b XL |
124 | fn fail(&self, location: Location, msg: impl AsRef<str>) { |
125 | let span = self.body.source_info(location).span; | |
f9f354fc XL |
126 | // We use `delay_span_bug` as we might see broken MIR when other errors have already |
127 | // occurred. | |
128 | self.tcx.sess.diagnostic().delay_span_bug( | |
129 | span, | |
f035d41b XL |
130 | &format!( |
131 | "broken MIR in {:?} ({}) at {:?}:\n{}", | |
29967ef6 | 132 | self.body.source.instance, |
f035d41b XL |
133 | self.when, |
134 | location, | |
135 | msg.as_ref() | |
136 | ), | |
f9f354fc XL |
137 | ); |
138 | } | |
f035d41b XL |
139 | |
140 | fn check_edge(&self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) { | |
c295e0f8 XL |
141 | if bb == START_BLOCK { |
142 | self.fail(location, "start block must not have predecessors") | |
143 | } | |
f035d41b XL |
144 | if let Some(bb) = self.body.basic_blocks().get(bb) { |
145 | let src = self.body.basic_blocks().get(location.block).unwrap(); | |
146 | match (src.is_cleanup, bb.is_cleanup, edge_kind) { | |
147 | // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges | |
148 | (false, false, EdgeKind::Normal) | |
149 | // Non-cleanup blocks can jump to cleanup blocks along unwind edges | |
150 | | (false, true, EdgeKind::Unwind) | |
151 | // Cleanup blocks can jump to cleanup blocks along non-unwind edges | |
152 | | (true, true, EdgeKind::Normal) => {} | |
153 | // All other jumps are invalid | |
154 | _ => { | |
155 | self.fail( | |
156 | location, | |
157 | format!( | |
158 | "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})", | |
159 | edge_kind, | |
160 | bb, | |
161 | src.is_cleanup, | |
162 | bb.is_cleanup, | |
163 | ) | |
164 | ) | |
165 | } | |
166 | } | |
167 | } else { | |
168 | self.fail(location, format!("encountered jump to invalid basic block {:?}", bb)) | |
169 | } | |
170 | } | |
171 | ||
172 | /// Check if src can be assigned into dest. | |
173 | /// This is not precise, it will accept some incorrect assignments. | |
174 | fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool { | |
175 | // Fast path before we normalize. | |
176 | if src == dest { | |
177 | // Equal types, all is good. | |
178 | return true; | |
179 | } | |
5e7ed085 FG |
180 | // Normalization reveals opaque types, but we may be validating MIR while computing |
181 | // said opaque types, causing cycles. | |
182 | if (src, dest).has_opaque_types() { | |
183 | return true; | |
184 | } | |
f035d41b | 185 | // Normalize projections and things like that. |
3dfed10e | 186 | let param_env = self.param_env.with_reveal_all_normalized(self.tcx); |
f035d41b XL |
187 | let src = self.tcx.normalize_erasing_regions(param_env, src); |
188 | let dest = self.tcx.normalize_erasing_regions(param_env, dest); | |
189 | ||
190 | // Type-changing assignments can happen when subtyping is used. While | |
191 | // all normal lifetimes are erased, higher-ranked types with their | |
192 | // late-bound lifetimes are still around and can lead to type | |
193 | // differences. So we compare ignoring lifetimes. | |
194 | equal_up_to_regions(self.tcx, param_env, src, dest) | |
195 | } | |
f9f354fc XL |
196 | } |
197 | ||
198 | impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { | |
1b1a35ee | 199 | fn visit_local(&mut self, local: &Local, context: PlaceContext, location: Location) { |
fc512014 XL |
200 | if self.body.local_decls.get(*local).is_none() { |
201 | self.fail( | |
202 | location, | |
203 | format!("local {:?} has no corresponding declaration in `body.local_decls`", local), | |
204 | ); | |
205 | } | |
206 | ||
29967ef6 | 207 | if self.reachable_blocks.contains(location.block) && context.is_use() { |
1b1a35ee XL |
208 | // Uses of locals must occur while the local's storage is allocated. |
209 | self.storage_liveness.seek_after_primary_effect(location); | |
210 | let locals_with_storage = self.storage_liveness.get(); | |
211 | if !locals_with_storage.contains(*local) { | |
212 | self.fail(location, format!("use of local {:?}, which has no storage here", local)); | |
213 | } | |
214 | } | |
215 | } | |
216 | ||
f9f354fc | 217 | fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { |
29967ef6 | 218 | // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed. |
04454e1e FG |
219 | if self.tcx.sess.opts.debugging_opts.validate_mir && self.mir_phase < MirPhase::DropsLowered |
220 | { | |
29967ef6 XL |
221 | // `Operand::Copy` is only supposed to be used with `Copy` types. |
222 | if let Operand::Copy(place) = operand { | |
223 | let ty = place.ty(&self.body.local_decls, self.tcx).ty; | |
224 | let span = self.body.source_info(location).span; | |
f9f354fc | 225 | |
29967ef6 XL |
226 | if !ty.is_copy_modulo_regions(self.tcx.at(span), self.param_env) { |
227 | self.fail(location, format!("`Operand::Copy` with non-`Copy` type {}", ty)); | |
228 | } | |
f9f354fc XL |
229 | } |
230 | } | |
231 | ||
232 | self.super_operand(operand, location); | |
233 | } | |
234 | ||
17df50a5 XL |
235 | fn visit_projection_elem( |
236 | &mut self, | |
237 | local: Local, | |
238 | proj_base: &[PlaceElem<'tcx>], | |
239 | elem: PlaceElem<'tcx>, | |
240 | context: PlaceContext, | |
241 | location: Location, | |
242 | ) { | |
923072b8 FG |
243 | match elem { |
244 | ProjectionElem::Index(index) => { | |
245 | let index_ty = self.body.local_decls[index].ty; | |
246 | if index_ty != self.tcx.types.usize { | |
247 | self.fail(location, format!("bad index ({:?} != usize)", index_ty)) | |
248 | } | |
17df50a5 | 249 | } |
923072b8 FG |
250 | ProjectionElem::Deref if self.mir_phase >= MirPhase::GeneratorsLowered => { |
251 | let base_ty = Place::ty_from(local, proj_base, &self.body.local_decls, self.tcx).ty; | |
252 | ||
253 | if base_ty.is_box() { | |
254 | self.fail( | |
255 | location, | |
256 | format!("{:?} dereferenced after ElaborateBoxDerefs", base_ty), | |
257 | ) | |
258 | } | |
259 | } | |
260 | ProjectionElem::Field(f, ty) => { | |
261 | let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) }; | |
262 | let parent_ty = parent.ty(&self.body.local_decls, self.tcx); | |
263 | let fail_out_of_bounds = |this: &Self, location| { | |
264 | this.fail(location, format!("Out of bounds field {:?} for {:?}", f, parent_ty)); | |
265 | }; | |
266 | let check_equal = |this: &Self, location, f_ty| { | |
267 | if !this.mir_assign_valid_types(ty, f_ty) { | |
268 | this.fail( | |
04454e1e FG |
269 | location, |
270 | format!( | |
271 | "Field projection `{:?}.{:?}` specified type `{:?}`, but actual type is {:?}", | |
272 | parent, f, ty, f_ty | |
273 | ) | |
274 | ) | |
923072b8 FG |
275 | } |
276 | }; | |
277 | ||
278 | match parent_ty.ty.kind() { | |
279 | ty::Tuple(fields) => { | |
280 | let Some(f_ty) = fields.get(f.as_usize()) else { | |
281 | fail_out_of_bounds(self, location); | |
282 | return; | |
283 | }; | |
284 | check_equal(self, location, *f_ty); | |
285 | } | |
286 | ty::Adt(adt_def, substs) => { | |
287 | let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0)); | |
288 | let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else { | |
289 | fail_out_of_bounds(self, location); | |
290 | return; | |
291 | }; | |
292 | check_equal(self, location, field.ty(self.tcx, substs)); | |
293 | } | |
294 | ty::Closure(_, substs) => { | |
295 | let substs = substs.as_closure(); | |
296 | let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else { | |
297 | fail_out_of_bounds(self, location); | |
298 | return; | |
299 | }; | |
300 | check_equal(self, location, f_ty); | |
301 | } | |
302 | ty::Generator(_, substs, _) => { | |
303 | let substs = substs.as_generator(); | |
304 | let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else { | |
305 | fail_out_of_bounds(self, location); | |
306 | return; | |
307 | }; | |
308 | check_equal(self, location, f_ty); | |
309 | } | |
310 | _ => { | |
311 | self.fail(location, format!("{:?} does not have fields", parent_ty.ty)); | |
312 | } | |
04454e1e FG |
313 | } |
314 | } | |
923072b8 | 315 | _ => {} |
04454e1e | 316 | } |
17df50a5 XL |
317 | self.super_projection_elem(local, proj_base, elem, context, location); |
318 | } | |
319 | ||
923072b8 | 320 | fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) { |
04454e1e FG |
321 | // Set off any `bug!`s in the type computation code |
322 | let _ = place.ty(&self.body.local_decls, self.tcx); | |
923072b8 FG |
323 | |
324 | if self.mir_phase >= MirPhase::Derefered | |
325 | && place.projection.len() > 1 | |
326 | && cntxt != PlaceContext::NonUse(VarDebugInfo) | |
327 | && place.projection[1..].contains(&ProjectionElem::Deref) | |
328 | { | |
329 | self.fail(location, format!("{:?}, has deref at the wrong place", place)); | |
330 | } | |
04454e1e FG |
331 | } |
332 | ||
333 | fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { | |
334 | macro_rules! check_kinds { | |
335 | ($t:expr, $text:literal, $($patterns:tt)*) => { | |
336 | if !matches!(($t).kind(), $($patterns)*) { | |
337 | self.fail(location, format!($text, $t)); | |
338 | } | |
339 | }; | |
340 | } | |
341 | match rvalue { | |
342 | Rvalue::Use(_) => {} | |
343 | Rvalue::Aggregate(agg_kind, _) => { | |
344 | let disallowed = match **agg_kind { | |
345 | AggregateKind::Array(..) => false, | |
346 | AggregateKind::Generator(..) => self.mir_phase >= MirPhase::GeneratorsLowered, | |
347 | _ => self.mir_phase >= MirPhase::Deaggregated, | |
348 | }; | |
349 | if disallowed { | |
f035d41b XL |
350 | self.fail( |
351 | location, | |
04454e1e FG |
352 | format!("{:?} have been lowered to field assignments", rvalue), |
353 | ) | |
354 | } | |
355 | } | |
356 | Rvalue::Ref(_, BorrowKind::Shallow, _) => { | |
357 | if self.mir_phase >= MirPhase::DropsLowered { | |
358 | self.fail( | |
359 | location, | |
360 | "`Assign` statement with a `Shallow` borrow should have been removed after drop lowering phase", | |
f035d41b XL |
361 | ); |
362 | } | |
04454e1e FG |
363 | } |
364 | Rvalue::Len(p) => { | |
365 | let pty = p.ty(&self.body.local_decls, self.tcx).ty; | |
366 | check_kinds!( | |
367 | pty, | |
368 | "Cannot compute length of non-array type {:?}", | |
369 | ty::Array(..) | ty::Slice(..) | |
370 | ); | |
371 | } | |
372 | Rvalue::BinaryOp(op, vals) => { | |
373 | use BinOp::*; | |
374 | let a = vals.0.ty(&self.body.local_decls, self.tcx); | |
375 | let b = vals.1.ty(&self.body.local_decls, self.tcx); | |
376 | match op { | |
377 | Offset => { | |
378 | check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..)); | |
379 | if b != self.tcx.types.isize && b != self.tcx.types.usize { | |
380 | self.fail(location, format!("Cannot offset by non-isize type {:?}", b)); | |
381 | } | |
382 | } | |
383 | Eq | Lt | Le | Ne | Ge | Gt => { | |
384 | for x in [a, b] { | |
385 | check_kinds!( | |
386 | x, | |
387 | "Cannot compare type {:?}", | |
388 | ty::Bool | |
389 | | ty::Char | |
390 | | ty::Int(..) | |
391 | | ty::Uint(..) | |
392 | | ty::Float(..) | |
393 | | ty::RawPtr(..) | |
394 | | ty::FnPtr(..) | |
395 | ) | |
396 | } | |
397 | // The function pointer types can have lifetimes | |
398 | if !self.mir_assign_valid_types(a, b) { | |
f035d41b XL |
399 | self.fail( |
400 | location, | |
04454e1e | 401 | format!("Cannot compare unequal types {:?} and {:?}", a, b), |
f035d41b XL |
402 | ); |
403 | } | |
f9f354fc | 404 | } |
04454e1e FG |
405 | Shl | Shr => { |
406 | for x in [a, b] { | |
407 | check_kinds!( | |
408 | x, | |
409 | "Cannot shift non-integer type {:?}", | |
410 | ty::Uint(..) | ty::Int(..) | |
411 | ) | |
412 | } | |
413 | } | |
414 | BitAnd | BitOr | BitXor => { | |
415 | for x in [a, b] { | |
416 | check_kinds!( | |
417 | x, | |
418 | "Cannot perform bitwise op on type {:?}", | |
419 | ty::Uint(..) | ty::Int(..) | ty::Bool | |
420 | ) | |
421 | } | |
422 | if a != b { | |
3dfed10e XL |
423 | self.fail( |
424 | location, | |
04454e1e FG |
425 | format!( |
426 | "Cannot perform bitwise op on unequal types {:?} and {:?}", | |
427 | a, b | |
428 | ), | |
429 | ); | |
430 | } | |
431 | } | |
432 | Add | Sub | Mul | Div | Rem => { | |
433 | for x in [a, b] { | |
434 | check_kinds!( | |
435 | x, | |
436 | "Cannot perform arithmetic on type {:?}", | |
437 | ty::Uint(..) | ty::Int(..) | ty::Float(..) | |
3dfed10e XL |
438 | ) |
439 | } | |
04454e1e FG |
440 | if a != b { |
441 | self.fail( | |
442 | location, | |
443 | format!( | |
444 | "Cannot perform arithmetic on unequal types {:?} and {:?}", | |
445 | a, b | |
446 | ), | |
447 | ); | |
448 | } | |
3dfed10e | 449 | } |
04454e1e FG |
450 | } |
451 | } | |
452 | Rvalue::CheckedBinaryOp(op, vals) => { | |
453 | use BinOp::*; | |
454 | let a = vals.0.ty(&self.body.local_decls, self.tcx); | |
455 | let b = vals.1.ty(&self.body.local_decls, self.tcx); | |
456 | match op { | |
457 | Add | Sub | Mul => { | |
458 | for x in [a, b] { | |
459 | check_kinds!( | |
460 | x, | |
461 | "Cannot perform checked arithmetic on type {:?}", | |
462 | ty::Uint(..) | ty::Int(..) | |
463 | ) | |
464 | } | |
465 | if a != b { | |
1b1a35ee XL |
466 | self.fail( |
467 | location, | |
04454e1e FG |
468 | format!( |
469 | "Cannot perform checked arithmetic on unequal types {:?} and {:?}", | |
470 | a, b | |
471 | ), | |
1b1a35ee XL |
472 | ); |
473 | } | |
474 | } | |
04454e1e FG |
475 | Shl | Shr => { |
476 | for x in [a, b] { | |
477 | check_kinds!( | |
478 | x, | |
479 | "Cannot perform checked shift on non-integer type {:?}", | |
480 | ty::Uint(..) | ty::Int(..) | |
481 | ) | |
482 | } | |
483 | } | |
484 | _ => self.fail(location, format!("There is no checked version of {:?}", op)), | |
485 | } | |
486 | } | |
487 | Rvalue::UnaryOp(op, operand) => { | |
488 | let a = operand.ty(&self.body.local_decls, self.tcx); | |
489 | match op { | |
490 | UnOp::Neg => { | |
491 | check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..)) | |
492 | } | |
493 | UnOp::Not => { | |
494 | check_kinds!( | |
495 | a, | |
496 | "Cannot binary not type {:?}", | |
497 | ty::Int(..) | ty::Uint(..) | ty::Bool | |
498 | ); | |
499 | } | |
500 | } | |
501 | } | |
502 | Rvalue::ShallowInitBox(operand, _) => { | |
503 | let a = operand.ty(&self.body.local_decls, self.tcx); | |
504 | check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..)); | |
505 | } | |
506 | _ => {} | |
507 | } | |
508 | self.super_rvalue(rvalue, location); | |
509 | } | |
510 | ||
511 | fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { | |
512 | match &statement.kind { | |
513 | StatementKind::Assign(box (dest, rvalue)) => { | |
514 | // LHS and RHS of the assignment must have the same type. | |
515 | let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty; | |
516 | let right_ty = rvalue.ty(&self.body.local_decls, self.tcx); | |
517 | if !self.mir_assign_valid_types(right_ty, left_ty) { | |
518 | self.fail( | |
519 | location, | |
520 | format!( | |
521 | "encountered `{:?}` with incompatible types:\n\ | |
522 | left-hand side has type: {}\n\ | |
523 | right-hand side has type: {}", | |
524 | statement.kind, left_ty, right_ty, | |
525 | ), | |
526 | ); | |
527 | } | |
528 | // FIXME(JakobDegen): Check this for all rvalues, not just this one. | |
529 | if let Rvalue::Use(Operand::Copy(src) | Operand::Move(src)) = rvalue { | |
530 | // The sides of an assignment must not alias. Currently this just checks whether | |
531 | // the places are identical. | |
532 | if dest == src { | |
533 | self.fail( | |
534 | location, | |
535 | "encountered `Assign` statement with overlapping memory", | |
536 | ); | |
537 | } | |
f035d41b XL |
538 | } |
539 | } | |
1b1a35ee | 540 | StatementKind::AscribeUserType(..) => { |
5e7ed085 | 541 | if self.mir_phase >= MirPhase::DropsLowered { |
1b1a35ee XL |
542 | self.fail( |
543 | location, | |
544 | "`AscribeUserType` should have been removed after drop lowering phase", | |
545 | ); | |
546 | } | |
547 | } | |
548 | StatementKind::FakeRead(..) => { | |
5e7ed085 | 549 | if self.mir_phase >= MirPhase::DropsLowered { |
1b1a35ee XL |
550 | self.fail( |
551 | location, | |
552 | "`FakeRead` should have been removed after drop lowering phase", | |
553 | ); | |
554 | } | |
555 | } | |
6a06907d XL |
556 | StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping { |
557 | ref src, | |
558 | ref dst, | |
559 | ref count, | |
560 | }) => { | |
561 | let src_ty = src.ty(&self.body.local_decls, self.tcx); | |
562 | let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) { | |
563 | src_deref.ty | |
564 | } else { | |
565 | self.fail( | |
566 | location, | |
567 | format!("Expected src to be ptr in copy_nonoverlapping, got: {}", src_ty), | |
568 | ); | |
569 | return; | |
570 | }; | |
571 | let dst_ty = dst.ty(&self.body.local_decls, self.tcx); | |
572 | let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) { | |
573 | dst_deref.ty | |
574 | } else { | |
575 | self.fail( | |
576 | location, | |
577 | format!("Expected dst to be ptr in copy_nonoverlapping, got: {}", dst_ty), | |
578 | ); | |
579 | return; | |
580 | }; | |
581 | // since CopyNonOverlapping is parametrized by 1 type, | |
582 | // we only need to check that they are equal and not keep an extra parameter. | |
04454e1e | 583 | if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) { |
6a06907d XL |
584 | self.fail(location, format!("bad arg ({:?} != {:?})", op_src_ty, op_dst_ty)); |
585 | } | |
586 | ||
587 | let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx); | |
588 | if op_cnt_ty != self.tcx.types.usize { | |
589 | self.fail(location, format!("bad arg ({:?} != usize)", op_cnt_ty)) | |
590 | } | |
591 | } | |
04454e1e FG |
592 | StatementKind::SetDiscriminant { place, .. } => { |
593 | if self.mir_phase < MirPhase::Deaggregated { | |
594 | self.fail(location, "`SetDiscriminant`is not allowed until deaggregation"); | |
595 | } | |
596 | let pty = place.ty(&self.body.local_decls, self.tcx).ty.kind(); | |
597 | if !matches!(pty, ty::Adt(..) | ty::Generator(..) | ty::Opaque(..)) { | |
598 | self.fail( | |
599 | location, | |
600 | format!( | |
601 | "`SetDiscriminant` is only allowed on ADTs and generators, not {:?}", | |
602 | pty | |
603 | ), | |
604 | ); | |
605 | } | |
606 | } | |
607 | StatementKind::Deinit(..) => { | |
608 | if self.mir_phase < MirPhase::Deaggregated { | |
609 | self.fail(location, "`Deinit`is not allowed until deaggregation"); | |
5e7ed085 FG |
610 | } |
611 | } | |
612 | StatementKind::Retag(_, _) => { | |
613 | // FIXME(JakobDegen) The validator should check that `self.mir_phase < | |
614 | // DropsLowered`. However, this causes ICEs with generation of drop shims, which | |
615 | // seem to fail to set their `MirPhase` correctly. | |
616 | } | |
617 | StatementKind::StorageLive(..) | |
6a06907d | 618 | | StatementKind::StorageDead(..) |
6a06907d XL |
619 | | StatementKind::Coverage(_) |
620 | | StatementKind::Nop => {} | |
f035d41b | 621 | } |
29967ef6 XL |
622 | |
623 | self.super_statement(statement, location); | |
f035d41b XL |
624 | } |
625 | ||
626 | fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { | |
627 | match &terminator.kind { | |
628 | TerminatorKind::Goto { target } => { | |
629 | self.check_edge(location, *target, EdgeKind::Normal); | |
630 | } | |
29967ef6 | 631 | TerminatorKind::SwitchInt { targets, switch_ty, discr } => { |
f035d41b XL |
632 | let ty = discr.ty(&self.body.local_decls, self.tcx); |
633 | if ty != *switch_ty { | |
634 | self.fail( | |
635 | location, | |
636 | format!( | |
637 | "encountered `SwitchInt` terminator with type mismatch: {:?} != {:?}", | |
638 | ty, switch_ty, | |
639 | ), | |
640 | ); | |
641 | } | |
29967ef6 XL |
642 | |
643 | let target_width = self.tcx.sess.target.pointer_width; | |
644 | ||
645 | let size = Size::from_bits(match switch_ty.kind() { | |
646 | ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(), | |
647 | ty::Int(int) => int.normalize(target_width).bit_width().unwrap(), | |
648 | ty::Char => 32, | |
649 | ty::Bool => 1, | |
650 | other => bug!("unhandled type: {:?}", other), | |
651 | }); | |
652 | ||
653 | for (value, target) in targets.iter() { | |
654 | if Scalar::<()>::try_from_uint(value, size).is_none() { | |
655 | self.fail( | |
656 | location, | |
657 | format!("the value {:#x} is not a proper {:?}", value, switch_ty), | |
658 | ) | |
659 | } | |
660 | ||
661 | self.check_edge(location, target, EdgeKind::Normal); | |
f035d41b | 662 | } |
29967ef6 | 663 | self.check_edge(location, targets.otherwise(), EdgeKind::Normal); |
5099ac24 FG |
664 | |
665 | self.value_cache.clear(); | |
666 | self.value_cache.extend(targets.iter().map(|(value, _)| value)); | |
667 | let all_len = self.value_cache.len(); | |
668 | self.value_cache.sort_unstable(); | |
669 | self.value_cache.dedup(); | |
670 | let has_duplicates = all_len != self.value_cache.len(); | |
671 | if has_duplicates { | |
672 | self.fail( | |
673 | location, | |
674 | format!( | |
675 | "duplicated values in `SwitchInt` terminator: {:?}", | |
676 | terminator.kind, | |
677 | ), | |
678 | ); | |
679 | } | |
f035d41b XL |
680 | } |
681 | TerminatorKind::Drop { target, unwind, .. } => { | |
682 | self.check_edge(location, *target, EdgeKind::Normal); | |
683 | if let Some(unwind) = unwind { | |
684 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
685 | } | |
686 | } | |
687 | TerminatorKind::DropAndReplace { target, unwind, .. } => { | |
5e7ed085 | 688 | if self.mir_phase >= MirPhase::DropsLowered { |
3dfed10e XL |
689 | self.fail( |
690 | location, | |
5e7ed085 | 691 | "`DropAndReplace` should have been removed during drop elaboration", |
3dfed10e XL |
692 | ); |
693 | } | |
f035d41b XL |
694 | self.check_edge(location, *target, EdgeKind::Normal); |
695 | if let Some(unwind) = unwind { | |
696 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
697 | } | |
698 | } | |
923072b8 | 699 | TerminatorKind::Call { func, args, destination, target, cleanup, .. } => { |
f035d41b | 700 | let func_ty = func.ty(&self.body.local_decls, self.tcx); |
1b1a35ee | 701 | match func_ty.kind() { |
f035d41b XL |
702 | ty::FnPtr(..) | ty::FnDef(..) => {} |
703 | _ => self.fail( | |
704 | location, | |
705 | format!("encountered non-callable type {} in `Call` terminator", func_ty), | |
706 | ), | |
707 | } | |
923072b8 | 708 | if let Some(target) = target { |
f035d41b XL |
709 | self.check_edge(location, *target, EdgeKind::Normal); |
710 | } | |
711 | if let Some(cleanup) = cleanup { | |
712 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
713 | } | |
29967ef6 XL |
714 | |
715 | // The call destination place and Operand::Move place used as an argument might be | |
716 | // passed by a reference to the callee. Consequently they must be non-overlapping. | |
717 | // Currently this simply checks for duplicate places. | |
718 | self.place_cache.clear(); | |
923072b8 | 719 | self.place_cache.push(destination.as_ref()); |
29967ef6 XL |
720 | for arg in args { |
721 | if let Operand::Move(place) = arg { | |
722 | self.place_cache.push(place.as_ref()); | |
723 | } | |
724 | } | |
725 | let all_len = self.place_cache.len(); | |
04454e1e FG |
726 | let mut dedup = FxHashSet::default(); |
727 | self.place_cache.retain(|p| dedup.insert(*p)); | |
29967ef6 XL |
728 | let has_duplicates = all_len != self.place_cache.len(); |
729 | if has_duplicates { | |
730 | self.fail( | |
731 | location, | |
732 | format!( | |
733 | "encountered overlapping memory in `Call` terminator: {:?}", | |
734 | terminator.kind, | |
735 | ), | |
736 | ); | |
737 | } | |
f035d41b XL |
738 | } |
739 | TerminatorKind::Assert { cond, target, cleanup, .. } => { | |
740 | let cond_ty = cond.ty(&self.body.local_decls, self.tcx); | |
741 | if cond_ty != self.tcx.types.bool { | |
742 | self.fail( | |
743 | location, | |
744 | format!( | |
745 | "encountered non-boolean condition of type {} in `Assert` terminator", | |
746 | cond_ty | |
747 | ), | |
748 | ); | |
749 | } | |
750 | self.check_edge(location, *target, EdgeKind::Normal); | |
751 | if let Some(cleanup) = cleanup { | |
752 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
753 | } | |
754 | } | |
755 | TerminatorKind::Yield { resume, drop, .. } => { | |
04454e1e FG |
756 | if self.body.generator.is_none() { |
757 | self.fail(location, "`Yield` cannot appear outside generator bodies"); | |
758 | } | |
5e7ed085 | 759 | if self.mir_phase >= MirPhase::GeneratorsLowered { |
3dfed10e XL |
760 | self.fail(location, "`Yield` should have been replaced by generator lowering"); |
761 | } | |
f035d41b XL |
762 | self.check_edge(location, *resume, EdgeKind::Normal); |
763 | if let Some(drop) = drop { | |
764 | self.check_edge(location, *drop, EdgeKind::Normal); | |
765 | } | |
766 | } | |
767 | TerminatorKind::FalseEdge { real_target, imaginary_target } => { | |
5e7ed085 FG |
768 | if self.mir_phase >= MirPhase::DropsLowered { |
769 | self.fail( | |
770 | location, | |
771 | "`FalseEdge` should have been removed after drop elaboration", | |
772 | ); | |
773 | } | |
f035d41b XL |
774 | self.check_edge(location, *real_target, EdgeKind::Normal); |
775 | self.check_edge(location, *imaginary_target, EdgeKind::Normal); | |
776 | } | |
777 | TerminatorKind::FalseUnwind { real_target, unwind } => { | |
5e7ed085 FG |
778 | if self.mir_phase >= MirPhase::DropsLowered { |
779 | self.fail( | |
780 | location, | |
781 | "`FalseUnwind` should have been removed after drop elaboration", | |
782 | ); | |
783 | } | |
f035d41b XL |
784 | self.check_edge(location, *real_target, EdgeKind::Normal); |
785 | if let Some(unwind) = unwind { | |
786 | self.check_edge(location, *unwind, EdgeKind::Unwind); | |
787 | } | |
788 | } | |
a2a8927a | 789 | TerminatorKind::InlineAsm { destination, cleanup, .. } => { |
f035d41b XL |
790 | if let Some(destination) = destination { |
791 | self.check_edge(location, *destination, EdgeKind::Normal); | |
f9f354fc | 792 | } |
a2a8927a XL |
793 | if let Some(cleanup) = cleanup { |
794 | self.check_edge(location, *cleanup, EdgeKind::Unwind); | |
795 | } | |
f9f354fc | 796 | } |
5e7ed085 | 797 | TerminatorKind::GeneratorDrop => { |
04454e1e FG |
798 | if self.body.generator.is_none() { |
799 | self.fail(location, "`GeneratorDrop` cannot appear outside generator bodies"); | |
800 | } | |
5e7ed085 FG |
801 | if self.mir_phase >= MirPhase::GeneratorsLowered { |
802 | self.fail( | |
803 | location, | |
804 | "`GeneratorDrop` should have been replaced by generator lowering", | |
805 | ); | |
806 | } | |
807 | } | |
04454e1e FG |
808 | TerminatorKind::Resume | TerminatorKind::Abort => { |
809 | let bb = location.block; | |
810 | if !self.body.basic_blocks()[bb].is_cleanup { | |
811 | self.fail(location, "Cannot `Resume` or `Abort` from non-cleanup basic block") | |
812 | } | |
813 | } | |
814 | TerminatorKind::Return => { | |
815 | let bb = location.block; | |
816 | if self.body.basic_blocks()[bb].is_cleanup { | |
817 | self.fail(location, "Cannot `Return` from cleanup basic block") | |
818 | } | |
819 | } | |
820 | TerminatorKind::Unreachable => {} | |
f9f354fc | 821 | } |
29967ef6 XL |
822 | |
823 | self.super_terminator(terminator, location); | |
824 | } | |
825 | ||
826 | fn visit_source_scope(&mut self, scope: &SourceScope) { | |
827 | if self.body.source_scopes.get(*scope).is_none() { | |
828 | self.tcx.sess.diagnostic().delay_span_bug( | |
829 | self.body.span, | |
830 | &format!( | |
831 | "broken MIR in {:?} ({}):\ninvalid source scope {:?}", | |
832 | self.body.source.instance, self.when, scope, | |
833 | ), | |
834 | ); | |
835 | } | |
f9f354fc XL |
836 | } |
837 | } |