]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_const_eval/src/transform/check_consts/check.rs
New upstream version 1.62.1+dfsg1
[rustc.git] / compiler / rustc_const_eval / src / transform / check_consts / check.rs
1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
2
3 use rustc_errors::{Applicability, Diagnostic, ErrorGuaranteed};
4 use rustc_hir as hir;
5 use rustc_hir::def_id::DefId;
6 use rustc_index::bit_set::BitSet;
7 use rustc_infer::infer::TyCtxtInferExt;
8 use rustc_infer::traits::{ImplSource, Obligation, ObligationCause};
9 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::cast::CastTy;
12 use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
13 use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt};
14 use rustc_middle::ty::{Binder, TraitPredicate, TraitRef, TypeFoldable};
15 use rustc_mir_dataflow::{self, Analysis};
16 use rustc_span::{sym, Span, Symbol};
17 use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
18 use rustc_trait_selection::traits::SelectionContext;
19
20 use std::mem;
21 use std::ops::Deref;
22
23 use super::ops::{self, NonConstOp, Status};
24 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
25 use super::resolver::FlowSensitiveAnalysis;
26 use super::{ConstCx, Qualif};
27 use crate::const_eval::is_unstable_const_fn;
28
/// A dataflow results cursor for a single qualif `Q`, borrowed from the `ConstCx`
/// (`'mir`) that owns the MIR body being analyzed. Built lazily by `Qualifs`.
type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
31
/// Lazily-initialized dataflow cursors for the flow-sensitive qualifs used during
/// const checking. Each field stays `None` until the corresponding accessor on
/// `impl Qualifs` first needs a flow-sensitive answer; the cursor is then cached
/// and reused for all later queries.
#[derive(Default)]
pub struct Qualifs<'mir, 'tcx> {
    // `Some` only after `has_mut_interior` has run its dataflow analysis once.
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    // `Some` only after `needs_drop` has run its dataflow analysis once.
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    // `Some` only after `needs_non_const_drop` has run its dataflow analysis once.
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
}
38
impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    pub fn needs_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Fast path: if no value of this type can possibly need drop, neither can
        // `local`, and we can skip the dataflow analysis entirely.
        //
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function
        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        // Run the dataflow analysis the first time a flow-sensitive answer is needed;
        // later calls reuse the cached results cursor.
        let needs_drop = self.needs_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
                .into_engine(tcx, &body)
                .iterate_to_fixpoint()
                .into_results_cursor(&body)
        });

        // Query the state just *before* the primary effect of `location` takes place.
        needs_drop.seek_before_primary_effect(location);
        needs_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    pub fn needs_non_const_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Fast path: if no value of this type can need a non-const drop, neither can
        // `local`. (Note: unlike `needs_drop` above, this path does not special-case
        // opaque types.)
        if !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        // Lazily build and cache the dataflow results cursor, as above.
        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
                .into_engine(tcx, &body)
                .iterate_to_fixpoint()
                .into_results_cursor(&body)
        });

        needs_non_const_drop.seek_before_primary_effect(location);
        needs_non_const_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    pub fn has_mut_interior(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function
        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        // Lazily build and cache the dataflow results cursor, as above.
        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
                .into_engine(tcx, &body)
                .iterate_to_fixpoint()
                .into_results_cursor(&body)
        });

        has_mut_interior.seek_before_primary_effect(location);
        has_mut_interior.get().contains(local)
    }

    /// Computes the qualifs of the value stored in the return place when the body
    /// returns, i.e. the qualifs of the final value of this const/static/const fn.
    fn in_return_place(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        tainted_by_errors: Option<ErrorGuaranteed>,
    ) -> ConstQualifs {
        // Find the `Return` terminator if one exists.
        //
        // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
        // qualifs for the return type.
        let return_block = ccx
            .body
            .basic_blocks()
            .iter_enumerated()
            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
            .map(|(bb, _)| bb);

        let Some(return_block) = return_block else {
            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
        };

        let return_loc = ccx.body.terminator_loc(return_block);

        let custom_eq = match ccx.const_kind() {
            // We don't care whether a `const fn` returns a value that is not structurally
            // matchable. Functions calls are opaque and always use type-based qualification, so
            // this value should never be used.
            hir::ConstContext::ConstFn => true,

            // If we know that all values of the return type are structurally matchable, there's no
            // need to run dataflow.
            // Opaque types do not participate in const generics or pattern matching, so we can safely count them out.
            _ if ccx.body.return_ty().has_opaque_types()
                || !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) =>
            {
                false
            }

            hir::ConstContext::Const | hir::ConstContext::Static(_) => {
                // `CustomEq` is only needed here, so the cursor is built ad hoc rather
                // than cached like the three qualifs above.
                let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
                    .into_engine(ccx.tcx, &ccx.body)
                    .iterate_to_fixpoint()
                    .into_results_cursor(&ccx.body);

                // Here we want the state *after* the `Return` terminator's effect.
                cursor.seek_after_primary_effect(return_loc);
                cursor.get().contains(RETURN_PLACE)
            }
        };

        ConstQualifs {
            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
            custom_eq,
            tainted_by_errors,
        }
    }
}
184
/// The const-checking visitor. Walks a `mir::Body` and reports operations that are
/// not allowed in the current const context (see `check_op` / `check_op_spanned`).
pub struct Checker<'mir, 'tcx> {
    ccx: &'mir ConstCx<'mir, 'tcx>,
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.
    span: Span,

    /// A set that stores for each local whether it has a `StorageDead` for it somewhere.
    /// Computed lazily by `local_has_storage_dead`.
    local_has_storage_dead: Option<BitSet<Local>>,

    /// `Some` once a "primary" error has been emitted; used in `check_body` to decide
    /// whether buffered secondary errors should be emitted at all.
    error_emitted: Option<ErrorGuaranteed>,
    /// "Secondary" diagnostics buffered by `check_op_spanned`, emitted at the end of
    /// `check_body` only if no primary error occurred.
    secondary_errors: Vec<Diagnostic>,
}
198
199 impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
200 type Target = ConstCx<'mir, 'tcx>;
201
202 fn deref(&self) -> &Self::Target {
203 &self.ccx
204 }
205 }
206
207 impl<'mir, 'tcx> Checker<'mir, 'tcx> {
208 pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
209 Checker {
210 span: ccx.body.span,
211 ccx,
212 qualifs: Default::default(),
213 local_has_storage_dead: None,
214 error_emitted: None,
215 secondary_errors: Vec::new(),
216 }
217 }
218
219 pub fn check_body(&mut self) {
220 let ConstCx { tcx, body, .. } = *self.ccx;
221 let def_id = self.ccx.def_id();
222
223 // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
224 // no need to emit duplicate errors here.
225 if self.ccx.is_async() || body.generator.is_some() {
226 tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");
227 return;
228 }
229
230 // The local type and predicate checks are not free and only relevant for `const fn`s.
231 if self.const_kind() == hir::ConstContext::ConstFn {
232 // Prevent const trait methods from being annotated as `stable`.
233 // FIXME: Do this as part of stability checking.
234 if self.is_const_stable_const_fn() {
235 if crate::const_eval::is_parent_const_impl_raw(tcx, def_id) {
236 self.ccx
237 .tcx
238 .sess
239 .struct_span_err(self.span, "trait methods cannot be stable const fn")
240 .emit();
241 }
242 }
243
244 for (idx, local) in body.local_decls.iter_enumerated() {
245 // Handle the return place below.
246 if idx == RETURN_PLACE || local.internal {
247 continue;
248 }
249
250 self.span = local.source_info.span;
251 self.check_local_or_return_ty(local.ty, idx);
252 }
253
254 // impl trait is gone in MIR, so check the return type of a const fn by its signature
255 // instead of the type of the return place.
256 self.span = body.local_decls[RETURN_PLACE].source_info.span;
257 let return_ty = tcx.fn_sig(def_id).output();
258 self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);
259 }
260
261 if !tcx.has_attr(def_id.to_def_id(), sym::rustc_do_not_const_check) {
262 self.visit_body(&body);
263 }
264
265 // If we got through const-checking without emitting any "primary" errors, emit any
266 // "secondary" errors if they occurred.
267 let secondary_errors = mem::take(&mut self.secondary_errors);
268 if self.error_emitted.is_none() {
269 for mut error in secondary_errors {
270 self.tcx.sess.diagnostic().emit_diagnostic(&mut error);
271 }
272 } else {
273 assert!(self.tcx.sess.has_errors().is_some());
274 }
275 }
276
277 fn local_has_storage_dead(&mut self, local: Local) -> bool {
278 let ccx = self.ccx;
279 self.local_has_storage_dead
280 .get_or_insert_with(|| {
281 struct StorageDeads {
282 locals: BitSet<Local>,
283 }
284 impl<'tcx> Visitor<'tcx> for StorageDeads {
285 fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) {
286 if let StatementKind::StorageDead(l) = stmt.kind {
287 self.locals.insert(l);
288 }
289 }
290 }
291 let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) };
292 v.visit_body(ccx.body);
293 v.locals
294 })
295 .contains(local)
296 }
297
298 pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
299 self.qualifs.in_return_place(self.ccx, self.error_emitted)
300 }
301
302 /// Emits an error if an expression cannot be evaluated in the current context.
303 pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
304 self.check_op_spanned(op, self.span);
305 }
306
307 /// Emits an error at the given `span` if an expression cannot be evaluated in the current
308 /// context.
309 pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
310 let gate = match op.status_in_item(self.ccx) {
311 Status::Allowed => return,
312
313 Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
314 let unstable_in_stable = self.ccx.is_const_stable_const_fn()
315 && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate);
316 if unstable_in_stable {
317 emit_unstable_in_stable_error(self.ccx, span, gate);
318 }
319
320 return;
321 }
322
323 Status::Unstable(gate) => Some(gate),
324 Status::Forbidden => None,
325 };
326
327 if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
328 self.tcx.sess.miri_unleashed_feature(span, gate);
329 return;
330 }
331
332 let mut err = op.build_error(self.ccx, span);
333 assert!(err.is_error());
334
335 match op.importance() {
336 ops::DiagnosticImportance::Primary => {
337 let reported = err.emit();
338 self.error_emitted = Some(reported);
339 }
340
341 ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
342 }
343 }
344
345 fn check_static(&mut self, def_id: DefId, span: Span) {
346 if self.tcx.is_thread_local_static(def_id) {
347 self.tcx.sess.delay_span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef");
348 }
349 self.check_op_spanned(ops::StaticAccess, span)
350 }
351
352 fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
353 let kind = self.body.local_kind(local);
354
355 for ty in ty.walk() {
356 let ty = match ty.unpack() {
357 GenericArgKind::Type(ty) => ty,
358
359 // No constraints on lifetimes or constants, except potentially
360 // constants' types, but `walk` will get to them as well.
361 GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
362 };
363
364 match *ty.kind() {
365 ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
366 _ => {}
367 }
368 }
369 }
370
371 fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) {
372 match self.const_kind() {
373 // In a const fn all borrows are transient or point to the places given via
374 // references in the arguments (so we already checked them with
375 // TransientMutBorrow/MutBorrow as appropriate).
376 // The borrow checker guarantees that no new non-transient borrows are created.
377 // NOTE: Once we have heap allocations during CTFE we need to figure out
378 // how to prevent `const fn` to create long-lived allocations that point
379 // to mutable memory.
380 hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)),
381 _ => {
382 // Locals with StorageDead do not live beyond the evaluation and can
383 // thus safely be borrowed without being able to be leaked to the final
384 // value of the constant.
385 if self.local_has_storage_dead(local) {
386 self.check_op(ops::TransientMutBorrow(kind));
387 } else {
388 self.check_op(ops::MutBorrow(kind));
389 }
390 }
391 }
392 }
393 }
394
395 impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
396 fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
397 trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
398
399 // We don't const-check basic blocks on the cleanup path since we never unwind during
400 // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
401 // are unreachable during const-eval.
402 //
403 // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
404 // locals that would never be dropped during normal execution are sometimes dropped during
405 // unwinding, which means backwards-incompatible live-drop errors.
406 if block.is_cleanup {
407 return;
408 }
409
410 self.super_basic_block_data(bb, block);
411 }
412
    /// Checks every rvalue for operations forbidden in the current const context:
    /// thread-local refs, mutable/interior-mutable borrows, pointer casts, and
    /// float/raw-pointer arithmetic.
    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);

        // Special-case reborrows to be more like a copy of a reference.
        // A reborrow is visited as a use of the reborrowed place instead of as a new
        // borrow, and we return early so the borrow checks below don't fire for it.
        match *rvalue {
            Rvalue::Ref(_, kind, place) => {
                if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
                    let ctx = match kind {
                        BorrowKind::Shared => {
                            PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
                        }
                        BorrowKind::Shallow => {
                            PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
                        }
                        BorrowKind::Unique => {
                            PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
                        }
                        BorrowKind::Mut { .. } => {
                            PlaceContext::MutatingUse(MutatingUseContext::Borrow)
                        }
                    };
                    self.visit_local(&reborrowed_place_ref.local, ctx, location);
                    self.visit_projection(reborrowed_place_ref, ctx, location);
                    return;
                }
            }
            Rvalue::AddressOf(mutbl, place) => {
                if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
                    let ctx = match mutbl {
                        Mutability::Not => {
                            PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
                        }
                        Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
                    };
                    self.visit_local(&reborrowed_place_ref.local, ctx, location);
                    self.visit_projection(reborrowed_place_ref, ctx, location);
                    return;
                }
            }
            _ => {}
        }

        self.super_rvalue(rvalue, location);

        match *rvalue {
            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),

            // Always allowed; any problematic operands are caught by other visits.
            Rvalue::Use(_)
            | Rvalue::Repeat(..)
            | Rvalue::Discriminant(..)
            | Rvalue::Len(_)
            | Rvalue::Aggregate(..) => {}

            Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
            | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
                let ty = place.ty(self.body, self.tcx).ty;
                let is_allowed = match ty.kind() {
                    // Inside a `static mut`, `&mut [...]` is allowed.
                    ty::Array(..) | ty::Slice(_)
                        if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
                    {
                        true
                    }

                    // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
                    // that this is merely a ZST and it is already eligible for promotion.
                    // This may require an RFC?
                    /*
                    ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
                        => true,
                    */
                    _ => false,
                };

                if !is_allowed {
                    if let BorrowKind::Mut { .. } = kind {
                        self.check_mut_borrow(place.local, hir::BorrowKind::Ref)
                    } else {
                        // `BorrowKind::Unique` is treated like a cell borrow here.
                        self.check_op(ops::CellBorrow);
                    }
                }
            }

            Rvalue::AddressOf(Mutability::Mut, ref place) => {
                self.check_mut_borrow(place.local, hir::BorrowKind::Raw)
            }

            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
            | Rvalue::AddressOf(Mutability::Not, ref place) => {
                // Shared borrows are only a problem when the borrowed place has
                // interior mutability (flow-sensitively determined).
                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                    &self.ccx,
                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
                    place.as_ref(),
                );

                if borrowed_place_has_mut_interior {
                    match self.const_kind() {
                        // In a const fn all borrows are transient or point to the places given via
                        // references in the arguments (so we already checked them with
                        // TransientCellBorrow/CellBorrow as appropriate).
                        // The borrow checker guarantees that no new non-transient borrows are created.
                        // NOTE: Once we have heap allocations during CTFE we need to figure out
                        // how to prevent `const fn` to create long-lived allocations that point
                        // to (interior) mutable memory.
                        hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow),
                        _ => {
                            // Locals with StorageDead are definitely not part of the final constant value, and
                            // it is thus inherently safe to permit such locals to have their
                            // address taken as we can't end up with a reference to them in the
                            // final value.
                            // Note: This is only sound if every local that has a `StorageDead` has a
                            // `StorageDead` in every control flow path leading to a `return` terminator.
                            if self.local_has_storage_dead(place.local) {
                                self.check_op(ops::TransientCellBorrow);
                            } else {
                                self.check_op(ops::CellBorrow);
                            }
                        }
                    }
                }
            }

            Rvalue::Cast(
                CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
                _,
                _,
            ) => {}

            Rvalue::Cast(
                CastKind::Pointer(
                    PointerCast::UnsafeFnPointer
                    | PointerCast::ClosureFnPointer(_)
                    | PointerCast::ReifyFnPointer,
                ),
                _,
                _,
            ) => {
                // Nothing to do here. Function pointer casts are allowed now.
            }

            Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, _) => {
                // Nothing to check here (`check_local_or_return_ty` ensures no trait objects occur
                // in the type of any local, which also excludes casts).
            }

            Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
                let operand_ty = operand.ty(self.body, self.tcx);
                let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
                let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");

                // Casting a (fn-)pointer to an integer is not const-evaluable.
                if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
                    self.check_op(ops::RawPtrToIntCast);
                }
            }

            Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) => {}
            Rvalue::ShallowInitBox(_, _) => {}

            Rvalue::UnaryOp(_, ref operand) => {
                let ty = operand.ty(self.body, self.tcx);
                if is_int_bool_or_char(ty) {
                    // Int, bool, and char operations are fine.
                } else if ty.is_floating_point() {
                    self.check_op(ops::FloatingPointOp);
                } else {
                    span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
                }
            }

            Rvalue::BinaryOp(op, box (ref lhs, ref rhs))
            | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
                let lhs_ty = lhs.ty(self.body, self.tcx);
                let rhs_ty = rhs.ty(self.body, self.tcx);

                if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
                    // Int, bool, and char operations are fine.
                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
                    // Only comparison/offset ops are expected on pointers here.
                    assert_eq!(lhs_ty, rhs_ty);
                    assert!(
                        op == BinOp::Eq
                            || op == BinOp::Ne
                            || op == BinOp::Le
                            || op == BinOp::Lt
                            || op == BinOp::Ge
                            || op == BinOp::Gt
                            || op == BinOp::Offset
                    );

                    self.check_op(ops::RawPtrComparison);
                } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
                    self.check_op(ops::FloatingPointOp);
                } else {
                    span_bug!(
                        self.span,
                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
                        lhs_ty,
                        rhs_ty
                    );
                }
            }
        }
    }
615
616 fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
617 self.super_operand(op, location);
618 if let Operand::Constant(c) = op {
619 if let Some(def_id) = c.check_static_ptr(self.tcx) {
620 self.check_static(def_id, self.span);
621 }
622 }
623 }
    /// Checks place projections; the interesting case is `Deref`, which may be a raw
    /// pointer dereference, a reference to a `static`, or a mutating dereference.
    fn visit_projection_elem(
        &mut self,
        place_local: Local,
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        trace!(
            "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
            context={:?} location={:?}",
            place_local,
            proj_base,
            elem,
            context,
            location,
        );

        self.super_projection_elem(place_local, proj_base, elem, context, location);

        match elem {
            ProjectionElem::Deref => {
                let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
                if base_ty.is_unsafe_ptr() {
                    // If the raw pointer is the bare local itself and that local was
                    // introduced as a reference to a `static`, check static access
                    // instead of raw-pointer semantics, and stop there.
                    if proj_base.is_empty() {
                        let decl = &self.body.local_decls[place_local];
                        if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
                            let span = decl.source_info.span;
                            self.check_static(def_id, span);
                            return;
                        }
                    }

                    // `*const T` is stable, `*mut T` is not
                    if !base_ty.is_mutable_ptr() {
                        return;
                    }

                    self.check_op(ops::RawMutPtrDeref);
                }

                // Writing through a dereference (of a reference) is its own op.
                if context.is_mutating_use() {
                    self.check_op(ops::MutDeref);
                }
            }

            // All other projections are allowed.
            ProjectionElem::ConstantIndex { .. }
            | ProjectionElem::Downcast(..)
            | ProjectionElem::Subslice { .. }
            | ProjectionElem::Field(..)
            | ProjectionElem::Index(_) => {}
        }
    }
677
678 fn visit_source_info(&mut self, source_info: &SourceInfo) {
679 trace!("visit_source_info: source_info={:?}", source_info);
680 self.span = source_info.span;
681 }
682
683 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
684 trace!("visit_statement: statement={:?} location={:?}", statement, location);
685
686 self.super_statement(statement, location);
687
688 match statement.kind {
689 StatementKind::Assign(..)
690 | StatementKind::SetDiscriminant { .. }
691 | StatementKind::Deinit(..)
692 | StatementKind::FakeRead(..)
693 | StatementKind::StorageLive(_)
694 | StatementKind::StorageDead(_)
695 | StatementKind::Retag { .. }
696 | StatementKind::AscribeUserType(..)
697 | StatementKind::Coverage(..)
698 | StatementKind::CopyNonOverlapping(..)
699 | StatementKind::Nop => {}
700 }
701 }
702
703 #[instrument(level = "debug", skip(self))]
704 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
705 use rustc_target::spec::abi::Abi::RustIntrinsic;
706
707 self.super_terminator(terminator, location);
708
709 match &terminator.kind {
710 TerminatorKind::Call { func, args, fn_span, from_hir_call, .. } => {
711 let ConstCx { tcx, body, param_env, .. } = *self.ccx;
712 let caller = self.def_id();
713
714 let fn_ty = func.ty(body, tcx);
715
716 let (mut callee, mut substs) = match *fn_ty.kind() {
717 ty::FnDef(def_id, substs) => (def_id, substs),
718
719 ty::FnPtr(_) => {
720 self.check_op(ops::FnCallIndirect);
721 return;
722 }
723 _ => {
724 span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
725 }
726 };
727
728 let mut nonconst_call_permission = false;
729
730 // Attempting to call a trait method?
731 if let Some(trait_id) = tcx.trait_of_item(callee) {
732 trace!("attempting to call a trait method");
733 if !self.tcx.features().const_trait_impl {
734 self.check_op(ops::FnCallNonConst {
735 caller,
736 callee,
737 substs,
738 span: *fn_span,
739 from_hir_call: *from_hir_call,
740 });
741 return;
742 }
743
744 let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
745 let poly_trait_pred = Binder::dummy(TraitPredicate {
746 trait_ref,
747 constness: ty::BoundConstness::ConstIfConst,
748 polarity: ty::ImplPolarity::Positive,
749 });
750 let obligation =
751 Obligation::new(ObligationCause::dummy(), param_env, poly_trait_pred);
752
753 let implsrc = tcx.infer_ctxt().enter(|infcx| {
754 let mut selcx = SelectionContext::new(&infcx);
755 selcx.select(&obligation)
756 });
757
758 match implsrc {
759 Ok(Some(ImplSource::Param(_, ty::BoundConstness::ConstIfConst))) => {
760 debug!(
761 "const_trait_impl: provided {:?} via where-clause in {:?}",
762 trait_ref, param_env
763 );
764 return;
765 }
766 Ok(Some(ImplSource::UserDefined(data))) => {
767 let callee_name = tcx.item_name(callee);
768 if let Some(&did) = tcx
769 .associated_item_def_ids(data.impl_def_id)
770 .iter()
771 .find(|did| tcx.item_name(**did) == callee_name)
772 {
773 // using internal substs is ok here, since this is only
774 // used for the `resolve` call below
775 substs = InternalSubsts::identity_for_item(tcx, did);
776 callee = did;
777 }
778
779 if let hir::Constness::NotConst = tcx.impl_constness(data.impl_def_id) {
780 self.check_op(ops::FnCallNonConst {
781 caller,
782 callee,
783 substs,
784 span: *fn_span,
785 from_hir_call: *from_hir_call,
786 });
787 return;
788 }
789 }
790 _ if !tcx.is_const_fn_raw(callee) => {
791 // At this point, it is only legal when the caller is marked with
792 // #[default_method_body_is_const], and the callee is in the same
793 // trait.
794 let callee_trait = tcx.trait_of_item(callee);
795 if callee_trait.is_some()
796 && tcx.has_attr(caller.to_def_id(), sym::default_method_body_is_const)
797 && callee_trait == tcx.trait_of_item(caller)
798 // Can only call methods when it's `<Self as TheTrait>::f`.
799 && tcx.types.self_param == substs.type_at(0)
800 {
801 nonconst_call_permission = true;
802 }
803
804 if !nonconst_call_permission {
805 let obligation = Obligation::new(
806 ObligationCause::dummy_with_span(*fn_span),
807 param_env,
808 tcx.mk_predicate(
809 poly_trait_pred.map_bound(ty::PredicateKind::Trait),
810 ),
811 );
812
813 // improve diagnostics by showing what failed. Our requirements are stricter this time
814 // as we are going to error again anyways.
815 tcx.infer_ctxt().enter(|infcx| {
816 if let Err(e) = implsrc {
817 infcx.report_selection_error(
818 obligation.clone(),
819 &obligation,
820 &e,
821 false,
822 );
823 }
824 });
825
826 self.check_op(ops::FnCallNonConst {
827 caller,
828 callee,
829 substs,
830 span: *fn_span,
831 from_hir_call: *from_hir_call,
832 });
833 return;
834 }
835 }
836 _ => {}
837 }
838
839 // Resolve a trait method call to its concrete implementation, which may be in a
840 // `const` trait impl.
841 let instance = Instance::resolve(tcx, param_env, callee, substs);
842 debug!("Resolving ({:?}) -> {:?}", callee, instance);
843 if let Ok(Some(func)) = instance {
844 if let InstanceDef::Item(def) = func.def {
845 callee = def.did;
846 }
847 }
848 }
849
850 // At this point, we are calling a function, `callee`, whose `DefId` is known...
851
852 // `begin_panic` and `panic_display` are generic functions that accept
853 // types other than str. Check to enforce that only str can be used in
854 // const-eval.
855
856 // const-eval of the `begin_panic` fn assumes the argument is `&str`
857 if Some(callee) == tcx.lang_items().begin_panic_fn() {
858 match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
859 ty::Ref(_, ty, _) if ty.is_str() => return,
860 _ => self.check_op(ops::PanicNonStr),
861 }
862 }
863
864 // const-eval of the `panic_display` fn assumes the argument is `&&str`
865 if Some(callee) == tcx.lang_items().panic_display() {
866 match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
867 ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
868 {
869 return;
870 }
871 _ => self.check_op(ops::PanicNonStr),
872 }
873 }
874
875 if Some(callee) == tcx.lang_items().exchange_malloc_fn() {
876 self.check_op(ops::HeapAllocation);
877 return;
878 }
879
880 // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
881 let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
882 if is_async_block {
883 let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
884 self.check_op(ops::Generator(kind));
885 return;
886 }
887
888 let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;
889
890 if !tcx.is_const_fn_raw(callee) {
891 if tcx.trait_of_item(callee).is_some() {
892 if tcx.has_attr(callee, sym::default_method_body_is_const) {
893 // To get to here we must have already found a const impl for the
894 // trait, but for it to still be non-const can be that the impl is
895 // using default method bodies.
896 nonconst_call_permission = true;
897 }
898 }
899
900 if !nonconst_call_permission {
901 self.check_op(ops::FnCallNonConst {
902 caller,
903 callee,
904 substs,
905 span: *fn_span,
906 from_hir_call: *from_hir_call,
907 });
908 return;
909 }
910 }
911
912 // If the `const fn` we are trying to call is not const-stable, ensure that we have
913 // the proper feature gate enabled.
914 if let Some(gate) = is_unstable_const_fn(tcx, callee) {
915 trace!(?gate, "calling unstable const fn");
916 if self.span.allows_unstable(gate) {
917 return;
918 }
919
920 // Calling an unstable function *always* requires that the corresponding gate
921 // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
922 if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
923 self.check_op(ops::FnCallUnstable(callee, Some(gate)));
924 return;
925 }
926
927 // If this crate is not using stability attributes, or the caller is not claiming to be a
928 // stable `const fn`, that is all that is required.
929 if !self.ccx.is_const_stable_const_fn() {
930 trace!("crate not using stability attributes or caller not stably const");
931 return;
932 }
933
934 // Otherwise, we are something const-stable calling a const-unstable fn.
935
936 if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
937 trace!("rustc_allow_const_fn_unstable gate active");
938 return;
939 }
940
941 self.check_op(ops::FnCallUnstable(callee, Some(gate)));
942 return;
943 }
944
945 // FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
946 // have no `rustc_const_stable` attributes to be const-unstable as well. This
947 // should be fixed later.
948 let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
949 && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
950 if callee_is_unstable_unmarked {
951 trace!("callee_is_unstable_unmarked");
952 // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
953 // `extern` functions, and these have no way to get marked `const`. So instead we
954 // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
955 if self.ccx.is_const_stable_const_fn() || is_intrinsic {
956 self.check_op(ops::FnCallUnstable(callee, None));
957 return;
958 }
959 }
960 trace!("permitting call");
961 }
962
963 // Forbid all `Drop` terminators unless the place being dropped is a local with no
964 // projections that cannot be `NeedsNonConstDrop`.
965 TerminatorKind::Drop { place: dropped_place, .. }
966 | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
967 // If we are checking live drops after drop-elaboration, don't emit duplicate
968 // errors here.
969 if super::post_drop_elaboration::checking_enabled(self.ccx) {
970 return;
971 }
972
973 let mut err_span = self.span;
974 let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;
975
976 let ty_needs_non_const_drop =
977 qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place);
978
979 debug!(?ty_of_dropped_place, ?ty_needs_non_const_drop);
980
981 if !ty_needs_non_const_drop {
982 return;
983 }
984
985 let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
986 // Use the span where the local was declared as the span of the drop error.
987 err_span = self.body.local_decls[local].source_info.span;
988 self.qualifs.needs_non_const_drop(self.ccx, local, location)
989 } else {
990 true
991 };
992
993 if needs_non_const_drop {
994 self.check_op_spanned(
995 ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
996 err_span,
997 );
998 }
999 }
1000
1001 TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
1002
1003 TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
1004 self.check_op(ops::Generator(hir::GeneratorKind::Gen))
1005 }
1006
1007 TerminatorKind::Abort => {
1008 // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
1009 span_bug!(self.span, "`Abort` terminator outside of cleanup block")
1010 }
1011
1012 TerminatorKind::Assert { .. }
1013 | TerminatorKind::FalseEdge { .. }
1014 | TerminatorKind::FalseUnwind { .. }
1015 | TerminatorKind::Goto { .. }
1016 | TerminatorKind::Resume
1017 | TerminatorKind::Return
1018 | TerminatorKind::SwitchInt { .. }
1019 | TerminatorKind::Unreachable => {}
1020 }
1021 }
1022 }
1023
1024 fn place_as_reborrow<'tcx>(
1025 tcx: TyCtxt<'tcx>,
1026 body: &Body<'tcx>,
1027 place: Place<'tcx>,
1028 ) -> Option<PlaceRef<'tcx>> {
1029 match place.as_ref().last_projection() {
1030 Some((place_base, ProjectionElem::Deref)) => {
1031 // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
1032 // that points to the allocation for the static. Don't treat these as reborrows.
1033 if body.local_decls[place_base.local].is_ref_to_static() {
1034 None
1035 } else {
1036 // Ensure the type being derefed is a reference and not a raw pointer.
1037 // This is sufficient to prevent an access to a `static mut` from being marked as a
1038 // reborrow, even if the check above were to disappear.
1039 let inner_ty = place_base.ty(body, tcx).ty;
1040
1041 if let ty::Ref(..) = inner_ty.kind() {
1042 return Some(place_base);
1043 } else {
1044 return None;
1045 }
1046 }
1047 }
1048 _ => None,
1049 }
1050 }
1051
1052 fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
1053 ty.is_bool() || ty.is_integral() || ty.is_char()
1054 }
1055
1056 fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
1057 let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();
1058
1059 ccx.tcx
1060 .sess
1061 .struct_span_err(
1062 span,
1063 &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
1064 )
1065 .span_suggestion(
1066 attr_span,
1067 "if it is not part of the public API, make this function unstably const",
1068 concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
1069 Applicability::HasPlaceholders,
1070 )
1071 .span_suggestion(
1072 attr_span,
1073 "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
1074 format!("#[rustc_allow_const_fn_unstable({})]\n", gate),
1075 Applicability::MaybeIncorrect,
1076 )
1077 .emit();
1078 }