1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
3 use rustc_errors
::{Applicability, Diagnostic, ErrorGuaranteed}
;
5 use rustc_hir
::def_id
::DefId
;
6 use rustc_index
::bit_set
::BitSet
;
7 use rustc_infer
::infer
::TyCtxtInferExt
;
8 use rustc_infer
::traits
::{ImplSource, Obligation, ObligationCause}
;
9 use rustc_middle
::mir
::visit
::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}
;
10 use rustc_middle
::mir
::*;
11 use rustc_middle
::ty
::cast
::CastTy
;
12 use rustc_middle
::ty
::subst
::{GenericArgKind, InternalSubsts}
;
13 use rustc_middle
::ty
::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt}
;
14 use rustc_middle
::ty
::{Binder, TraitPredicate, TraitRef, TypeFoldable}
;
15 use rustc_mir_dataflow
::{self, Analysis}
;
16 use rustc_span
::{sym, Span, Symbol}
;
17 use rustc_trait_selection
::traits
::error_reporting
::InferCtxtExt
;
18 use rustc_trait_selection
::traits
::SelectionContext
;
23 use super::ops
::{self, NonConstOp, Status}
;
24 use super::qualifs
::{self, CustomEq, HasMutInterior, NeedsDrop, NeedsNonConstDrop}
;
25 use super::resolver
::FlowSensitiveAnalysis
;
26 use super::{ConstCx, Qualif}
;
27 use crate::const_eval
::is_unstable_const_fn
;
/// Shorthand for a dataflow results cursor over the flow-sensitive qualif
/// analysis `Q`, used to query the qualif state of a local at a `Location`.
type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
pub struct Qualifs<'mir, 'tcx> {
    // Each cursor is `None` until first queried: the accessor methods build it
    // via `get_or_insert_with`, so a dataflow analysis only runs if the
    // corresponding qualif is actually needed for this body.
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
39 impl<'mir
, 'tcx
> Qualifs
<'mir
, 'tcx
> {
40 /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
42 /// Only updates the cursor if absolutely necessary
45 ccx
: &'mir ConstCx
<'mir
, 'tcx
>,
49 let ty
= ccx
.body
.local_decls
[local
].ty
;
50 // Peeking into opaque types causes cycles if the current function declares said opaque
51 // type. Thus we avoid short circuiting on the type and instead run the more expensive
52 // analysis that looks at the actual usage within this function
53 if !ty
.has_opaque_types() && !NeedsDrop
::in_any_value_of_ty(ccx
, ty
) {
57 let needs_drop
= self.needs_drop
.get_or_insert_with(|| {
58 let ConstCx { tcx, body, .. }
= *ccx
;
60 FlowSensitiveAnalysis
::new(NeedsDrop
, ccx
)
61 .into_engine(tcx
, &body
)
62 .iterate_to_fixpoint()
63 .into_results_cursor(&body
)
66 needs_drop
.seek_before_primary_effect(location
);
67 needs_drop
.get().contains(local
)
70 /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
72 /// Only updates the cursor if absolutely necessary
73 pub fn needs_non_const_drop(
75 ccx
: &'mir ConstCx
<'mir
, 'tcx
>,
79 let ty
= ccx
.body
.local_decls
[local
].ty
;
80 if !NeedsNonConstDrop
::in_any_value_of_ty(ccx
, ty
) {
84 let needs_non_const_drop
= self.needs_non_const_drop
.get_or_insert_with(|| {
85 let ConstCx { tcx, body, .. }
= *ccx
;
87 FlowSensitiveAnalysis
::new(NeedsNonConstDrop
, ccx
)
88 .into_engine(tcx
, &body
)
89 .iterate_to_fixpoint()
90 .into_results_cursor(&body
)
93 needs_non_const_drop
.seek_before_primary_effect(location
);
94 needs_non_const_drop
.get().contains(local
)
97 /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
99 /// Only updates the cursor if absolutely necessary.
100 pub fn has_mut_interior(
102 ccx
: &'mir ConstCx
<'mir
, 'tcx
>,
106 let ty
= ccx
.body
.local_decls
[local
].ty
;
107 // Peeking into opaque types causes cycles if the current function declares said opaque
108 // type. Thus we avoid short circuiting on the type and instead run the more expensive
109 // analysis that looks at the actual usage within this function
110 if !ty
.has_opaque_types() && !HasMutInterior
::in_any_value_of_ty(ccx
, ty
) {
114 let has_mut_interior
= self.has_mut_interior
.get_or_insert_with(|| {
115 let ConstCx { tcx, body, .. }
= *ccx
;
117 FlowSensitiveAnalysis
::new(HasMutInterior
, ccx
)
118 .into_engine(tcx
, &body
)
119 .iterate_to_fixpoint()
120 .into_results_cursor(&body
)
123 has_mut_interior
.seek_before_primary_effect(location
);
124 has_mut_interior
.get().contains(local
)
129 ccx
: &'mir ConstCx
<'mir
, 'tcx
>,
130 tainted_by_errors
: Option
<ErrorGuaranteed
>,
132 // Find the `Return` terminator if one exists.
134 // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
135 // qualifs for the return type.
136 let return_block
= ccx
140 .find(|(_
, block
)| matches
!(block
.terminator().kind
, TerminatorKind
::Return
))
143 let Some(return_block
) = return_block
else {
144 return qualifs
::in_any_value_of_ty(ccx
, ccx
.body
.return_ty(), tainted_by_errors
);
147 let return_loc
= ccx
.body
.terminator_loc(return_block
);
149 let custom_eq
= match ccx
.const_kind() {
150 // We don't care whether a `const fn` returns a value that is not structurally
151 // matchable. Functions calls are opaque and always use type-based qualification, so
152 // this value should never be used.
153 hir
::ConstContext
::ConstFn
=> true,
155 // If we know that all values of the return type are structurally matchable, there's no
156 // need to run dataflow.
157 // Opaque types do not participate in const generics or pattern matching, so we can safely count them out.
158 _
if ccx
.body
.return_ty().has_opaque_types()
159 || !CustomEq
::in_any_value_of_ty(ccx
, ccx
.body
.return_ty()) =>
164 hir
::ConstContext
::Const
| hir
::ConstContext
::Static(_
) => {
165 let mut cursor
= FlowSensitiveAnalysis
::new(CustomEq
, ccx
)
166 .into_engine(ccx
.tcx
, &ccx
.body
)
167 .iterate_to_fixpoint()
168 .into_results_cursor(&ccx
.body
);
170 cursor
.seek_after_primary_effect(return_loc
);
171 cursor
.get().contains(RETURN_PLACE
)
176 needs_drop
: self.needs_drop(ccx
, RETURN_PLACE
, return_loc
),
177 needs_non_const_drop
: self.needs_non_const_drop(ccx
, RETURN_PLACE
, return_loc
),
178 has_mut_interior
: self.has_mut_interior(ccx
, RETURN_PLACE
, return_loc
),
185 pub struct Checker
<'mir
, 'tcx
> {
186 ccx
: &'mir ConstCx
<'mir
, 'tcx
>,
187 qualifs
: Qualifs
<'mir
, 'tcx
>,
189 /// The span of the current statement.
192 /// A set that stores for each local whether it has a `StorageDead` for it somewhere.
193 local_has_storage_dead
: Option
<BitSet
<Local
>>,
195 error_emitted
: Option
<ErrorGuaranteed
>,
196 secondary_errors
: Vec
<Diagnostic
>,
199 impl<'mir
, 'tcx
> Deref
for Checker
<'mir
, 'tcx
> {
200 type Target
= ConstCx
<'mir
, 'tcx
>;
202 fn deref(&self) -> &Self::Target
{
207 impl<'mir
, 'tcx
> Checker
<'mir
, 'tcx
> {
208 pub fn new(ccx
: &'mir ConstCx
<'mir
, 'tcx
>) -> Self {
212 qualifs
: Default
::default(),
213 local_has_storage_dead
: None
,
215 secondary_errors
: Vec
::new(),
219 pub fn check_body(&mut self) {
220 let ConstCx { tcx, body, .. }
= *self.ccx
;
221 let def_id
= self.ccx
.def_id();
223 // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
224 // no need to emit duplicate errors here.
225 if self.ccx
.is_async() || body
.generator
.is_some() {
226 tcx
.sess
.delay_span_bug(body
.span
, "`async` functions cannot be `const fn`");
230 // The local type and predicate checks are not free and only relevant for `const fn`s.
231 if self.const_kind() == hir
::ConstContext
::ConstFn
{
232 // Prevent const trait methods from being annotated as `stable`.
233 // FIXME: Do this as part of stability checking.
234 if self.is_const_stable_const_fn() {
235 if crate::const_eval
::is_parent_const_impl_raw(tcx
, def_id
) {
239 .struct_span_err(self.span
, "trait methods cannot be stable const fn")
244 for (idx
, local
) in body
.local_decls
.iter_enumerated() {
245 // Handle the return place below.
246 if idx
== RETURN_PLACE
|| local
.internal
{
250 self.span
= local
.source_info
.span
;
251 self.check_local_or_return_ty(local
.ty
, idx
);
254 // impl trait is gone in MIR, so check the return type of a const fn by its signature
255 // instead of the type of the return place.
256 self.span
= body
.local_decls
[RETURN_PLACE
].source_info
.span
;
257 let return_ty
= tcx
.fn_sig(def_id
).output();
258 self.check_local_or_return_ty(return_ty
.skip_binder(), RETURN_PLACE
);
261 if !tcx
.has_attr(def_id
.to_def_id(), sym
::rustc_do_not_const_check
) {
262 self.visit_body(&body
);
265 // If we got through const-checking without emitting any "primary" errors, emit any
266 // "secondary" errors if they occurred.
267 let secondary_errors
= mem
::take(&mut self.secondary_errors
);
268 if self.error_emitted
.is_none() {
269 for mut error
in secondary_errors
{
270 self.tcx
.sess
.diagnostic().emit_diagnostic(&mut error
);
273 assert
!(self.tcx
.sess
.has_errors().is_some());
277 fn local_has_storage_dead(&mut self, local
: Local
) -> bool
{
279 self.local_has_storage_dead
280 .get_or_insert_with(|| {
281 struct StorageDeads
{
282 locals
: BitSet
<Local
>,
284 impl<'tcx
> Visitor
<'tcx
> for StorageDeads
{
285 fn visit_statement(&mut self, stmt
: &Statement
<'tcx
>, _
: Location
) {
286 if let StatementKind
::StorageDead(l
) = stmt
.kind
{
287 self.locals
.insert(l
);
291 let mut v
= StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) }
;
292 v
.visit_body(ccx
.body
);
    /// Returns the qualifs of the value in the return place of this body.
    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        // `error_emitted` is threaded through as the `tainted_by_errors`
        // argument of `Qualifs::in_return_place`.
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    /// Emits an error if an expression cannot be evaluated in the current context.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        // Delegates to `check_op_spanned` with the span of whatever is
        // currently being visited (kept up to date by `visit_source_info`).
        self.check_op_spanned(op, self.span);
307 /// Emits an error at the given `span` if an expression cannot be evaluated in the current
309 pub fn check_op_spanned
<O
: NonConstOp
<'tcx
>>(&mut self, op
: O
, span
: Span
) {
310 let gate
= match op
.status_in_item(self.ccx
) {
311 Status
::Allowed
=> return,
313 Status
::Unstable(gate
) if self.tcx
.features().enabled(gate
) => {
314 let unstable_in_stable
= self.ccx
.is_const_stable_const_fn()
315 && !super::rustc_allow_const_fn_unstable(self.tcx
, self.def_id(), gate
);
316 if unstable_in_stable
{
317 emit_unstable_in_stable_error(self.ccx
, span
, gate
);
323 Status
::Unstable(gate
) => Some(gate
),
324 Status
::Forbidden
=> None
,
327 if self.tcx
.sess
.opts
.debugging_opts
.unleash_the_miri_inside_of_you
{
328 self.tcx
.sess
.miri_unleashed_feature(span
, gate
);
332 let mut err
= op
.build_error(self.ccx
, span
);
333 assert
!(err
.is_error());
335 match op
.importance() {
336 ops
::DiagnosticImportance
::Primary
=> {
337 let reported
= err
.emit();
338 self.error_emitted
= Some(reported
);
341 ops
::DiagnosticImportance
::Secondary
=> err
.buffer(&mut self.secondary_errors
),
345 fn check_static(&mut self, def_id
: DefId
, span
: Span
) {
346 if self.tcx
.is_thread_local_static(def_id
) {
347 self.tcx
.sess
.delay_span_bug(span
, "tls access is checked in `Rvalue::ThreadLocalRef");
349 self.check_op_spanned(ops
::StaticAccess
, span
)
352 fn check_local_or_return_ty(&mut self, ty
: Ty
<'tcx
>, local
: Local
) {
353 let kind
= self.body
.local_kind(local
);
355 for ty
in ty
.walk() {
356 let ty
= match ty
.unpack() {
357 GenericArgKind
::Type(ty
) => ty
,
359 // No constraints on lifetimes or constants, except potentially
360 // constants' types, but `walk` will get to them as well.
361 GenericArgKind
::Lifetime(_
) | GenericArgKind
::Const(_
) => continue,
365 ty
::Ref(_
, _
, hir
::Mutability
::Mut
) => self.check_op(ops
::ty
::MutRef(kind
)),
371 fn check_mut_borrow(&mut self, local
: Local
, kind
: hir
::BorrowKind
) {
372 match self.const_kind() {
373 // In a const fn all borrows are transient or point to the places given via
374 // references in the arguments (so we already checked them with
375 // TransientMutBorrow/MutBorrow as appropriate).
376 // The borrow checker guarantees that no new non-transient borrows are created.
377 // NOTE: Once we have heap allocations during CTFE we need to figure out
378 // how to prevent `const fn` to create long-lived allocations that point
379 // to mutable memory.
380 hir
::ConstContext
::ConstFn
=> self.check_op(ops
::TransientMutBorrow(kind
)),
382 // Locals with StorageDead do not live beyond the evaluation and can
383 // thus safely be borrowed without being able to be leaked to the final
384 // value of the constant.
385 if self.local_has_storage_dead(local
) {
386 self.check_op(ops
::TransientMutBorrow(kind
));
388 self.check_op(ops
::MutBorrow(kind
));
395 impl<'tcx
> Visitor
<'tcx
> for Checker
<'_
, 'tcx
> {
396 fn visit_basic_block_data(&mut self, bb
: BasicBlock
, block
: &BasicBlockData
<'tcx
>) {
397 trace
!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb
, block
.is_cleanup
);
399 // We don't const-check basic blocks on the cleanup path since we never unwind during
400 // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
401 // are unreachable during const-eval.
403 // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
404 // locals that would never be dropped during normal execution are sometimes dropped during
405 // unwinding, which means backwards-incompatible live-drop errors.
406 if block
.is_cleanup
{
410 self.super_basic_block_data(bb
, block
);
413 fn visit_rvalue(&mut self, rvalue
: &Rvalue
<'tcx
>, location
: Location
) {
414 trace
!("visit_rvalue: rvalue={:?} location={:?}", rvalue
, location
);
416 // Special-case reborrows to be more like a copy of a reference.
418 Rvalue
::Ref(_
, kind
, place
) => {
419 if let Some(reborrowed_place_ref
) = place_as_reborrow(self.tcx
, self.body
, place
) {
420 let ctx
= match kind
{
421 BorrowKind
::Shared
=> {
422 PlaceContext
::NonMutatingUse(NonMutatingUseContext
::SharedBorrow
)
424 BorrowKind
::Shallow
=> {
425 PlaceContext
::NonMutatingUse(NonMutatingUseContext
::ShallowBorrow
)
427 BorrowKind
::Unique
=> {
428 PlaceContext
::NonMutatingUse(NonMutatingUseContext
::UniqueBorrow
)
430 BorrowKind
::Mut { .. }
=> {
431 PlaceContext
::MutatingUse(MutatingUseContext
::Borrow
)
434 self.visit_local(&reborrowed_place_ref
.local
, ctx
, location
);
435 self.visit_projection(reborrowed_place_ref
, ctx
, location
);
439 Rvalue
::AddressOf(mutbl
, place
) => {
440 if let Some(reborrowed_place_ref
) = place_as_reborrow(self.tcx
, self.body
, place
) {
441 let ctx
= match mutbl
{
443 PlaceContext
::NonMutatingUse(NonMutatingUseContext
::AddressOf
)
445 Mutability
::Mut
=> PlaceContext
::MutatingUse(MutatingUseContext
::AddressOf
),
447 self.visit_local(&reborrowed_place_ref
.local
, ctx
, location
);
448 self.visit_projection(reborrowed_place_ref
, ctx
, location
);
455 self.super_rvalue(rvalue
, location
);
458 Rvalue
::ThreadLocalRef(_
) => self.check_op(ops
::ThreadLocalAccess
),
462 | Rvalue
::Discriminant(..)
464 | Rvalue
::Aggregate(..) => {}
466 Rvalue
::Ref(_
, kind @ BorrowKind
::Mut { .. }
, ref place
)
467 | Rvalue
::Ref(_
, kind @ BorrowKind
::Unique
, ref place
) => {
468 let ty
= place
.ty(self.body
, self.tcx
).ty
;
469 let is_allowed
= match ty
.kind() {
470 // Inside a `static mut`, `&mut [...]` is allowed.
471 ty
::Array(..) | ty
::Slice(_
)
472 if self.const_kind() == hir
::ConstContext
::Static(hir
::Mutability
::Mut
) =>
477 // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
478 // that this is merely a ZST and it is already eligible for promotion.
479 // This may require an RFC?
481 ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
488 if let BorrowKind
::Mut { .. }
= kind
{
489 self.check_mut_borrow(place
.local
, hir
::BorrowKind
::Ref
)
491 self.check_op(ops
::CellBorrow
);
496 Rvalue
::AddressOf(Mutability
::Mut
, ref place
) => {
497 self.check_mut_borrow(place
.local
, hir
::BorrowKind
::Raw
)
500 Rvalue
::Ref(_
, BorrowKind
::Shared
| BorrowKind
::Shallow
, ref place
)
501 | Rvalue
::AddressOf(Mutability
::Not
, ref place
) => {
502 let borrowed_place_has_mut_interior
= qualifs
::in_place
::<HasMutInterior
, _
>(
504 &mut |local
| self.qualifs
.has_mut_interior(self.ccx
, local
, location
),
508 if borrowed_place_has_mut_interior
{
509 match self.const_kind() {
510 // In a const fn all borrows are transient or point to the places given via
511 // references in the arguments (so we already checked them with
512 // TransientCellBorrow/CellBorrow as appropriate).
513 // The borrow checker guarantees that no new non-transient borrows are created.
514 // NOTE: Once we have heap allocations during CTFE we need to figure out
515 // how to prevent `const fn` to create long-lived allocations that point
516 // to (interior) mutable memory.
517 hir
::ConstContext
::ConstFn
=> self.check_op(ops
::TransientCellBorrow
),
519 // Locals with StorageDead are definitely not part of the final constant value, and
520 // it is thus inherently safe to permit such locals to have their
521 // address taken as we can't end up with a reference to them in the
523 // Note: This is only sound if every local that has a `StorageDead` has a
524 // `StorageDead` in every control flow path leading to a `return` terminator.
525 if self.local_has_storage_dead(place
.local
) {
526 self.check_op(ops
::TransientCellBorrow
);
528 self.check_op(ops
::CellBorrow
);
536 CastKind
::Pointer(PointerCast
::MutToConstPointer
| PointerCast
::ArrayToPointer
),
543 PointerCast
::UnsafeFnPointer
544 | PointerCast
::ClosureFnPointer(_
)
545 | PointerCast
::ReifyFnPointer
,
550 // Nothing to do here. Function pointer casts are allowed now.
553 Rvalue
::Cast(CastKind
::Pointer(PointerCast
::Unsize
), _
, _
) => {
554 // Nothing to check here (`check_local_or_return_ty` ensures no trait objects occur
555 // in the type of any local, which also excludes casts).
558 Rvalue
::Cast(CastKind
::Misc
, ref operand
, cast_ty
) => {
559 let operand_ty
= operand
.ty(self.body
, self.tcx
);
560 let cast_in
= CastTy
::from_ty(operand_ty
).expect("bad input type for cast");
561 let cast_out
= CastTy
::from_ty(cast_ty
).expect("bad output type for cast");
563 if let (CastTy
::Ptr(_
) | CastTy
::FnPtr
, CastTy
::Int(_
)) = (cast_in
, cast_out
) {
564 self.check_op(ops
::RawPtrToIntCast
);
568 Rvalue
::NullaryOp(NullOp
::SizeOf
| NullOp
::AlignOf
, _
) => {}
569 Rvalue
::ShallowInitBox(_
, _
) => {}
571 Rvalue
::UnaryOp(_
, ref operand
) => {
572 let ty
= operand
.ty(self.body
, self.tcx
);
573 if is_int_bool_or_char(ty
) {
574 // Int, bool, and char operations are fine.
575 } else if ty
.is_floating_point() {
576 self.check_op(ops
::FloatingPointOp
);
578 span_bug
!(self.span
, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty
);
582 Rvalue
::BinaryOp(op
, box (ref lhs
, ref rhs
))
583 | Rvalue
::CheckedBinaryOp(op
, box (ref lhs
, ref rhs
)) => {
584 let lhs_ty
= lhs
.ty(self.body
, self.tcx
);
585 let rhs_ty
= rhs
.ty(self.body
, self.tcx
);
587 if is_int_bool_or_char(lhs_ty
) && is_int_bool_or_char(rhs_ty
) {
588 // Int, bool, and char operations are fine.
589 } else if lhs_ty
.is_fn_ptr() || lhs_ty
.is_unsafe_ptr() {
590 assert_eq
!(lhs_ty
, rhs_ty
);
598 || op
== BinOp
::Offset
601 self.check_op(ops
::RawPtrComparison
);
602 } else if lhs_ty
.is_floating_point() || rhs_ty
.is_floating_point() {
603 self.check_op(ops
::FloatingPointOp
);
607 "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
616 fn visit_operand(&mut self, op
: &Operand
<'tcx
>, location
: Location
) {
617 self.super_operand(op
, location
);
618 if let Operand
::Constant(c
) = op
{
619 if let Some(def_id
) = c
.check_static_ptr(self.tcx
) {
620 self.check_static(def_id
, self.span
);
624 fn visit_projection_elem(
627 proj_base
: &[PlaceElem
<'tcx
>],
628 elem
: PlaceElem
<'tcx
>,
629 context
: PlaceContext
,
633 "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
634 context={:?} location={:?}",
642 self.super_projection_elem(place_local
, proj_base
, elem
, context
, location
);
645 ProjectionElem
::Deref
=> {
646 let base_ty
= Place
::ty_from(place_local
, proj_base
, self.body
, self.tcx
).ty
;
647 if base_ty
.is_unsafe_ptr() {
648 if proj_base
.is_empty() {
649 let decl
= &self.body
.local_decls
[place_local
];
650 if let Some(box LocalInfo
::StaticRef { def_id, .. }
) = decl
.local_info
{
651 let span
= decl
.source_info
.span
;
652 self.check_static(def_id
, span
);
657 // `*const T` is stable, `*mut T` is not
658 if !base_ty
.is_mutable_ptr() {
662 self.check_op(ops
::RawMutPtrDeref
);
665 if context
.is_mutating_use() {
666 self.check_op(ops
::MutDeref
);
670 ProjectionElem
::ConstantIndex { .. }
671 | ProjectionElem
::Downcast(..)
672 | ProjectionElem
::Subslice { .. }
673 | ProjectionElem
::Field(..)
674 | ProjectionElem
::Index(_
) => {}
    fn visit_source_info(&mut self, source_info: &SourceInfo) {
        trace!("visit_source_info: source_info={:?}", source_info);
        // Track the span of the MIR element currently being visited so that
        // diagnostics emitted via `check_op` point at the right source span.
        self.span = source_info.span;
683 fn visit_statement(&mut self, statement
: &Statement
<'tcx
>, location
: Location
) {
684 trace
!("visit_statement: statement={:?} location={:?}", statement
, location
);
686 self.super_statement(statement
, location
);
688 match statement
.kind
{
689 StatementKind
::Assign(..)
690 | StatementKind
::SetDiscriminant { .. }
691 | StatementKind
::Deinit(..)
692 | StatementKind
::FakeRead(..)
693 | StatementKind
::StorageLive(_
)
694 | StatementKind
::StorageDead(_
)
695 | StatementKind
::Retag { .. }
696 | StatementKind
::AscribeUserType(..)
697 | StatementKind
::Coverage(..)
698 | StatementKind
::CopyNonOverlapping(..)
699 | StatementKind
::Nop
=> {}
703 #[instrument(level = "debug", skip(self))]
704 fn visit_terminator(&mut self, terminator
: &Terminator
<'tcx
>, location
: Location
) {
705 use rustc_target
::spec
::abi
::Abi
::RustIntrinsic
;
707 self.super_terminator(terminator
, location
);
709 match &terminator
.kind
{
710 TerminatorKind
::Call { func, args, fn_span, from_hir_call, .. }
=> {
711 let ConstCx { tcx, body, param_env, .. }
= *self.ccx
;
712 let caller
= self.def_id();
714 let fn_ty
= func
.ty(body
, tcx
);
716 let (mut callee
, mut substs
) = match *fn_ty
.kind() {
717 ty
::FnDef(def_id
, substs
) => (def_id
, substs
),
720 self.check_op(ops
::FnCallIndirect
);
724 span_bug
!(terminator
.source_info
.span
, "invalid callee of type {:?}", fn_ty
)
728 let mut nonconst_call_permission
= false;
730 // Attempting to call a trait method?
731 if let Some(trait_id
) = tcx
.trait_of_item(callee
) {
732 trace
!("attempting to call a trait method");
733 if !self.tcx
.features().const_trait_impl
{
734 self.check_op(ops
::FnCallNonConst
{
739 from_hir_call
: *from_hir_call
,
744 let trait_ref
= TraitRef
::from_method(tcx
, trait_id
, substs
);
745 let poly_trait_pred
= Binder
::dummy(TraitPredicate
{
747 constness
: ty
::BoundConstness
::ConstIfConst
,
748 polarity
: ty
::ImplPolarity
::Positive
,
751 Obligation
::new(ObligationCause
::dummy(), param_env
, poly_trait_pred
);
753 let implsrc
= tcx
.infer_ctxt().enter(|infcx
| {
754 let mut selcx
= SelectionContext
::new(&infcx
);
755 selcx
.select(&obligation
)
759 Ok(Some(ImplSource
::Param(_
, ty
::BoundConstness
::ConstIfConst
))) => {
761 "const_trait_impl: provided {:?} via where-clause in {:?}",
766 Ok(Some(ImplSource
::UserDefined(data
))) => {
767 let callee_name
= tcx
.item_name(callee
);
768 if let Some(&did
) = tcx
769 .associated_item_def_ids(data
.impl_def_id
)
771 .find(|did
| tcx
.item_name(**did
) == callee_name
)
773 // using internal substs is ok here, since this is only
774 // used for the `resolve` call below
775 substs
= InternalSubsts
::identity_for_item(tcx
, did
);
779 if let hir
::Constness
::NotConst
= tcx
.impl_constness(data
.impl_def_id
) {
780 self.check_op(ops
::FnCallNonConst
{
785 from_hir_call
: *from_hir_call
,
790 _
if !tcx
.is_const_fn_raw(callee
) => {
791 // At this point, it is only legal when the caller is marked with
792 // #[default_method_body_is_const], and the callee is in the same
794 let callee_trait
= tcx
.trait_of_item(callee
);
795 if callee_trait
.is_some()
796 && tcx
.has_attr(caller
.to_def_id(), sym
::default_method_body_is_const
)
797 && callee_trait
== tcx
.trait_of_item(caller
)
798 // Can only call methods when it's `<Self as TheTrait>::f`.
799 && tcx
.types
.self_param
== substs
.type_at(0)
801 nonconst_call_permission
= true;
804 if !nonconst_call_permission
{
805 let obligation
= Obligation
::new(
806 ObligationCause
::dummy_with_span(*fn_span
),
809 poly_trait_pred
.map_bound(ty
::PredicateKind
::Trait
),
813 // improve diagnostics by showing what failed. Our requirements are stricter this time
814 // as we are going to error again anyways.
815 tcx
.infer_ctxt().enter(|infcx
| {
816 if let Err(e
) = implsrc
{
817 infcx
.report_selection_error(
826 self.check_op(ops
::FnCallNonConst
{
831 from_hir_call
: *from_hir_call
,
839 // Resolve a trait method call to its concrete implementation, which may be in a
840 // `const` trait impl.
841 let instance
= Instance
::resolve(tcx
, param_env
, callee
, substs
);
842 debug
!("Resolving ({:?}) -> {:?}", callee
, instance
);
843 if let Ok(Some(func
)) = instance
{
844 if let InstanceDef
::Item(def
) = func
.def
{
850 // At this point, we are calling a function, `callee`, whose `DefId` is known...
852 // `begin_panic` and `panic_display` are generic functions that accept
853 // types other than str. Check to enforce that only str can be used in
856 // const-eval of the `begin_panic` fn assumes the argument is `&str`
857 if Some(callee
) == tcx
.lang_items().begin_panic_fn() {
858 match args
[0].ty(&self.ccx
.body
.local_decls
, tcx
).kind() {
859 ty
::Ref(_
, ty
, _
) if ty
.is_str() => return,
860 _
=> self.check_op(ops
::PanicNonStr
),
864 // const-eval of the `panic_display` fn assumes the argument is `&&str`
865 if Some(callee
) == tcx
.lang_items().panic_display() {
866 match args
[0].ty(&self.ccx
.body
.local_decls
, tcx
).kind() {
867 ty
::Ref(_
, ty
, _
) if matches
!(ty
.kind(), ty
::Ref(_
, ty
, _
) if ty
.is_str()) =>
871 _
=> self.check_op(ops
::PanicNonStr
),
875 if Some(callee
) == tcx
.lang_items().exchange_malloc_fn() {
876 self.check_op(ops
::HeapAllocation
);
880 // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
881 let is_async_block
= Some(callee
) == tcx
.lang_items().from_generator_fn();
883 let kind
= hir
::GeneratorKind
::Async(hir
::AsyncGeneratorKind
::Block
);
884 self.check_op(ops
::Generator(kind
));
888 let is_intrinsic
= tcx
.fn_sig(callee
).abi() == RustIntrinsic
;
890 if !tcx
.is_const_fn_raw(callee
) {
891 if tcx
.trait_of_item(callee
).is_some() {
892 if tcx
.has_attr(callee
, sym
::default_method_body_is_const
) {
893 // To get to here we must have already found a const impl for the
894 // trait, but for it to still be non-const can be that the impl is
895 // using default method bodies.
896 nonconst_call_permission
= true;
900 if !nonconst_call_permission
{
901 self.check_op(ops
::FnCallNonConst
{
906 from_hir_call
: *from_hir_call
,
912 // If the `const fn` we are trying to call is not const-stable, ensure that we have
913 // the proper feature gate enabled.
914 if let Some(gate
) = is_unstable_const_fn(tcx
, callee
) {
915 trace
!(?gate
, "calling unstable const fn");
916 if self.span
.allows_unstable(gate
) {
920 // Calling an unstable function *always* requires that the corresponding gate
921 // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
922 if !tcx
.features().declared_lib_features
.iter().any(|&(sym
, _
)| sym
== gate
) {
923 self.check_op(ops
::FnCallUnstable(callee
, Some(gate
)));
927 // If this crate is not using stability attributes, or the caller is not claiming to be a
928 // stable `const fn`, that is all that is required.
929 if !self.ccx
.is_const_stable_const_fn() {
930 trace
!("crate not using stability attributes or caller not stably const");
934 // Otherwise, we are something const-stable calling a const-unstable fn.
936 if super::rustc_allow_const_fn_unstable(tcx
, caller
, gate
) {
937 trace
!("rustc_allow_const_fn_unstable gate active");
941 self.check_op(ops
::FnCallUnstable(callee
, Some(gate
)));
945 // FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
946 // have no `rustc_const_stable` attributes to be const-unstable as well. This
947 // should be fixed later.
948 let callee_is_unstable_unmarked
= tcx
.lookup_const_stability(callee
).is_none()
949 && tcx
.lookup_stability(callee
).map_or(false, |s
| s
.level
.is_unstable());
950 if callee_is_unstable_unmarked
{
951 trace
!("callee_is_unstable_unmarked");
952 // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
953 // `extern` functions, and these have no way to get marked `const`. So instead we
954 // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
955 if self.ccx
.is_const_stable_const_fn() || is_intrinsic
{
956 self.check_op(ops
::FnCallUnstable(callee
, None
));
960 trace
!("permitting call");
963 // Forbid all `Drop` terminators unless the place being dropped is a local with no
964 // projections that cannot be `NeedsNonConstDrop`.
965 TerminatorKind
::Drop { place: dropped_place, .. }
966 | TerminatorKind
::DropAndReplace { place: dropped_place, .. }
=> {
967 // If we are checking live drops after drop-elaboration, don't emit duplicate
969 if super::post_drop_elaboration
::checking_enabled(self.ccx
) {
973 let mut err_span
= self.span
;
974 let ty_of_dropped_place
= dropped_place
.ty(self.body
, self.tcx
).ty
;
976 let ty_needs_non_const_drop
=
977 qualifs
::NeedsNonConstDrop
::in_any_value_of_ty(self.ccx
, ty_of_dropped_place
);
979 debug
!(?ty_of_dropped_place
, ?ty_needs_non_const_drop
);
981 if !ty_needs_non_const_drop
{
985 let needs_non_const_drop
= if let Some(local
) = dropped_place
.as_local() {
986 // Use the span where the local was declared as the span of the drop error.
987 err_span
= self.body
.local_decls
[local
].source_info
.span
;
988 self.qualifs
.needs_non_const_drop(self.ccx
, local
, location
)
993 if needs_non_const_drop
{
994 self.check_op_spanned(
995 ops
::LiveDrop { dropped_at: Some(terminator.source_info.span) }
,
1001 TerminatorKind
::InlineAsm { .. }
=> self.check_op(ops
::InlineAsm
),
1003 TerminatorKind
::GeneratorDrop
| TerminatorKind
::Yield { .. }
=> {
1004 self.check_op(ops
::Generator(hir
::GeneratorKind
::Gen
))
1007 TerminatorKind
::Abort
=> {
1008 // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
1009 span_bug
!(self.span
, "`Abort` terminator outside of cleanup block")
1012 TerminatorKind
::Assert { .. }
1013 | TerminatorKind
::FalseEdge { .. }
1014 | TerminatorKind
::FalseUnwind { .. }
1015 | TerminatorKind
::Goto { .. }
1016 | TerminatorKind
::Resume
1017 | TerminatorKind
::Return
1018 | TerminatorKind
::SwitchInt { .. }
1019 | TerminatorKind
::Unreachable
=> {}
1024 fn place_as_reborrow
<'tcx
>(
1028 ) -> Option
<PlaceRef
<'tcx
>> {
1029 match place
.as_ref().last_projection() {
1030 Some((place_base
, ProjectionElem
::Deref
)) => {
1031 // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
1032 // that points to the allocation for the static. Don't treat these as reborrows.
1033 if body
.local_decls
[place_base
.local
].is_ref_to_static() {
1036 // Ensure the type being derefed is a reference and not a raw pointer.
1037 // This is sufficient to prevent an access to a `static mut` from being marked as a
1038 // reborrow, even if the check above were to disappear.
1039 let inner_ty
= place_base
.ty(body
, tcx
).ty
;
1041 if let ty
::Ref(..) = inner_ty
.kind() {
1042 return Some(place_base
);
/// Returns `true` for the primitive types on which unary/binary operations
/// are unconditionally permitted in a const context: `bool`, the integer
/// types, and `char`.
fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
    ty.is_bool() || ty.is_integral() || ty.is_char()
1056 fn emit_unstable_in_stable_error(ccx
: &ConstCx
<'_
, '_
>, span
: Span
, gate
: Symbol
) {
1057 let attr_span
= ccx
.tcx
.def_span(ccx
.def_id()).shrink_to_lo();
1063 &format
!("const-stable function cannot use `#[feature({})]`", gate
.as_str()),
1067 "if it is not part of the public API, make this function unstably const",
1068 concat
!(r
#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
1069 Applicability
::HasPlaceholders
,
1073 "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
1074 format
!("#[rustc_allow_const_fn_unstable({})]\n", gate
),
1075 Applicability
::MaybeIncorrect
,