]>
Commit | Line | Data |
---|---|---|
e74abb32 XL |
1 | //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations. |
2 | ||
fc512014 | 3 | use rustc_errors::{struct_span_err, Applicability, Diagnostic, ErrorReported}; |
1b1a35ee XL |
4 | use rustc_hir::def_id::DefId; |
5 | use rustc_hir::{self as hir, HirId, LangItem}; | |
5869c6ff | 6 | use rustc_index::bit_set::BitSet; |
74b04a01 | 7 | use rustc_infer::infer::TyCtxtInferExt; |
fc512014 | 8 | use rustc_infer::traits::{ImplSource, Obligation, ObligationCause}; |
ba9703b0 XL |
9 | use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; |
10 | use rustc_middle::mir::*; | |
11 | use rustc_middle::ty::cast::CastTy; | |
1b1a35ee XL |
12 | use rustc_middle::ty::subst::GenericArgKind; |
13 | use rustc_middle::ty::{ | |
14 | self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt, TypeAndMut, | |
15 | }; | |
fc512014 | 16 | use rustc_middle::ty::{Binder, TraitPredicate, TraitRef}; |
1b1a35ee | 17 | use rustc_span::{sym, Span, Symbol}; |
ba9703b0 | 18 | use rustc_trait_selection::traits::error_reporting::InferCtxtExt; |
fc512014 | 19 | use rustc_trait_selection::traits::{self, SelectionContext, TraitEngine}; |
e74abb32 | 20 | |
1b1a35ee | 21 | use std::mem; |
e74abb32 XL |
22 | use std::ops::Deref; |
23 | ||
1b1a35ee | 24 | use super::ops::{self, NonConstOp, Status}; |
f9f354fc | 25 | use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop}; |
e74abb32 | 26 | use super::resolver::FlowSensitiveAnalysis; |
f9f354fc | 27 | use super::{is_lang_panic_fn, ConstCx, Qualif}; |
1b1a35ee | 28 | use crate::const_eval::is_unstable_const_fn; |
f9f354fc | 29 | use crate::dataflow::impls::MaybeMutBorrowedLocals; |
ba9703b0 | 30 | use crate::dataflow::{self, Analysis}; |
e74abb32 | 31 | |
74b04a01 XL |
32 | // We are using `MaybeMutBorrowedLocals` as a proxy for whether an item may have been mutated |
33 | // through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals` | |
34 | // kills locals upon `StorageDead` because a local will never be used after a `StorageDead`. | |
f9f354fc | 35 | type IndirectlyMutableResults<'mir, 'tcx> = |
74b04a01 | 36 | dataflow::ResultsCursor<'mir, 'tcx, MaybeMutBorrowedLocals<'mir, 'tcx>>; |
e74abb32 | 37 | |
f9f354fc XL |
38 | type QualifResults<'mir, 'tcx, Q> = |
39 | dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>; | |
e74abb32 | 40 | |
f9f354fc XL |
41 | #[derive(Default)] |
42 | pub struct Qualifs<'mir, 'tcx> { | |
43 | has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>, | |
44 | needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>, | |
45 | indirectly_mutable: Option<IndirectlyMutableResults<'mir, 'tcx>>, | |
e74abb32 XL |
46 | } |
47 | ||
f9f354fc | 48 | impl Qualifs<'mir, 'tcx> { |
f035d41b | 49 | pub fn indirectly_mutable( |
f9f354fc XL |
50 | &mut self, |
51 | ccx: &'mir ConstCx<'mir, 'tcx>, | |
52 | local: Local, | |
53 | location: Location, | |
54 | ) -> bool { | |
55 | let indirectly_mutable = self.indirectly_mutable.get_or_insert_with(|| { | |
29967ef6 | 56 | let ConstCx { tcx, body, param_env, .. } = *ccx; |
f9f354fc XL |
57 | |
58 | // We can use `unsound_ignore_borrow_on_drop` here because custom drop impls are not | |
59 | // allowed in a const. | |
60 | // | |
61 | // FIXME(ecstaticmorse): Someday we want to allow custom drop impls. How do we do this | |
62 | // without breaking stable code? | |
63 | MaybeMutBorrowedLocals::mut_borrows_only(tcx, &body, param_env) | |
64 | .unsound_ignore_borrow_on_drop() | |
29967ef6 | 65 | .into_engine(tcx, &body) |
1b1a35ee | 66 | .pass_name("const_qualification") |
f9f354fc XL |
67 | .iterate_to_fixpoint() |
68 | .into_results_cursor(&body) | |
69 | }); | |
70 | ||
71 | indirectly_mutable.seek_before_primary_effect(location); | |
72 | indirectly_mutable.get().contains(local) | |
e74abb32 XL |
73 | } |
74 | ||
75 | /// Returns `true` if `local` is `NeedsDrop` at the given `Location`. | |
76 | /// | |
77 | /// Only updates the cursor if absolutely necessary | |
f035d41b | 78 | pub fn needs_drop( |
f9f354fc XL |
79 | &mut self, |
80 | ccx: &'mir ConstCx<'mir, 'tcx>, | |
81 | local: Local, | |
82 | location: Location, | |
83 | ) -> bool { | |
84 | let ty = ccx.body.local_decls[local].ty; | |
85 | if !NeedsDrop::in_any_value_of_ty(ccx, ty) { | |
e74abb32 XL |
86 | return false; |
87 | } | |
88 | ||
f9f354fc | 89 | let needs_drop = self.needs_drop.get_or_insert_with(|| { |
29967ef6 | 90 | let ConstCx { tcx, body, .. } = *ccx; |
f9f354fc XL |
91 | |
92 | FlowSensitiveAnalysis::new(NeedsDrop, ccx) | |
29967ef6 | 93 | .into_engine(tcx, &body) |
f9f354fc XL |
94 | .iterate_to_fixpoint() |
95 | .into_results_cursor(&body) | |
96 | }); | |
97 | ||
98 | needs_drop.seek_before_primary_effect(location); | |
99 | needs_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location) | |
e74abb32 XL |
100 | } |
101 | ||
60c5eb7d XL |
102 | /// Returns `true` if `local` is `HasMutInterior` at the given `Location`. |
103 | /// | |
104 | /// Only updates the cursor if absolutely necessary. | |
f035d41b | 105 | pub fn has_mut_interior( |
f9f354fc XL |
106 | &mut self, |
107 | ccx: &'mir ConstCx<'mir, 'tcx>, | |
108 | local: Local, | |
109 | location: Location, | |
110 | ) -> bool { | |
111 | let ty = ccx.body.local_decls[local].ty; | |
112 | if !HasMutInterior::in_any_value_of_ty(ccx, ty) { | |
60c5eb7d XL |
113 | return false; |
114 | } | |
115 | ||
f9f354fc | 116 | let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| { |
29967ef6 | 117 | let ConstCx { tcx, body, .. } = *ccx; |
f9f354fc XL |
118 | |
119 | FlowSensitiveAnalysis::new(HasMutInterior, ccx) | |
29967ef6 | 120 | .into_engine(tcx, &body) |
f9f354fc XL |
121 | .iterate_to_fixpoint() |
122 | .into_results_cursor(&body) | |
123 | }); | |
124 | ||
125 | has_mut_interior.seek_before_primary_effect(location); | |
126 | has_mut_interior.get().contains(local) || self.indirectly_mutable(ccx, local, location) | |
60c5eb7d XL |
127 | } |
128 | ||
fc512014 XL |
129 | fn in_return_place( |
130 | &mut self, | |
131 | ccx: &'mir ConstCx<'mir, 'tcx>, | |
132 | error_occured: Option<ErrorReported>, | |
133 | ) -> ConstQualifs { | |
60c5eb7d XL |
134 | // Find the `Return` terminator if one exists. |
135 | // | |
136 | // If no `Return` terminator exists, this MIR is divergent. Just return the conservative | |
137 | // qualifs for the return type. | |
f9f354fc | 138 | let return_block = ccx |
dfeec247 | 139 | .body |
60c5eb7d XL |
140 | .basic_blocks() |
141 | .iter_enumerated() | |
dfeec247 XL |
142 | .find(|(_, block)| match block.terminator().kind { |
143 | TerminatorKind::Return => true, | |
144 | _ => false, | |
60c5eb7d XL |
145 | }) |
146 | .map(|(bb, _)| bb); | |
147 | ||
148 | let return_block = match return_block { | |
fc512014 | 149 | None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), error_occured), |
60c5eb7d XL |
150 | Some(bb) => bb, |
151 | }; | |
152 | ||
f9f354fc XL |
153 | let return_loc = ccx.body.terminator_loc(return_block); |
154 | ||
155 | let custom_eq = match ccx.const_kind() { | |
156 | // We don't care whether a `const fn` returns a value that is not structurally | |
157 | // matchable. Functions calls are opaque and always use type-based qualification, so | |
158 | // this value should never be used. | |
159 | hir::ConstContext::ConstFn => true, | |
160 | ||
161 | // If we know that all values of the return type are structurally matchable, there's no | |
162 | // need to run dataflow. | |
163 | _ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false, | |
164 | ||
165 | hir::ConstContext::Const | hir::ConstContext::Static(_) => { | |
166 | let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx) | |
29967ef6 | 167 | .into_engine(ccx.tcx, &ccx.body) |
f9f354fc XL |
168 | .iterate_to_fixpoint() |
169 | .into_results_cursor(&ccx.body); | |
170 | ||
171 | cursor.seek_after_primary_effect(return_loc); | |
172 | cursor.contains(RETURN_PLACE) | |
173 | } | |
174 | }; | |
60c5eb7d XL |
175 | |
176 | ConstQualifs { | |
f9f354fc XL |
177 | needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc), |
178 | has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc), | |
179 | custom_eq, | |
fc512014 | 180 | error_occured, |
60c5eb7d XL |
181 | } |
182 | } | |
e74abb32 XL |
183 | } |
184 | ||
f9f354fc XL |
185 | pub struct Validator<'mir, 'tcx> { |
186 | ccx: &'mir ConstCx<'mir, 'tcx>, | |
187 | qualifs: Qualifs<'mir, 'tcx>, | |
e74abb32 XL |
188 | |
189 | /// The span of the current statement. | |
190 | span: Span, | |
1b1a35ee | 191 | |
5869c6ff XL |
192 | /// A set that stores for each local whether it has a `StorageDead` for it somewhere. |
193 | local_has_storage_dead: Option<BitSet<Local>>, | |
194 | ||
fc512014 | 195 | error_emitted: Option<ErrorReported>, |
1b1a35ee | 196 | secondary_errors: Vec<Diagnostic>, |
e74abb32 XL |
197 | } |
198 | ||
f9f354fc XL |
199 | impl Deref for Validator<'mir, 'tcx> { |
200 | type Target = ConstCx<'mir, 'tcx>; | |
e74abb32 XL |
201 | |
202 | fn deref(&self) -> &Self::Target { | |
f9f354fc | 203 | &self.ccx |
e74abb32 XL |
204 | } |
205 | } | |
206 | ||
f9f354fc XL |
207 | impl Validator<'mir, 'tcx> { |
208 | pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self { | |
1b1a35ee XL |
209 | Validator { |
210 | span: ccx.body.span, | |
211 | ccx, | |
212 | qualifs: Default::default(), | |
5869c6ff | 213 | local_has_storage_dead: None, |
fc512014 | 214 | error_emitted: None, |
1b1a35ee XL |
215 | secondary_errors: Vec::new(), |
216 | } | |
e74abb32 XL |
217 | } |
218 | ||
60c5eb7d | 219 | pub fn check_body(&mut self) { |
29967ef6 XL |
220 | let ConstCx { tcx, body, .. } = *self.ccx; |
221 | let def_id = self.ccx.def_id(); | |
1b1a35ee XL |
222 | |
223 | // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's | |
224 | // no need to emit duplicate errors here. | |
6a06907d | 225 | if is_async_fn(self.ccx) || body.generator.is_some() { |
1b1a35ee XL |
226 | tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`"); |
227 | return; | |
228 | } | |
60c5eb7d | 229 | |
1b1a35ee XL |
230 | // The local type and predicate checks are not free and only relevant for `const fn`s. |
231 | if self.const_kind() == hir::ConstContext::ConstFn { | |
232 | // Prevent const trait methods from being annotated as `stable`. | |
233 | // FIXME: Do this as part of stability checking. | |
234 | if self.is_const_stable_const_fn() { | |
29967ef6 | 235 | let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); |
1b1a35ee XL |
236 | if crate::const_eval::is_parent_const_impl_raw(tcx, hir_id) { |
237 | struct_span_err!( | |
238 | self.ccx.tcx.sess, | |
239 | self.span, | |
240 | E0723, | |
241 | "trait methods cannot be stable const fn" | |
242 | ) | |
243 | .emit(); | |
244 | } | |
245 | } | |
60c5eb7d | 246 | |
1b1a35ee XL |
247 | self.check_item_predicates(); |
248 | ||
249 | for (idx, local) in body.local_decls.iter_enumerated() { | |
250 | // Handle the return place below. | |
251 | if idx == RETURN_PLACE || local.internal { | |
252 | continue; | |
253 | } | |
254 | ||
255 | self.span = local.source_info.span; | |
256 | self.check_local_or_return_ty(local.ty, idx); | |
60c5eb7d | 257 | } |
1b1a35ee XL |
258 | |
259 | // impl trait is gone in MIR, so check the return type of a const fn by its signature | |
260 | // instead of the type of the return place. | |
261 | self.span = body.local_decls[RETURN_PLACE].source_info.span; | |
262 | let return_ty = tcx.fn_sig(def_id).output(); | |
263 | self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE); | |
60c5eb7d XL |
264 | } |
265 | ||
ba9703b0 | 266 | self.visit_body(&body); |
60c5eb7d XL |
267 | |
268 | // Ensure that the end result is `Sync` in a non-thread local `static`. | |
1b1a35ee XL |
269 | let should_check_for_sync = self.const_kind() |
270 | == hir::ConstContext::Static(hir::Mutability::Not) | |
f035d41b | 271 | && !tcx.is_thread_local_static(def_id.to_def_id()); |
60c5eb7d XL |
272 | |
273 | if should_check_for_sync { | |
3dfed10e | 274 | let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); |
60c5eb7d XL |
275 | check_return_ty_is_sync(tcx, &body, hir_id); |
276 | } | |
1b1a35ee XL |
277 | |
278 | // If we got through const-checking without emitting any "primary" errors, emit any | |
279 | // "secondary" errors if they occurred. | |
280 | let secondary_errors = mem::take(&mut self.secondary_errors); | |
fc512014 | 281 | if self.error_emitted.is_none() { |
1b1a35ee XL |
282 | for error in secondary_errors { |
283 | self.tcx.sess.diagnostic().emit_diagnostic(&error); | |
284 | } | |
285 | } else { | |
286 | assert!(self.tcx.sess.has_errors()); | |
287 | } | |
60c5eb7d XL |
288 | } |
289 | ||
5869c6ff XL |
290 | fn local_has_storage_dead(&mut self, local: Local) -> bool { |
291 | let ccx = self.ccx; | |
292 | self.local_has_storage_dead | |
293 | .get_or_insert_with(|| { | |
294 | struct StorageDeads { | |
295 | locals: BitSet<Local>, | |
296 | } | |
297 | impl Visitor<'tcx> for StorageDeads { | |
298 | fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) { | |
299 | if let StatementKind::StorageDead(l) = stmt.kind { | |
300 | self.locals.insert(l); | |
301 | } | |
302 | } | |
303 | } | |
304 | let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) }; | |
305 | v.visit_body(ccx.body); | |
306 | v.locals | |
307 | }) | |
308 | .contains(local) | |
309 | } | |
310 | ||
60c5eb7d | 311 | pub fn qualifs_in_return_place(&mut self) -> ConstQualifs { |
fc512014 | 312 | self.qualifs.in_return_place(self.ccx, self.error_emitted) |
e74abb32 XL |
313 | } |
314 | ||
e74abb32 | 315 | /// Emits an error if an expression cannot be evaluated in the current context. |
60c5eb7d | 316 | pub fn check_op(&mut self, op: impl NonConstOp) { |
1b1a35ee | 317 | self.check_op_spanned(op, self.span); |
f035d41b XL |
318 | } |
319 | ||
320 | /// Emits an error at the given `span` if an expression cannot be evaluated in the current | |
321 | /// context. | |
1b1a35ee XL |
322 | pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) { |
323 | let gate = match op.status_in_item(self.ccx) { | |
324 | Status::Allowed => return, | |
325 | ||
326 | Status::Unstable(gate) if self.tcx.features().enabled(gate) => { | |
327 | let unstable_in_stable = self.ccx.is_const_stable_const_fn() | |
29967ef6 XL |
328 | && !super::rustc_allow_const_fn_unstable( |
329 | self.tcx, | |
330 | self.def_id().to_def_id(), | |
331 | gate, | |
332 | ); | |
1b1a35ee XL |
333 | if unstable_in_stable { |
334 | emit_unstable_in_stable_error(self.ccx, span, gate); | |
335 | } | |
336 | ||
337 | return; | |
338 | } | |
339 | ||
340 | Status::Unstable(gate) => Some(gate), | |
341 | Status::Forbidden => None, | |
342 | }; | |
343 | ||
344 | if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you { | |
345 | self.tcx.sess.miri_unleashed_feature(span, gate); | |
346 | return; | |
347 | } | |
348 | ||
349 | let mut err = op.build_error(self.ccx, span); | |
350 | assert!(err.is_error()); | |
351 | ||
352 | match op.importance() { | |
353 | ops::DiagnosticImportance::Primary => { | |
fc512014 | 354 | self.error_emitted = Some(ErrorReported); |
1b1a35ee XL |
355 | err.emit(); |
356 | } | |
357 | ||
358 | ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors), | |
359 | } | |
e74abb32 | 360 | } |
60c5eb7d XL |
361 | |
362 | fn check_static(&mut self, def_id: DefId, span: Span) { | |
f9f354fc XL |
363 | assert!( |
364 | !self.tcx.is_thread_local_static(def_id), | |
365 | "tls access is checked in `Rvalue::ThreadLocalRef" | |
366 | ); | |
367 | self.check_op_spanned(ops::StaticAccess, span) | |
60c5eb7d | 368 | } |
1b1a35ee XL |
369 | |
370 | fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) { | |
371 | let kind = self.body.local_kind(local); | |
372 | ||
373 | for ty in ty.walk() { | |
374 | let ty = match ty.unpack() { | |
375 | GenericArgKind::Type(ty) => ty, | |
376 | ||
377 | // No constraints on lifetimes or constants, except potentially | |
378 | // constants' types, but `walk` will get to them as well. | |
379 | GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue, | |
380 | }; | |
381 | ||
382 | match *ty.kind() { | |
383 | ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)), | |
384 | ty::Opaque(..) => self.check_op(ops::ty::ImplTrait), | |
385 | ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)), | |
386 | ||
387 | ty::Dynamic(preds, _) => { | |
388 | for pred in preds.iter() { | |
389 | match pred.skip_binder() { | |
390 | ty::ExistentialPredicate::AutoTrait(_) | |
391 | | ty::ExistentialPredicate::Projection(_) => { | |
392 | self.check_op(ops::ty::TraitBound(kind)) | |
393 | } | |
394 | ty::ExistentialPredicate::Trait(trait_ref) => { | |
395 | if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() { | |
396 | self.check_op(ops::ty::TraitBound(kind)) | |
397 | } | |
398 | } | |
399 | } | |
400 | } | |
401 | } | |
402 | _ => {} | |
403 | } | |
404 | } | |
405 | } | |
406 | ||
407 | fn check_item_predicates(&mut self) { | |
29967ef6 | 408 | let ConstCx { tcx, .. } = *self.ccx; |
1b1a35ee | 409 | |
29967ef6 | 410 | let mut current = self.def_id().to_def_id(); |
1b1a35ee XL |
411 | loop { |
412 | let predicates = tcx.predicates_of(current); | |
413 | for (predicate, _) in predicates.predicates { | |
5869c6ff XL |
414 | match predicate.kind().skip_binder() { |
415 | ty::PredicateKind::RegionOutlives(_) | |
416 | | ty::PredicateKind::TypeOutlives(_) | |
417 | | ty::PredicateKind::WellFormed(_) | |
418 | | ty::PredicateKind::Projection(_) | |
419 | | ty::PredicateKind::ConstEvaluatable(..) | |
420 | | ty::PredicateKind::ConstEquate(..) | |
421 | | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue, | |
422 | ty::PredicateKind::ObjectSafe(_) => { | |
1b1a35ee XL |
423 | bug!("object safe predicate on function: {:#?}", predicate) |
424 | } | |
5869c6ff | 425 | ty::PredicateKind::ClosureKind(..) => { |
1b1a35ee XL |
426 | bug!("closure kind predicate on function: {:#?}", predicate) |
427 | } | |
5869c6ff | 428 | ty::PredicateKind::Subtype(_) => { |
1b1a35ee XL |
429 | bug!("subtype predicate on function: {:#?}", predicate) |
430 | } | |
5869c6ff | 431 | ty::PredicateKind::Trait(pred, constness) => { |
1b1a35ee XL |
432 | if Some(pred.def_id()) == tcx.lang_items().sized_trait() { |
433 | continue; | |
434 | } | |
435 | match pred.self_ty().kind() { | |
436 | ty::Param(p) => { | |
437 | let generics = tcx.generics_of(current); | |
438 | let def = generics.type_param(p, tcx); | |
439 | let span = tcx.def_span(def.def_id); | |
440 | ||
441 | // These are part of the function signature, so treat them like | |
442 | // arguments when determining importance. | |
443 | let kind = LocalKind::Arg; | |
444 | ||
445 | if constness == hir::Constness::Const { | |
446 | self.check_op_spanned(ops::ty::TraitBound(kind), span); | |
447 | } else if !tcx.features().const_fn | |
448 | || self.ccx.is_const_stable_const_fn() | |
449 | { | |
450 | // HACK: We shouldn't need the conditional above, but trait | |
451 | // bounds on containing impl blocks are wrongly being marked as | |
452 | // "not-const". | |
453 | self.check_op_spanned(ops::ty::TraitBound(kind), span); | |
454 | } | |
455 | } | |
456 | // other kinds of bounds are either tautologies | |
457 | // or cause errors in other passes | |
458 | _ => continue, | |
459 | } | |
460 | } | |
461 | } | |
462 | } | |
463 | match predicates.parent { | |
464 | Some(parent) => current = parent, | |
465 | None => break, | |
466 | } | |
467 | } | |
468 | } | |
5869c6ff XL |
469 | |
470 | fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) { | |
471 | match self.const_kind() { | |
472 | // In a const fn all borrows are transient or point to the places given via | |
473 | // references in the arguments (so we already checked them with | |
474 | // TransientMutBorrow/MutBorrow as appropriate). | |
475 | // The borrow checker guarantees that no new non-transient borrows are created. | |
476 | // NOTE: Once we have heap allocations during CTFE we need to figure out | |
477 | // how to prevent `const fn` to create long-lived allocations that point | |
478 | // to mutable memory. | |
479 | hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)), | |
480 | _ => { | |
481 | // Locals with StorageDead do not live beyond the evaluation and can | |
482 | // thus safely be borrowed without being able to be leaked to the final | |
483 | // value of the constant. | |
484 | if self.local_has_storage_dead(local) { | |
485 | self.check_op(ops::TransientMutBorrow(kind)); | |
486 | } else { | |
487 | self.check_op(ops::MutBorrow(kind)); | |
488 | } | |
489 | } | |
490 | } | |
491 | } | |
e74abb32 XL |
492 | } |
493 | ||
f9f354fc | 494 | impl Visitor<'tcx> for Validator<'mir, 'tcx> { |
dfeec247 | 495 | fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) { |
60c5eb7d XL |
496 | trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup); |
497 | ||
29967ef6 XL |
498 | // We don't const-check basic blocks on the cleanup path since we never unwind during |
499 | // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks | |
500 | // are unreachable during const-eval. | |
60c5eb7d | 501 | // |
29967ef6 XL |
502 | // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because |
503 | // locals that would never be dropped during normal execution are sometimes dropped during | |
504 | // unwinding, which means backwards-incompatible live-drop errors. | |
60c5eb7d XL |
505 | if block.is_cleanup { |
506 | return; | |
507 | } | |
508 | ||
509 | self.super_basic_block_data(bb, block); | |
510 | } | |
511 | ||
e74abb32 XL |
512 | fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { |
513 | trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location); | |
514 | ||
60c5eb7d | 515 | // Special-case reborrows to be more like a copy of a reference. |
dfeec247 | 516 | match *rvalue { |
ba9703b0 | 517 | Rvalue::Ref(_, kind, place) => { |
6a06907d | 518 | if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) { |
dfeec247 XL |
519 | let ctx = match kind { |
520 | BorrowKind::Shared => { | |
521 | PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow) | |
522 | } | |
523 | BorrowKind::Shallow => { | |
524 | PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow) | |
525 | } | |
526 | BorrowKind::Unique => { | |
527 | PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow) | |
528 | } | |
529 | BorrowKind::Mut { .. } => { | |
530 | PlaceContext::MutatingUse(MutatingUseContext::Borrow) | |
531 | } | |
532 | }; | |
6a06907d XL |
533 | self.visit_local(&reborrowed_place_ref.local, ctx, location); |
534 | self.visit_projection(reborrowed_place_ref, ctx, location); | |
dfeec247 XL |
535 | return; |
536 | } | |
e74abb32 | 537 | } |
ba9703b0 | 538 | Rvalue::AddressOf(mutbl, place) => { |
6a06907d | 539 | if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) { |
dfeec247 XL |
540 | let ctx = match mutbl { |
541 | Mutability::Not => { | |
542 | PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf) | |
543 | } | |
544 | Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf), | |
545 | }; | |
6a06907d XL |
546 | self.visit_local(&reborrowed_place_ref.local, ctx, location); |
547 | self.visit_projection(reborrowed_place_ref, ctx, location); | |
dfeec247 XL |
548 | return; |
549 | } | |
550 | } | |
551 | _ => {} | |
e74abb32 XL |
552 | } |
553 | ||
60c5eb7d XL |
554 | self.super_rvalue(rvalue, location); |
555 | ||
e74abb32 | 556 | match *rvalue { |
f9f354fc XL |
557 | Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess), |
558 | ||
dfeec247 XL |
559 | Rvalue::Use(_) |
560 | | Rvalue::Repeat(..) | |
dfeec247 XL |
561 | | Rvalue::Discriminant(..) |
562 | | Rvalue::Len(_) | |
563 | | Rvalue::Aggregate(..) => {} | |
564 | ||
565 | Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place) | |
566 | | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => { | |
f9f354fc | 567 | let ty = place.ty(self.body, self.tcx).ty; |
1b1a35ee | 568 | let is_allowed = match ty.kind() { |
60c5eb7d | 569 | // Inside a `static mut`, `&mut [...]` is allowed. |
f9f354fc XL |
570 | ty::Array(..) | ty::Slice(_) |
571 | if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) => | |
572 | { | |
dfeec247 XL |
573 | true |
574 | } | |
60c5eb7d XL |
575 | |
576 | // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given | |
577 | // that this is merely a ZST and it is already eligible for promotion. | |
578 | // This may require an RFC? | |
579 | /* | |
580 | ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0) | |
581 | => true, | |
582 | */ | |
60c5eb7d XL |
583 | _ => false, |
584 | }; | |
585 | ||
586 | if !is_allowed { | |
dfeec247 | 587 | if let BorrowKind::Mut { .. } = kind { |
5869c6ff | 588 | self.check_mut_borrow(place.local, hir::BorrowKind::Ref) |
60c5eb7d XL |
589 | } else { |
590 | self.check_op(ops::CellBorrow); | |
591 | } | |
592 | } | |
593 | } | |
594 | ||
5869c6ff XL |
595 | Rvalue::AddressOf(Mutability::Mut, ref place) => { |
596 | self.check_mut_borrow(place.local, hir::BorrowKind::Raw) | |
29967ef6 | 597 | } |
dfeec247 | 598 | |
ba9703b0 | 599 | Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place) |
74b04a01 | 600 | | Rvalue::AddressOf(Mutability::Not, ref place) => { |
ba9703b0 | 601 | let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>( |
f9f354fc XL |
602 | &self.ccx, |
603 | &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location), | |
74b04a01 XL |
604 | place.as_ref(), |
605 | ); | |
606 | ||
607 | if borrowed_place_has_mut_interior { | |
5869c6ff XL |
608 | match self.const_kind() { |
609 | // In a const fn all borrows are transient or point to the places given via | |
610 | // references in the arguments (so we already checked them with | |
611 | // TransientCellBorrow/CellBorrow as appropriate). | |
612 | // The borrow checker guarantees that no new non-transient borrows are created. | |
613 | // NOTE: Once we have heap allocations during CTFE we need to figure out | |
614 | // how to prevent `const fn` to create long-lived allocations that point | |
615 | // to (interior) mutable memory. | |
616 | hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow), | |
617 | _ => { | |
618 | // Locals with StorageDead are definitely not part of the final constant value, and | |
619 | // it is thus inherently safe to permit such locals to have their | |
620 | // address taken as we can't end up with a reference to them in the | |
621 | // final value. | |
622 | // Note: This is only sound if every local that has a `StorageDead` has a | |
623 | // `StorageDead` in every control flow path leading to a `return` terminator. | |
624 | if self.local_has_storage_dead(place.local) { | |
625 | self.check_op(ops::TransientCellBorrow); | |
626 | } else { | |
627 | self.check_op(ops::CellBorrow); | |
628 | } | |
629 | } | |
630 | } | |
74b04a01 | 631 | } |
60c5eb7d XL |
632 | } |
633 | ||
1b1a35ee XL |
634 | Rvalue::Cast( |
635 | CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer), | |
636 | _, | |
637 | _, | |
638 | ) => {} | |
639 | ||
640 | Rvalue::Cast( | |
641 | CastKind::Pointer( | |
642 | PointerCast::UnsafeFnPointer | |
643 | | PointerCast::ClosureFnPointer(_) | |
644 | | PointerCast::ReifyFnPointer, | |
645 | ), | |
646 | _, | |
647 | _, | |
648 | ) => self.check_op(ops::FnPtrCast), | |
649 | ||
650 | Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, cast_ty) => { | |
651 | if let Some(TypeAndMut { ty, .. }) = cast_ty.builtin_deref(true) { | |
652 | let unsized_ty = self.tcx.struct_tail_erasing_lifetimes(ty, self.param_env); | |
653 | ||
654 | // Casting/coercing things to slices is fine. | |
655 | if let ty::Slice(_) | ty::Str = unsized_ty.kind() { | |
656 | return; | |
657 | } | |
658 | } | |
659 | ||
660 | self.check_op(ops::UnsizingCast); | |
661 | } | |
662 | ||
e74abb32 | 663 | Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => { |
f9f354fc | 664 | let operand_ty = operand.ty(self.body, self.tcx); |
e74abb32 XL |
665 | let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast"); |
666 | let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); | |
667 | ||
ba9703b0 | 668 | if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) { |
e74abb32 XL |
669 | self.check_op(ops::RawPtrToIntCast); |
670 | } | |
671 | } | |
672 | ||
1b1a35ee XL |
673 | Rvalue::NullaryOp(NullOp::SizeOf, _) => {} |
674 | Rvalue::NullaryOp(NullOp::Box, _) => self.check_op(ops::HeapAllocation), | |
675 | ||
676 | Rvalue::UnaryOp(_, ref operand) => { | |
677 | let ty = operand.ty(self.body, self.tcx); | |
678 | if is_int_bool_or_char(ty) { | |
679 | // Int, bool, and char operations are fine. | |
680 | } else if ty.is_floating_point() { | |
681 | self.check_op(ops::FloatingPointOp); | |
682 | } else { | |
683 | span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty); | |
684 | } | |
685 | } | |
686 | ||
6a06907d XL |
687 | Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) |
688 | | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => { | |
1b1a35ee XL |
689 | let lhs_ty = lhs.ty(self.body, self.tcx); |
690 | let rhs_ty = rhs.ty(self.body, self.tcx); | |
691 | ||
692 | if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) { | |
693 | // Int, bool, and char operations are fine. | |
694 | } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() { | |
695 | assert_eq!(lhs_ty, rhs_ty); | |
dfeec247 XL |
696 | assert!( |
697 | op == BinOp::Eq | |
698 | || op == BinOp::Ne | |
699 | || op == BinOp::Le | |
700 | || op == BinOp::Lt | |
701 | || op == BinOp::Ge | |
702 | || op == BinOp::Gt | |
703 | || op == BinOp::Offset | |
704 | ); | |
e74abb32 XL |
705 | |
706 | self.check_op(ops::RawPtrComparison); | |
1b1a35ee XL |
707 | } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() { |
708 | self.check_op(ops::FloatingPointOp); | |
709 | } else { | |
710 | span_bug!( | |
711 | self.span, | |
712 | "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}", | |
713 | lhs_ty, | |
714 | rhs_ty | |
715 | ); | |
e74abb32 XL |
716 | } |
717 | } | |
e74abb32 XL |
718 | } |
719 | } | |
720 | ||
dfeec247 | 721 | fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) { |
60c5eb7d XL |
722 | self.super_operand(op, location); |
723 | if let Operand::Constant(c) = op { | |
724 | if let Some(def_id) = c.check_static_ptr(self.tcx) { | |
725 | self.check_static(def_id, self.span); | |
e74abb32 XL |
726 | } |
727 | } | |
e74abb32 | 728 | } |
e74abb32 XL |
729 | fn visit_projection_elem( |
730 | &mut self, | |
74b04a01 | 731 | place_local: Local, |
e74abb32 | 732 | proj_base: &[PlaceElem<'tcx>], |
f9f354fc | 733 | elem: PlaceElem<'tcx>, |
e74abb32 XL |
734 | context: PlaceContext, |
735 | location: Location, | |
736 | ) { | |
737 | trace!( | |
dfeec247 | 738 | "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \ |
e74abb32 | 739 | context={:?} location={:?}", |
dfeec247 | 740 | place_local, |
e74abb32 XL |
741 | proj_base, |
742 | elem, | |
743 | context, | |
744 | location, | |
745 | ); | |
746 | ||
dfeec247 | 747 | self.super_projection_elem(place_local, proj_base, elem, context, location); |
e74abb32 XL |
748 | |
749 | match elem { | |
750 | ProjectionElem::Deref => { | |
f9f354fc | 751 | let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty; |
1b1a35ee | 752 | if let ty::RawPtr(_) = base_ty.kind() { |
60c5eb7d | 753 | if proj_base.is_empty() { |
dfeec247 | 754 | if let (local, []) = (place_local, proj_base) { |
74b04a01 | 755 | let decl = &self.body.local_decls[local]; |
f9f354fc | 756 | if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info { |
60c5eb7d XL |
757 | let span = decl.source_info.span; |
758 | self.check_static(def_id, span); | |
759 | return; | |
760 | } | |
761 | } | |
762 | } | |
e74abb32 XL |
763 | self.check_op(ops::RawPtrDeref); |
764 | } | |
60c5eb7d XL |
765 | |
766 | if context.is_mutating_use() { | |
767 | self.check_op(ops::MutDeref); | |
768 | } | |
e74abb32 XL |
769 | } |
770 | ||
dfeec247 | 771 | ProjectionElem::ConstantIndex { .. } |
f9f354fc | 772 | | ProjectionElem::Downcast(..) |
dfeec247 XL |
773 | | ProjectionElem::Subslice { .. } |
774 | | ProjectionElem::Field(..) | |
775 | | ProjectionElem::Index(_) => { | |
f9f354fc | 776 | let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty; |
e74abb32 XL |
777 | match base_ty.ty_adt_def() { |
778 | Some(def) if def.is_union() => { | |
779 | self.check_op(ops::UnionAccess); | |
780 | } | |
781 | ||
782 | _ => {} | |
783 | } | |
784 | } | |
e74abb32 XL |
785 | } |
786 | } | |
787 | ||
e74abb32 XL |
788 | fn visit_source_info(&mut self, source_info: &SourceInfo) { |
789 | trace!("visit_source_info: source_info={:?}", source_info); | |
790 | self.span = source_info.span; | |
791 | } | |
792 | ||
793 | fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { | |
794 | trace!("visit_statement: statement={:?} location={:?}", statement, location); | |
795 | ||
fc512014 | 796 | self.super_statement(statement, location); |
f9f354fc | 797 | |
fc512014 | 798 | match statement.kind { |
f9f354fc | 799 | StatementKind::LlvmInlineAsm { .. } => { |
f9f354fc XL |
800 | self.check_op(ops::InlineAsm); |
801 | } | |
802 | ||
fc512014 XL |
803 | StatementKind::Assign(..) |
804 | | StatementKind::SetDiscriminant { .. } | |
805 | | StatementKind::FakeRead(..) | |
dfeec247 XL |
806 | | StatementKind::StorageLive(_) |
807 | | StatementKind::StorageDead(_) | |
dfeec247 XL |
808 | | StatementKind::Retag { .. } |
809 | | StatementKind::AscribeUserType(..) | |
3dfed10e | 810 | | StatementKind::Coverage(..) |
6a06907d | 811 | | StatementKind::CopyNonOverlapping(..) |
dfeec247 | 812 | | StatementKind::Nop => {} |
e74abb32 XL |
813 | } |
814 | } | |
815 | ||
    /// Checks a terminator for operations that are illegal in a const context.
    ///
    /// The bulk of the work is for `Call` terminators: deciding whether the
    /// callee may be invoked from this const context (const trait impls,
    /// panic lang items, intrinsics, const-stability gating). `Drop`-like
    /// terminators are checked for live drops of types that need `Drop`.
    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        use rustc_target::spec::abi::Abi::RustIntrinsic;

        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, .. } => {
                let ConstCx { tcx, body, param_env, .. } = *self.ccx;
                let caller = self.def_id().to_def_id();

                let fn_ty = func.ty(body, tcx);

                // Only direct calls (`FnDef`) can be checked; indirect calls via
                // function pointers are rejected outright.
                let (mut callee, substs) = match *fn_ty.kind() {
                    ty::FnDef(def_id, substs) => (def_id, substs),

                    ty::FnPtr(_) => {
                        self.check_op(ops::FnCallIndirect);
                        return;
                    }
                    _ => {
                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                    }
                };

                // Attempting to call a trait method?
                if let Some(trait_id) = tcx.trait_of_item(callee) {
                    trace!("attempting to call a trait method");
                    // Without the `const_trait_impl` feature, any trait-method
                    // call is a non-const fn call.
                    if !self.tcx.features().const_trait_impl {
                        self.check_op(ops::FnCallNonConst);
                        return;
                    }

                    let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
                    let obligation = Obligation::new(
                        ObligationCause::dummy(),
                        param_env,
                        Binder::bind(TraitPredicate {
                            trait_ref: TraitRef::from_method(tcx, trait_id, substs),
                        }),
                    );

                    // Select the impl source for the trait obligation in a fresh
                    // inference context.
                    let implsrc = tcx.infer_ctxt().enter(|infcx| {
                        let mut selcx = SelectionContext::new(&infcx);
                        selcx.select(&obligation).unwrap()
                    });

                    // If the method is provided via a where-clause that does not use the `?const`
                    // opt-out, the call is allowed.
                    if let Some(ImplSource::Param(_, hir::Constness::Const)) = implsrc {
                        debug!(
                            "const_trait_impl: provided {:?} via where-clause in {:?}",
                            trait_ref, param_env
                        );
                        return;
                    }

                    // Resolve a trait method call to its concrete implementation, which may be in a
                    // `const` trait impl. From here on, `callee` is the resolved item.
                    let instance = Instance::resolve(tcx, param_env, callee, substs);
                    debug!("Resolving ({:?}) -> {:?}", callee, instance);
                    if let Ok(Some(func)) = instance {
                        if let InstanceDef::Item(def) = func.def {
                            callee = def.did;
                        }
                    }
                }

                // At this point, we are calling a function, `callee`, whose `DefId` is known...

                // Panic lang items get their own (feature-gated) `check_op`.
                if is_lang_panic_fn(tcx, callee) {
                    self.check_op(ops::Panic);

                    // const-eval of the `begin_panic` fn assumes the argument is `&str`
                    if Some(callee) == tcx.lang_items().begin_panic_fn() {
                        match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
                            ty::Ref(_, ty, _) if ty.is_str() => (),
                            _ => self.check_op(ops::PanicNonStr),
                        }
                    }

                    return;
                }

                // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
                let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
                if is_async_block {
                    let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
                    self.check_op(ops::Generator(kind));
                    return;
                }

                let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;

                // HACK: This is to "unstabilize" the `transmute` intrinsic
                // within const fns. `transmute` is allowed in all other const contexts.
                // This won't really scale to more intrinsics or functions. Let's allow const
                // transmutes in const fn before we add more hacks to this.
                if is_intrinsic && tcx.item_name(callee) == sym::transmute {
                    self.check_op(ops::Transmute);
                    return;
                }

                if !tcx.is_const_fn_raw(callee) {
                    self.check_op(ops::FnCallNonConst);
                    return;
                }

                // If the `const fn` we are trying to call is not const-stable, ensure that we have
                // the proper feature gate enabled.
                if let Some(gate) = is_unstable_const_fn(tcx, callee) {
                    trace!(?gate, "calling unstable const fn");
                    // Spans marked by macro expansion of the gated feature are allowed.
                    if self.span.allows_unstable(gate) {
                        return;
                    }

                    // Calling an unstable function *always* requires that the corresponding gate
                    // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
                    if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
                        self.check_op(ops::FnCallUnstable(callee, Some(gate)));
                        return;
                    }

                    // If this crate is not using stability attributes, or the caller is not claiming to be a
                    // stable `const fn`, that is all that is required.
                    if !self.ccx.is_const_stable_const_fn() {
                        trace!("crate not using stability attributes or caller not stably const");
                        return;
                    }

                    // Otherwise, we are something const-stable calling a const-unstable fn.

                    if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
                        trace!("rustc_allow_const_fn_unstable gate active");
                        return;
                    }

                    self.check_op(ops::FnCallUnstable(callee, Some(gate)));
                    return;
                }

                // FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
                // have no `rustc_const_stable` attributes to be const-unstable as well. This
                // should be fixed later.
                let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
                    && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
                if callee_is_unstable_unmarked {
                    trace!("callee_is_unstable_unmarked");
                    // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
                    // `extern` funtions, and these have no way to get marked `const`. So instead we
                    // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
                    if self.ccx.is_const_stable_const_fn() || is_intrinsic {
                        self.check_op(ops::FnCallUnstable(callee, None));
                        return;
                    }
                }
                trace!("permitting call");
            }

            // Forbid all `Drop` terminators unless the place being dropped is a local with no
            // projections that cannot be `NeedsDrop`.
            TerminatorKind::Drop { place: dropped_place, .. }
            | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
                // If we are checking live drops after drop-elaboration, don't emit duplicate
                // errors here.
                if super::post_drop_elaboration::checking_enabled(self.ccx) {
                    return;
                }

                let mut err_span = self.span;

                // Check to see if the type of this place can ever have a drop impl. If not, this
                // `Drop` terminator is frivolous.
                let ty_needs_drop =
                    dropped_place.ty(self.body, self.tcx).ty.needs_drop(self.tcx, self.param_env);

                if !ty_needs_drop {
                    return;
                }

                // Only a bare local can be proven drop-free by the dataflow
                // qualif analysis; any projected place is conservatively assumed
                // to need dropping.
                let needs_drop = if let Some(local) = dropped_place.as_local() {
                    // Use the span where the local was declared as the span of the drop error.
                    err_span = self.body.local_decls[local].source_info.span;
                    self.qualifs.needs_drop(self.ccx, local, location)
                } else {
                    true
                };

                if needs_drop {
                    self.check_op_spanned(
                        ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
                        err_span,
                    );
                }
            }

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
                self.check_op(ops::Generator(hir::GeneratorKind::Gen))
            }

            TerminatorKind::Abort => {
                // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
                span_bug!(self.span, "`Abort` terminator outside of cleanup block")
            }

            // These terminators require no const checking of their own.
            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::Resume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
        }
    }
1032 | } | |
60c5eb7d | 1033 | |
60c5eb7d XL |
1034 | fn check_return_ty_is_sync(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, hir_id: HirId) { |
1035 | let ty = body.return_ty(); | |
1036 | tcx.infer_ctxt().enter(|infcx| { | |
1037 | let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic); | |
1038 | let mut fulfillment_cx = traits::FulfillmentContext::new(); | |
3dfed10e | 1039 | let sync_def_id = tcx.require_lang_item(LangItem::Sync, Some(body.span)); |
60c5eb7d XL |
1040 | fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause); |
1041 | if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) { | |
1042 | infcx.report_fulfillment_errors(&err, None, false); | |
1043 | } | |
1044 | }); | |
1045 | } | |
1046 | ||
1047 | fn place_as_reborrow( | |
1048 | tcx: TyCtxt<'tcx>, | |
1049 | body: &Body<'tcx>, | |
ba9703b0 | 1050 | place: Place<'tcx>, |
6a06907d | 1051 | ) -> Option<PlaceRef<'tcx>> { |
5869c6ff XL |
1052 | match place.as_ref().last_projection() { |
1053 | Some((place_base, ProjectionElem::Deref)) => { | |
1054 | // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const` | |
1055 | // that points to the allocation for the static. Don't treat these as reborrows. | |
1056 | if body.local_decls[place_base.local].is_ref_to_static() { | |
1057 | None | |
1058 | } else { | |
1059 | // Ensure the type being derefed is a reference and not a raw pointer. | |
5869c6ff XL |
1060 | // This is sufficient to prevent an access to a `static mut` from being marked as a |
1061 | // reborrow, even if the check above were to disappear. | |
1062 | let inner_ty = place_base.ty(body, tcx).ty; | |
6a06907d XL |
1063 | |
1064 | if let ty::Ref(..) = inner_ty.kind() { | |
1065 | return Some(place_base); | |
1066 | } else { | |
1067 | return None; | |
5869c6ff XL |
1068 | } |
1069 | } | |
dfeec247 | 1070 | } |
5869c6ff XL |
1071 | _ => None, |
1072 | } | |
60c5eb7d | 1073 | } |
1b1a35ee XL |
1074 | |
1075 | fn is_int_bool_or_char(ty: Ty<'_>) -> bool { | |
1076 | ty.is_bool() || ty.is_integral() || ty.is_char() | |
1077 | } | |
1078 | ||
1079 | fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool { | |
1080 | ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async) | |
1081 | } | |
1082 | ||
1083 | fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) { | |
1084 | let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo()); | |
1085 | ||
1086 | ccx.tcx | |
1087 | .sess | |
1088 | .struct_span_err( | |
1089 | span, | |
1090 | &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()), | |
1091 | ) | |
1092 | .span_suggestion( | |
1093 | attr_span, | |
1094 | "if it is not part of the public API, make this function unstably const", | |
1095 | concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(), | |
1096 | Applicability::HasPlaceholders, | |
1097 | ) | |
1098 | .span_suggestion( | |
1099 | attr_span, | |
29967ef6 XL |
1100 | "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks", |
1101 | format!("#[rustc_allow_const_fn_unstable({})]\n", gate), | |
1b1a35ee XL |
1102 | Applicability::MaybeIncorrect, |
1103 | ) | |
1104 | .emit(); | |
1105 | } |