//! Type-checking for the rust-intrinsic and platform-intrinsic
//! intrinsics that the compiler exposes.

use crate::require_same_types;

use rustc::hir;
use rustc::traits::{ObligationCause, ObligationCauseCode};
use rustc::ty::subst::Subst;
use rustc::ty::{self, Ty, TyCtxt};
use rustc_target::spec::abi::Abi;
use syntax::symbol::Symbol;

use std::iter;

use rustc_error_codes::*;
18 fn equate_intrinsic_type
<'tcx
>(
20 it
: &hir
::ForeignItem
,
23 safety
: hir
::Unsafety
,
24 inputs
: Vec
<Ty
<'tcx
>>,
27 let def_id
= tcx
.hir().local_def_id(it
.hir_id
);
30 hir
::ForeignItemKind
::Fn(..) => {}
32 struct_span_err
!(tcx
.sess
, it
.span
, E0622
,
33 "intrinsic must be a function")
34 .span_label(it
.span
, "expected a function")
40 let i_n_tps
= tcx
.generics_of(def_id
).own_counts().types
;
42 let span
= match it
.kind
{
43 hir
::ForeignItemKind
::Fn(_
, _
, ref generics
) => generics
.span
,
47 struct_span_err
!(tcx
.sess
, span
, E0094
,
48 "intrinsic has wrong number of type \
49 parameters: found {}, expected {}",
51 .span_label(span
, format
!("expected {} type parameter", n_tps
))
56 let fty
= tcx
.mk_fn_ptr(ty
::Binder
::bind(tcx
.mk_fn_sig(
63 let cause
= ObligationCause
::new(it
.span
, it
.hir_id
, ObligationCauseCode
::IntrinsicType
);
64 require_same_types(tcx
, &cause
, tcx
.mk_fn_ptr(tcx
.fn_sig(def_id
)), fty
);
67 /// Returns `true` if the given intrinsic is unsafe to call or not.
68 pub fn intrinsic_operation_unsafety(intrinsic
: &str) -> hir
::Unsafety
{
70 "size_of" | "min_align_of" | "needs_drop" | "caller_location" |
71 "size_of_val" | "min_align_of_val" |
72 "add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" |
73 "wrapping_add" | "wrapping_sub" | "wrapping_mul" |
74 "saturating_add" | "saturating_sub" |
75 "rotate_left" | "rotate_right" |
76 "ctpop" | "ctlz" | "cttz" | "bswap" | "bitreverse" |
77 "discriminant_value" | "type_id" | "likely" | "unlikely" |
78 "minnumf32" | "minnumf64" | "maxnumf32" | "maxnumf64" | "type_name"
79 => hir
::Unsafety
::Normal
,
80 _
=> hir
::Unsafety
::Unsafe
,
84 /// Remember to add all intrinsics here, in librustc_codegen_llvm/intrinsic.rs,
85 /// and in libcore/intrinsics.rs
86 pub fn check_intrinsic_type(tcx
: TyCtxt
<'_
>, it
: &hir
::ForeignItem
) {
87 let param
= |n
| tcx
.mk_ty_param(n
, Symbol
::intern(&format
!("P{}", n
)));
88 let name
= it
.ident
.as_str();
90 let mk_va_list_ty
= |mutbl
| {
91 tcx
.lang_items().va_list().map(|did
| {
92 let region
= tcx
.mk_region(ty
::ReLateBound(ty
::INNERMOST
, ty
::BrAnon(0)));
93 let env_region
= ty
::ReLateBound(ty
::INNERMOST
, ty
::BrEnv
);
94 let va_list_ty
= tcx
.type_of(did
).subst(tcx
, &[region
.into()]);
95 (tcx
.mk_ref(tcx
.mk_region(env_region
), ty
::TypeAndMut
{
102 let (n_tps
, inputs
, output
, unsafety
) = if name
.starts_with("atomic_") {
103 let split
: Vec
<&str> = name
.split('_'
).collect();
104 assert
!(split
.len() >= 2, "Atomic intrinsic in an incorrect format");
106 //We only care about the operation here
107 let (n_tps
, inputs
, output
) = match split
[1] {
108 "cxchg" | "cxchgweak" => (1, vec
![tcx
.mk_mut_ptr(param(0)),
111 tcx
.intern_tup(&[param(0), tcx
.types
.bool
])),
112 "load" => (1, vec
![tcx
.mk_imm_ptr(param(0))],
114 "store" => (1, vec
![tcx
.mk_mut_ptr(param(0)), param(0)],
117 "xchg" | "xadd" | "xsub" | "and" | "nand" | "or" | "xor" | "max" |
118 "min" | "umax" | "umin" => {
119 (1, vec
![tcx
.mk_mut_ptr(param(0)), param(0)],
122 "fence" | "singlethreadfence" => {
123 (0, Vec
::new(), tcx
.mk_unit())
126 struct_span_err
!(tcx
.sess
, it
.span
, E0092
,
127 "unrecognized atomic operation function: `{}`", op
)
128 .span_label(it
.span
, "unrecognized atomic operation")
133 (n_tps
, inputs
, output
, hir
::Unsafety
::Unsafe
)
134 } else if &name
[..] == "abort" || &name
[..] == "unreachable" {
135 (0, Vec
::new(), tcx
.types
.never
, hir
::Unsafety
::Unsafe
)
137 let unsafety
= intrinsic_operation_unsafety(&name
[..]);
138 let (n_tps
, inputs
, output
) = match &name
[..] {
139 "breakpoint" => (0, Vec
::new(), tcx
.mk_unit()),
141 "pref_align_of" | "min_align_of" => (1, Vec
::new(), tcx
.types
.usize),
142 "size_of_val" | "min_align_of_val" => {
144 tcx
.mk_imm_ref(tcx
.mk_region(ty
::ReLateBound(ty
::INNERMOST
,
149 "rustc_peek" => (1, vec
![param(0)], param(0)),
150 "caller_location" => (0, vec
![], tcx
.caller_location_ty()),
151 "panic_if_uninhabited" => (1, Vec
::new(), tcx
.mk_unit()),
152 "init" => (1, Vec
::new(), param(0)),
153 "uninit" => (1, Vec
::new(), param(0)),
154 "forget" => (1, vec
![param(0)], tcx
.mk_unit()),
155 "transmute" => (2, vec
![ param(0) ], param(1)),
159 tcx
.mk_mut_ptr(param(0)),
164 "prefetch_read_data" | "prefetch_write_data" |
165 "prefetch_read_instruction" | "prefetch_write_instruction" => {
166 (1, vec
![tcx
.mk_ptr(ty
::TypeAndMut
{
168 mutbl
: hir
::Mutability
::Immutable
173 (1, vec
![tcx
.mk_mut_ptr(param(0))], tcx
.mk_unit())
175 "needs_drop" => (1, Vec
::new(), tcx
.types
.bool
),
177 "type_name" => (1, Vec
::new(), tcx
.mk_static_str()),
178 "type_id" => (1, Vec
::new(), tcx
.types
.u64),
179 "offset" | "arith_offset" => {
182 tcx
.mk_ptr(ty
::TypeAndMut
{
184 mutbl
: hir
::Mutability
::Immutable
188 tcx
.mk_ptr(ty
::TypeAndMut
{
190 mutbl
: hir
::Mutability
::Immutable
193 "copy" | "copy_nonoverlapping" => {
196 tcx
.mk_ptr(ty
::TypeAndMut
{
198 mutbl
: hir
::Mutability
::Immutable
200 tcx
.mk_ptr(ty
::TypeAndMut
{
202 mutbl
: hir
::Mutability
::Mutable
208 "volatile_copy_memory" | "volatile_copy_nonoverlapping_memory" => {
211 tcx
.mk_ptr(ty
::TypeAndMut
{
213 mutbl
: hir
::Mutability
::Mutable
215 tcx
.mk_ptr(ty
::TypeAndMut
{
217 mutbl
: hir
::Mutability
::Immutable
223 "write_bytes" | "volatile_set_memory" => {
226 tcx
.mk_ptr(ty
::TypeAndMut
{
228 mutbl
: hir
::Mutability
::Mutable
235 "sqrtf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
236 "sqrtf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
239 vec
![ tcx
.types
.f32, tcx
.types
.i32 ],
244 vec
![ tcx
.types
.f64, tcx
.types
.i32 ],
247 "sinf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
248 "sinf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
249 "cosf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
250 "cosf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
253 vec
![ tcx
.types
.f32, tcx
.types
.f32 ],
258 vec
![ tcx
.types
.f64, tcx
.types
.f64 ],
261 "expf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
262 "expf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
263 "exp2f32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
264 "exp2f64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
265 "logf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
266 "logf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
267 "log10f32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
268 "log10f64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
269 "log2f32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
270 "log2f64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
273 vec
![ tcx
.types
.f32, tcx
.types
.f32, tcx
.types
.f32 ],
278 vec
![ tcx
.types
.f64, tcx
.types
.f64, tcx
.types
.f64 ],
281 "fabsf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
282 "fabsf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
283 "minnumf32" => (0, vec
![ tcx
.types
.f32, tcx
.types
.f32 ], tcx
.types
.f32),
284 "minnumf64" => (0, vec
![ tcx
.types
.f64, tcx
.types
.f64 ], tcx
.types
.f64),
285 "maxnumf32" => (0, vec
![ tcx
.types
.f32, tcx
.types
.f32 ], tcx
.types
.f32),
286 "maxnumf64" => (0, vec
![ tcx
.types
.f64, tcx
.types
.f64 ], tcx
.types
.f64),
287 "copysignf32" => (0, vec
![ tcx
.types
.f32, tcx
.types
.f32 ], tcx
.types
.f32),
288 "copysignf64" => (0, vec
![ tcx
.types
.f64, tcx
.types
.f64 ], tcx
.types
.f64),
289 "floorf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
290 "floorf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
291 "ceilf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
292 "ceilf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
293 "truncf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
294 "truncf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
295 "rintf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
296 "rintf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
297 "nearbyintf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
298 "nearbyintf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
299 "roundf32" => (0, vec
![ tcx
.types
.f32 ], tcx
.types
.f32),
300 "roundf64" => (0, vec
![ tcx
.types
.f64 ], tcx
.types
.f64),
302 "volatile_load" | "unaligned_volatile_load" =>
303 (1, vec
![ tcx
.mk_imm_ptr(param(0)) ], param(0)),
304 "volatile_store" | "unaligned_volatile_store" =>
305 (1, vec
![ tcx
.mk_mut_ptr(param(0)), param(0) ], tcx
.mk_unit()),
307 "ctpop" | "ctlz" | "ctlz_nonzero" | "cttz" | "cttz_nonzero" |
308 "bswap" | "bitreverse" =>
309 (1, vec
![param(0)], param(0)),
311 "add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" =>
312 (1, vec
![param(0), param(0)],
313 tcx
.intern_tup(&[param(0), tcx
.types
.bool
])),
316 (1, vec
![ tcx
.mk_imm_ptr(param(0)), tcx
.mk_imm_ptr(param(0)) ], tcx
.types
.isize),
317 "unchecked_div" | "unchecked_rem" | "exact_div" =>
318 (1, vec
![param(0), param(0)], param(0)),
319 "unchecked_shl" | "unchecked_shr" |
320 "rotate_left" | "rotate_right" =>
321 (1, vec
![param(0), param(0)], param(0)),
322 "unchecked_add" | "unchecked_sub" | "unchecked_mul" =>
323 (1, vec
![param(0), param(0)], param(0)),
324 "wrapping_add" | "wrapping_sub" | "wrapping_mul" =>
325 (1, vec
![param(0), param(0)], param(0)),
326 "saturating_add" | "saturating_sub" =>
327 (1, vec
![param(0), param(0)], param(0)),
328 "fadd_fast" | "fsub_fast" | "fmul_fast" | "fdiv_fast" | "frem_fast" =>
329 (1, vec
![param(0), param(0)], param(0)),
330 "float_to_int_approx_unchecked" => (2, vec
![ param(0) ], param(1)),
332 "assume" => (0, vec
![tcx
.types
.bool
], tcx
.mk_unit()),
333 "likely" => (0, vec
![tcx
.types
.bool
], tcx
.types
.bool
),
334 "unlikely" => (0, vec
![tcx
.types
.bool
], tcx
.types
.bool
),
336 "discriminant_value" => (1, vec
![
337 tcx
.mk_imm_ref(tcx
.mk_region(ty
::ReLateBound(ty
::INNERMOST
,
339 param(0))], tcx
.types
.u64),
342 let mut_u8
= tcx
.mk_mut_ptr(tcx
.types
.u8);
343 let fn_ty
= ty
::Binder
::bind(tcx
.mk_fn_sig(
347 hir
::Unsafety
::Normal
,
350 (0, vec
![tcx
.mk_fn_ptr(fn_ty
), mut_u8
, mut_u8
], tcx
.types
.i32)
353 "va_start" | "va_end" => {
354 match mk_va_list_ty(hir
::Mutability
::Mutable
) {
355 Some((va_list_ref_ty
, _
)) => (0, vec
![va_list_ref_ty
], tcx
.mk_unit()),
356 None
=> bug
!("`va_list` language item needed for C-variadic intrinsics")
361 match mk_va_list_ty(hir
::Mutability
::Immutable
) {
362 Some((va_list_ref_ty
, va_list_ty
)) => {
363 let va_list_ptr_ty
= tcx
.mk_mut_ptr(va_list_ty
);
364 (0, vec
![va_list_ptr_ty
, va_list_ref_ty
], tcx
.mk_unit())
366 None
=> bug
!("`va_list` language item needed for C-variadic intrinsics")
371 match mk_va_list_ty(hir
::Mutability
::Mutable
) {
372 Some((va_list_ref_ty
, _
)) => (1, vec
![va_list_ref_ty
], param(0)),
373 None
=> bug
!("`va_list` language item needed for C-variadic intrinsics")
377 "nontemporal_store" => {
378 (1, vec
![ tcx
.mk_mut_ptr(param(0)), param(0) ], tcx
.mk_unit())
381 "miri_start_panic" => {
382 // FIXME - the relevant types aren't lang items,
383 // so it's not trivial to check this
388 struct_span_err
!(tcx
.sess
, it
.span
, E0093
,
389 "unrecognized intrinsic function: `{}`",
391 .span_label(it
.span
, "unrecognized intrinsic")
396 (n_tps
, inputs
, output
, unsafety
)
398 equate_intrinsic_type(tcx
, it
, n_tps
, Abi
::RustIntrinsic
, unsafety
, inputs
, output
)
401 /// Type-check `extern "platform-intrinsic" { ... }` functions.
402 pub fn check_platform_intrinsic_type(tcx
: TyCtxt
<'_
>, it
: &hir
::ForeignItem
) {
404 let name
= Symbol
::intern(&format
!("P{}", n
));
405 tcx
.mk_ty_param(n
, name
)
408 let name
= it
.ident
.as_str();
410 let (n_tps
, inputs
, output
) = match &*name
{
411 "simd_eq" | "simd_ne" | "simd_lt" | "simd_le" | "simd_gt" | "simd_ge" => {
412 (2, vec
![param(0), param(0)], param(1))
414 "simd_add" | "simd_sub" | "simd_mul" | "simd_rem" |
415 "simd_div" | "simd_shl" | "simd_shr" |
416 "simd_and" | "simd_or" | "simd_xor" |
417 "simd_fmin" | "simd_fmax" | "simd_fpow" |
418 "simd_saturating_add" | "simd_saturating_sub" => {
419 (1, vec
![param(0), param(0)], param(0))
421 "simd_fsqrt" | "simd_fsin" | "simd_fcos" | "simd_fexp" | "simd_fexp2" |
422 "simd_flog2" | "simd_flog10" | "simd_flog" |
423 "simd_fabs" | "simd_floor" | "simd_ceil" => {
424 (1, vec
![param(0)], param(0))
427 (1, vec
![param(0), tcx
.types
.i32], param(0))
430 (1, vec
![param(0), param(0), param(0)], param(0))
433 (3, vec
![param(0), param(1), param(2)], param(0))
436 (3, vec
![param(0), param(1), param(2)], tcx
.mk_unit())
438 "simd_insert" => (2, vec
![param(0), tcx
.types
.u32, param(1)], param(0)),
439 "simd_extract" => (2, vec
![param(0), tcx
.types
.u32], param(1)),
440 "simd_cast" => (2, vec
![param(0)], param(1)),
441 "simd_bitmask" => (2, vec
![param(0)], param(1)),
443 "simd_select_bitmask" => (2, vec
![param(0), param(1), param(1)], param(1)),
444 "simd_reduce_all" | "simd_reduce_any" => (1, vec
![param(0)], tcx
.types
.bool
),
445 "simd_reduce_add_ordered" | "simd_reduce_mul_ordered"
446 => (2, vec
![param(0), param(1)], param(1)),
447 "simd_reduce_add_unordered" | "simd_reduce_mul_unordered" |
448 "simd_reduce_and" | "simd_reduce_or" | "simd_reduce_xor" |
449 "simd_reduce_min" | "simd_reduce_max" |
450 "simd_reduce_min_nanless" | "simd_reduce_max_nanless"
451 => (2, vec
![param(0)], param(1)),
452 name
if name
.starts_with("simd_shuffle") => {
453 match name
["simd_shuffle".len()..].parse() {
455 let params
= vec
![param(0), param(0),
456 tcx
.mk_array(tcx
.types
.u32, n
)];
457 (2, params
, param(1))
460 span_err
!(tcx
.sess
, it
.span
, E0439
,
461 "invalid `simd_shuffle`, needs length: `{}`", name
);
467 let msg
= format
!("unrecognized platform-specific intrinsic function: `{}`", name
);
468 tcx
.sess
.span_err(it
.span
, &msg
);
473 equate_intrinsic_type(tcx
, it
, n_tps
, Abi
::PlatformIntrinsic
, hir
::Unsafety
::Unsafe
,