]> git.proxmox.com Git - rustc.git/blame - compiler/rustc_codegen_llvm/src/builder.rs
New upstream version 1.76.0+dfsg1
[rustc.git] / compiler / rustc_codegen_llvm / src / builder.rs
CommitLineData
2b03887a 1use crate::abi::FnAbiLlvmExt;
5e7ed085 2use crate::attributes;
9fa01778
XL
3use crate::common::Funclet;
4use crate::context::CodegenCx;
353b0b11 5use crate::llvm::{self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, True};
ed00b5ec 6use crate::llvm_util;
9fa01778
XL
7use crate::type_::Type;
8use crate::type_of::LayoutLlvmExt;
9use crate::value::Value;
dfeec247 10use libc::{c_char, c_uint};
2b03887a 11use rustc_codegen_ssa::common::{IntPredicate, RealPredicate, SynchronizationScope, TypeKind};
dfeec247 12use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
a1dfa0c6 13use rustc_codegen_ssa::mir::place::PlaceRef;
dfeec247
XL
14use rustc_codegen_ssa::traits::*;
15use rustc_codegen_ssa::MemFlags;
dfeec247
XL
16use rustc_data_structures::small_c_str::SmallCStr;
17use rustc_hir::def_id::DefId;
49aad941 18use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
c295e0f8
XL
19use rustc_middle::ty::layout::{
20 FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOfHelpers, TyAndLayout,
21};
ba9703b0 22use rustc_middle::ty::{self, Ty, TyCtxt};
cdc7bbd5 23use rustc_span::Span;
49aad941 24use rustc_symbol_mangling::typeid::{kcfi_typeid_for_fnabi, typeid_for_fnabi, TypeIdOptions};
c295e0f8 25use rustc_target::abi::{self, call::FnAbi, Align, Size, WrappingRange};
49aad941 26use rustc_target::spec::{HasTargetSpec, SanitizerSet, Target};
fe692bf9 27use smallvec::SmallVec;
1bb2cb6e 28use std::borrow::Cow;
cdc7bbd5 29use std::iter;
c295e0f8 30use std::ops::Deref;
85aaf69f 31use std::ptr;
1a4d82fc 32
32a655c1
SL
// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll, 'tcx> {
    // Exclusively-owned raw LLVM IR builder; disposed in `Drop`.
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    // Per-codegen-unit context; many trait impls below delegate to it.
    pub cx: &'a CodegenCx<'ll, 'tcx>,
}
39
impl Drop for Builder<'_, '_, '_> {
    fn drop(&mut self) {
        // SAFETY: `llbuilder` is held by `&mut` exclusive reference, so this is
        // the only place that disposes of the underlying LLVM builder, and it
        // happens exactly once.
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}
47
dc9dc135
XL
/// Empty string, to be used where LLVM expects an instruction name, indicating
/// that the instruction is to be left unnamed (i.e. numbered, in textual IR).
// FIXME(eddyb) pass `&CStr` directly to FFI once it's a thin pointer.
const UNNAMED: *const c_char = c"".as_ptr();
1a4d82fc 52
// The builder uses exactly the same backend type family as its `CodegenCx`,
// so every associated type simply re-exports the context's choice.
impl<'ll, 'tcx> BackendTypes for Builder<'_, 'll, 'tcx> {
    type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
    type Function = <CodegenCx<'ll, 'tcx> as BackendTypes>::Function;
    type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
    type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
    type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;

    type DIScope = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIScope;
    type DILocation = <CodegenCx<'ll, 'tcx> as BackendTypes>::DILocation;
    type DIVariable = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIVariable;
}
64
ba9703b0
XL
// Delegates target data-layout queries to the codegen context.
impl abi::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &abi::TargetDataLayout {
        self.cx.data_layout()
    }
}
70
// Delegates type-context access to the codegen context.
impl<'tcx> ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.cx.tcx
    }
}
77
// Delegates the parameter environment to the codegen context.
impl<'tcx> ty::layout::HasParamEnv<'tcx> for Builder<'_, '_, 'tcx> {
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.cx.param_env()
    }
}
83
// Delegates target-spec lookup to the codegen context.
impl HasTargetSpec for Builder<'_, '_, '_> {
    #[inline]
    fn target_spec(&self) -> &Target {
        self.cx.target_spec()
    }
}
90
// Layout errors are forwarded to the context, which diverges (`-> !`) when
// reporting them.
impl<'tcx> LayoutOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
    type LayoutOfResult = TyAndLayout<'tcx>;

    #[inline]
    fn handle_layout_err(&self, err: LayoutError<'tcx>, span: Span, ty: Ty<'tcx>) -> ! {
        self.cx.handle_layout_err(err, span, ty)
    }
}
99
// Fn-ABI errors are likewise forwarded to the context, which diverges when
// reporting them.
impl<'tcx> FnAbiOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
    type FnAbiOfResult = &'tcx FnAbi<'tcx, Ty<'tcx>>;

    #[inline]
    fn handle_fn_abi_err(
        &self,
        err: FnAbiError<'tcx>,
        span: Span,
        fn_abi_request: FnAbiRequest<'tcx>,
    ) -> ! {
        self.cx.handle_fn_abi_err(err, span, fn_abi_request)
    }
}
113
// Smart-pointer-style access: `bx.foo()` can call `CodegenCx` methods
// directly via deref coercion.
impl<'ll, 'tcx> Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.cx
    }
}
122
// Ties this builder to its concrete codegen context type.
impl<'ll, 'tcx> HasCodegen<'tcx> for Builder<'_, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;
}
126
dc9dc135
XL
/// Generates one `BuilderMethods` value-instruction method per entry: each
/// `$name($arg, ...) => $llvm_capi` expands to a method that forwards its
/// operands to the named LLVM C API builder function, producing an unnamed
/// (numbered) instruction.
macro_rules! builder_methods_for_value_instructions {
    ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
            }
        })+
    }
}
136
a2a8927a 137impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
17df50a5
XL
    /// Creates a new builder positioned at the end of basic block `llbb`.
    fn build(cx: &'a CodegenCx<'ll, 'tcx>, llbb: &'ll BasicBlock) -> Self {
        let bx = Builder::with_cx(cx);
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
        }
        bx
    }
145
17df50a5
XL
    /// Returns the codegen context this builder was created with.
    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }
149
    /// Returns the basic block the builder is currently inserting into.
    fn llbb(&self) -> &'ll BasicBlock {
        unsafe { llvm::LLVMGetInsertBlock(self.llbuilder) }
    }
153
29967ef6
XL
    // Intentionally a no-op: this backend does not track spans on the builder.
    fn set_span(&mut self, _span: Span) {}
155
    /// Appends a new basic block named `name` to the function `llfn`.
    fn append_block(cx: &'a CodegenCx<'ll, 'tcx>, llfn: &'ll Value, name: &str) -> &'ll BasicBlock {
        unsafe {
            // The name must be NUL-terminated for the C API.
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(cx.llcx, llfn, name.as_ptr())
        }
    }
162
17df50a5
XL
    /// Appends a new basic block to the same function the builder is in.
    fn append_sibling_block(&mut self, name: &str) -> &'ll BasicBlock {
        Self::append_block(self.cx, self.llfn(), name)
    }
166
5e7ed085
FG
    /// Repositions this builder at the end of `llbb` by replacing it with a
    /// freshly built one (the old underlying builder is disposed via `Drop`).
    fn switch_to_block(&mut self, llbb: Self::BasicBlock) {
        *self = Self::build(self.cx, llbb)
    }
170
    /// Emits a `ret void` terminator.
    fn ret_void(&mut self) {
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }
176
    /// Emits a `ret` terminator returning `v`.
    fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }
182
    /// Emits an unconditional branch to `dest`.
    fn br(&mut self, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }
188
a1dfa0c6
XL
    /// Emits a conditional branch: jumps to `then_llbb` if `cond` is true,
    /// otherwise to `else_llbb`.
    fn cond_br(
        &mut self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }
199
a1dfa0c6
XL
    /// Emits a `switch` on `v` with `else_llbb` as the default target and one
    /// case per `(value, destination)` pair in `cases`.
    fn switch(
        &mut self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)>,
    ) {
        // `cases.len()` is passed up front as a capacity hint for LLVM.
        let switch =
            unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
        for (on_val, dest) in cases {
            // Each case constant must have the same LLVM type as `v`.
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
        }
    }
213
a1dfa0c6
XL
    /// Emits an `invoke` of `llfn` (of function type `llty`) with `args`,
    /// continuing at `then` on normal return and unwinding to `catch`.
    ///
    /// Attaches funclet and KCFI operand bundles when present, emits a CFI
    /// type-membership test before the call, and applies call-site ABI
    /// attributes from `fn_abi` to the resulting instruction.
    fn invoke(
        &mut self,
        llty: &'ll Type,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("invoke {:?} with args ({:?})", llfn, args);

        // May adjust/bitcast arguments to match the callee's signature.
        let args = self.check_call("invoke", llty, llfn, args);
        let funclet_bundle = funclet.map(|funclet| funclet.bundle());
        let funclet_bundle = funclet_bundle.as_ref().map(|b| &*b.raw);
        // At most two bundles ever apply (funclet + kcfi), hence the inline
        // capacity of 2.
        let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
        if let Some(funclet_bundle) = funclet_bundle {
            bundles.push(funclet_bundle);
        }

        // Emit CFI pointer type membership test
        self.cfi_type_test(fn_attrs, fn_abi, llfn);

        // Emit KCFI operand bundle
        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, llfn);
        let kcfi_bundle = kcfi_bundle.as_ref().map(|b| &*b.raw);
        if let Some(kcfi_bundle) = kcfi_bundle {
            bundles.push(kcfi_bundle);
        }

        let invoke = unsafe {
            llvm::LLVMRustBuildInvoke(
                self.llbuilder,
                llty,
                llfn,
                args.as_ptr(),
                args.len() as c_uint,
                then,
                catch,
                bundles.as_ptr(),
                bundles.len() as c_uint,
                UNNAMED,
            )
        };
        if let Some(fn_abi) = fn_abi {
            fn_abi.apply_attrs_callsite(self, invoke);
        }
        invoke
    }
264
    /// Emits an `unreachable` terminator.
    fn unreachable(&mut self) {
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }
270
dc9dc135
XL
    // Straightforward value instructions, each a thin wrapper over one LLVM C
    // API builder call (see the macro definition above). The `unchecked_*`
    // variants use the NSW/NUW (no signed/unsigned wrap) forms.
    builder_methods_for_value_instructions! {
        add(a, b) => LLVMBuildAdd,
        fadd(a, b) => LLVMBuildFAdd,
        sub(a, b) => LLVMBuildSub,
        fsub(a, b) => LLVMBuildFSub,
        mul(a, b) => LLVMBuildMul,
        fmul(a, b) => LLVMBuildFMul,
        udiv(a, b) => LLVMBuildUDiv,
        exactudiv(a, b) => LLVMBuildExactUDiv,
        sdiv(a, b) => LLVMBuildSDiv,
        exactsdiv(a, b) => LLVMBuildExactSDiv,
        fdiv(a, b) => LLVMBuildFDiv,
        urem(a, b) => LLVMBuildURem,
        srem(a, b) => LLVMBuildSRem,
        frem(a, b) => LLVMBuildFRem,
        shl(a, b) => LLVMBuildShl,
        lshr(a, b) => LLVMBuildLShr,
        ashr(a, b) => LLVMBuildAShr,
        and(a, b) => LLVMBuildAnd,
        or(a, b) => LLVMBuildOr,
        xor(a, b) => LLVMBuildXor,
        neg(x) => LLVMBuildNeg,
        fneg(x) => LLVMBuildFNeg,
        not(x) => LLVMBuildNot,
        unchecked_sadd(x, y) => LLVMBuildNSWAdd,
        unchecked_uadd(x, y) => LLVMBuildNUWAdd,
        unchecked_ssub(x, y) => LLVMBuildNSWSub,
        unchecked_usub(x, y) => LLVMBuildNUWSub,
        unchecked_smul(x, y) => LLVMBuildNSWMul,
        unchecked_umul(x, y) => LLVMBuildNUWMul,
    }
302
a1dfa0c6 303 fn fadd_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
54a0048b 304 unsafe {
dc9dc135 305 let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, UNNAMED);
cdc7bbd5 306 llvm::LLVMRustSetFastMath(instr);
54a0048b
SL
307 instr
308 }
309 }
310
a1dfa0c6 311 fn fsub_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
54a0048b 312 unsafe {
dc9dc135 313 let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, UNNAMED);
cdc7bbd5 314 llvm::LLVMRustSetFastMath(instr);
54a0048b
SL
315 instr
316 }
317 }
318
a1dfa0c6 319 fn fmul_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
54a0048b 320 unsafe {
dc9dc135 321 let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, UNNAMED);
cdc7bbd5 322 llvm::LLVMRustSetFastMath(instr);
54a0048b
SL
323 instr
324 }
325 }
326
a1dfa0c6 327 fn fdiv_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
54a0048b 328 unsafe {
dc9dc135 329 let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, UNNAMED);
cdc7bbd5 330 llvm::LLVMRustSetFastMath(instr);
54a0048b
SL
331 instr
332 }
333 }
334
a1dfa0c6 335 fn frem_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
54a0048b 336 unsafe {
dc9dc135 337 let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, UNNAMED);
cdc7bbd5 338 llvm::LLVMRustSetFastMath(instr);
54a0048b
SL
339 instr
340 }
341 }
342
a1dfa0c6
XL
    /// Emits an overflow-checked integer binop via the matching
    /// `llvm.{s,u}{add,sub,mul}.with.overflow.*` intrinsic, returning
    /// `(result, overflow_flag)`.
    ///
    /// Unsigned subtraction is special-cased (see below) and does not use an
    /// intrinsic. Panics if `ty` is not an integer type.
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'_>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        use rustc_middle::ty::{Int, Uint};
        use rustc_middle::ty::{IntTy::*, UintTy::*};

        // Normalize `isize`/`usize` to the target's fixed-width equivalent so
        // the match below only deals with explicit widths.
        let new_kind = match ty.kind() {
            Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.pointer_width)),
            Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.pointer_width)),
            t @ (Uint(_) | Int(_)) => *t,
            _ => panic!("tried to get overflow intrinsic for op applied to non-int type"),
        };

        let name = match oop {
            OverflowOp::Add => match new_kind {
                Int(I8) => "llvm.sadd.with.overflow.i8",
                Int(I16) => "llvm.sadd.with.overflow.i16",
                Int(I32) => "llvm.sadd.with.overflow.i32",
                Int(I64) => "llvm.sadd.with.overflow.i64",
                Int(I128) => "llvm.sadd.with.overflow.i128",

                Uint(U8) => "llvm.uadd.with.overflow.i8",
                Uint(U16) => "llvm.uadd.with.overflow.i16",
                Uint(U32) => "llvm.uadd.with.overflow.i32",
                Uint(U64) => "llvm.uadd.with.overflow.i64",
                Uint(U128) => "llvm.uadd.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Sub => match new_kind {
                Int(I8) => "llvm.ssub.with.overflow.i8",
                Int(I16) => "llvm.ssub.with.overflow.i16",
                Int(I32) => "llvm.ssub.with.overflow.i32",
                Int(I64) => "llvm.ssub.with.overflow.i64",
                Int(I128) => "llvm.ssub.with.overflow.i128",

                Uint(_) => {
                    // Emit sub and icmp instead of llvm.usub.with.overflow. LLVM considers these
                    // to be the canonical form. It will attempt to reform llvm.usub.with.overflow
                    // in the backend if profitable.
                    let sub = self.sub(lhs, rhs);
                    let cmp = self.icmp(IntPredicate::IntULT, lhs, rhs);
                    return (sub, cmp);
                }

                _ => unreachable!(),
            },
            OverflowOp::Mul => match new_kind {
                Int(I8) => "llvm.smul.with.overflow.i8",
                Int(I16) => "llvm.smul.with.overflow.i16",
                Int(I32) => "llvm.smul.with.overflow.i32",
                Int(I64) => "llvm.smul.with.overflow.i64",
                Int(I128) => "llvm.smul.with.overflow.i128",

                Uint(U8) => "llvm.umul.with.overflow.i8",
                Uint(U16) => "llvm.umul.with.overflow.i16",
                Uint(U32) => "llvm.umul.with.overflow.i32",
                Uint(U64) => "llvm.umul.with.overflow.i64",
                Uint(U128) => "llvm.umul.with.overflow.i128",

                _ => unreachable!(),
            },
        };

        // The intrinsics return an aggregate `{ result, i1 overflow }`.
        let res = self.call_intrinsic(name, &[lhs, rhs]);
        (self.extract_value(res, 0), self.extract_value(res, 1))
    }
414
1b1a35ee
XL
415 fn from_immediate(&mut self, val: Self::Value) -> Self::Value {
416 if self.cx().val_ty(val) == self.cx().type_i1() {
417 self.zext(val, self.cx().type_i8())
418 } else {
419 val
420 }
421 }
c295e0f8 422 fn to_immediate_scalar(&mut self, val: Self::Value, scalar: abi::Scalar) -> Self::Value {
1b1a35ee
XL
423 if scalar.is_bool() {
424 return self.trunc(val, self.cx().type_i1());
425 }
426 val
427 }
428
    /// Emits an `alloca` of type `ty` with the given alignment.
    ///
    /// The alloca is placed at the start of the function's first basic block
    /// (via a temporary builder), not at the current insertion point.
    fn alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
        unsafe {
            let alloca = llvm::LLVMBuildAlloca(bx.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }
438
    /// Emits a dynamically-sized `alloca` of `len` bytes (element type `i8`)
    /// at the current insertion point, with the given alignment.
    fn byte_array_alloca(&mut self, len: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let alloca =
                llvm::LLVMBuildArrayAlloca(self.llbuilder, self.cx().type_i8(), len, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }
447
    /// Emits a `load` of type `ty` from `ptr` with the given alignment.
    fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }
455
    /// Emits a volatile `load` of type `ty` from `ptr`.
    /// Note: no explicit alignment is set, unlike `load`.
    fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetVolatile(load, llvm::True);
            load
        }
    }
463
a1dfa0c6
XL
    /// Emits an atomic `load` of type `ty` from `ptr` with the given ordering.
    fn atomic_load(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ty,
                ptr,
                UNNAMED,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }
484
    /// Loads the value at `place` into an `OperandRef`, choosing the
    /// representation from the place's layout:
    /// - unsized: `Ref` carrying the extra (metadata) value,
    /// - ZST: a zero-sized operand, no load emitted,
    /// - LLVM-immediate: a single `load` (or the global's constant initializer
    ///   when available), annotated with value-range/nonnull/align metadata,
    /// - scalar pair: two loads through `struct_gep`,
    /// - otherwise: returned by reference without loading.
    #[instrument(level = "trace", skip(self))]
    fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx, &'ll Value> {
        if place.layout.is_unsized() {
            let tail = self.tcx.struct_tail_with_normalize(place.layout.ty, |ty| ty, || {});
            if matches!(tail.kind(), ty::Foreign(..)) {
                // Unsized locals and, at least conceptually, even unsized arguments must be copied
                // around, which requires dynamically determining their size. Therefore, we cannot
                // allow `extern` types here. Consult t-opsem before removing this check.
                panic!("unsized locals must not be `extern` types");
            }
        }
        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());

        if place.layout.is_zst() {
            return OperandRef::zero_sized(place.layout);
        }

        // Attaches metadata describing the loaded scalar's valid values
        // (noundef, !range, !nonnull, alignment) so LLVM can optimize on it.
        #[instrument(level = "trace", skip(bx))]
        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: abi::Scalar,
            layout: TyAndLayout<'tcx>,
            offset: Size,
        ) {
            if !scalar.is_uninit_valid() {
                bx.noundef_metadata(load);
            }

            match scalar.primitive() {
                abi::Int(..) => {
                    if !scalar.is_always_valid(bx) {
                        bx.range_metadata(load, scalar.valid_range(bx));
                    }
                }
                abi::Pointer(_) => {
                    if !scalar.valid_range(bx).contains(0) {
                        bx.nonnull_metadata(load);
                    }

                    if let Some(pointee) = layout.pointee_info_at(bx, offset) {
                        if let Some(_) = pointee.safe {
                            bx.align_metadata(load, pointee.align);
                        }
                    }
                }
                abi::F32 | abi::F64 => {}
            }
        }

        let val = if let Some(llextra) = place.llextra {
            OperandValue::Ref(place.llval, Some(llextra), place.align)
        } else if place.layout.is_llvm_immediate() {
            let mut const_llval = None;
            let llty = place.layout.llvm_type(self);
            unsafe {
                // If the place is a constant global with a matching-typed
                // initializer, skip the load and use the initializer directly.
                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
                    if llvm::LLVMIsGlobalConstant(global) == llvm::True {
                        if let Some(init) = llvm::LLVMGetInitializer(global) {
                            if self.val_ty(init) == llty {
                                const_llval = Some(init);
                            }
                        }
                    }
                }
            }
            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(llty, place.llval, place.align);
                if let abi::Abi::Scalar(scalar) = place.layout.abi {
                    scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
                }
                load
            });
            OperandValue::Immediate(self.to_immediate(llval, place.layout))
        } else if let abi::Abi::ScalarPair(a, b) = place.layout.abi {
            let b_offset = a.size(self).align_to(b.align(self).abi);
            let pair_ty = place.layout.llvm_type(self);

            let mut load = |i, scalar: abi::Scalar, layout, align, offset| {
                let llptr = self.struct_gep(pair_ty, place.llval, i as u64);
                let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
                let load = self.load(llty, llptr, align);
                scalar_load_metadata(self, load, scalar, layout, offset);
                self.to_immediate_scalar(load, scalar)
            };

            OperandValue::Pair(
                load(0, a, place.layout, place.align, Size::ZERO),
                // The second element's alignment may be lower at its offset.
                load(1, b, place.layout, place.align.restrict_for_offset(b_offset), b_offset),
            )
        } else {
            OperandValue::Ref(place.llval, None, place.align)
        };

        OperandRef { val, layout: place.layout }
    }
581
    /// Stores `cg_elem` into each of the `count` elements of `dest` by
    /// emitting a counted loop (header / body / next blocks with a phi for
    /// the index). Afterwards, `self` is positioned in the `next` block.
    fn write_operand_repeatedly(
        &mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) {
        let zero = self.const_usize(0);
        let count = self.const_usize(count);

        let header_bb = self.append_sibling_block("repeat_loop_header");
        let body_bb = self.append_sibling_block("repeat_loop_body");
        let next_bb = self.append_sibling_block("repeat_loop_next");

        self.br(header_bb);

        // Header: `i = phi [0, entry], [next, body]; if i < count { body } else { next }`.
        let mut header_bx = Self::build(self.cx, header_bb);
        let i = header_bx.phi(self.val_ty(zero), &[zero], &[self.llbb()]);

        let keep_going = header_bx.icmp(IntPredicate::IntULT, i, count);
        header_bx.cond_br(keep_going, body_bb, next_bb);

        // Body: store the element at index `i`, bump the counter, loop back.
        let mut body_bx = Self::build(self.cx, body_bb);
        let dest_elem = dest.project_index(&mut body_bx, i);
        cg_elem.val.store(&mut body_bx, dest_elem);

        let next = body_bx.unchecked_uadd(i, self.const_usize(1));
        body_bx.br(header_bb);
        // Wire the back-edge value into the header's phi.
        header_bx.add_incoming_to_phi(i, next, body_bb);

        *self = Self::build(self.cx, next_bb);
    }
1a4d82fc 613
    /// Attaches `!range` metadata describing `range` to the `load`
    /// instruction. The end bound is incremented (wrapping) because the
    /// metadata's upper bound is exclusive while `WrappingRange` is inclusive.
    fn range_metadata(&mut self, load: &'ll Value, range: WrappingRange) {
        if self.sess().target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks an i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            let llty = self.cx.val_ty(load);
            let v = [
                self.cx.const_uint_big(llty, range.start),
                self.cx.const_uint_big(llty, range.end.wrapping_add(1)),
            ];

            llvm::LLVMSetMetadata(
                load,
                llvm::MD_range as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, v.as_ptr(), v.len() as c_uint),
            );
        }
    }
637
    /// Attaches (empty) `!nonnull` metadata to the `load` instruction,
    /// asserting the loaded pointer is never null.
    fn nonnull_metadata(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(
                load,
                llvm::MD_nonnull as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0),
            );
        }
    }
647
    /// Emits a plain `store` of `val` to `ptr`; see `store_with_flags`.
    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }
651
a1dfa0c6
XL
    /// Emits a `store` of `val` to `ptr`, honoring `flags`:
    /// `UNALIGNED` forces alignment 1, `VOLATILE` marks the store volatile,
    /// and `NONTEMPORAL` attaches `!nontemporal` metadata.
    fn store_with_flags(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align =
                if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: https://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.const_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }
681
dfeec247
XL
    /// Emits an atomic `store` of `val` to `ptr` with the given ordering.
    fn atomic_store(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) {
        debug!("Store {:?} -> {:?}", val, ptr);
        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }
702
    /// Emits a `getelementptr` on `ptr` (element type `ty`) with `indices`.
    fn gep(&mut self, ty: &'ll Type, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEP2(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
            )
        }
    }
715
94222f64
XL
    /// Emits an `inbounds getelementptr` on `ptr` (element type `ty`) with
    /// `indices`.
    fn inbounds_gep(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        indices: &[&'ll Value],
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildInBoundsGEP2(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
            )
        }
    }
733
    /// Emits a struct-field `getelementptr` selecting field `idx` of the
    /// struct type `ty` behind `ptr`.
    fn struct_gep(&mut self, ty: &'ll Type, ptr: &'ll Value, idx: u64) -> &'ll Value {
        // Guard against truncation when narrowing to the C API's index type.
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildStructGEP2(self.llbuilder, ty, ptr, idx as c_uint, UNNAMED) }
    }
738
1a4d82fc 739 /* Casts */
    /// Emits an integer `trunc` of `val` to `dest_ty`.
    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }
743
    /// Emits a sign-extension (`sext`) of `val` to `dest_ty`.
    fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }
747
    /// Saturating float-to-unsigned-int conversion.
    fn fptoui_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.fptoint_sat(false, val, dest_ty)
    }
751
    /// Saturating float-to-signed-int conversion.
    fn fptosi_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.fptoint_sat(true, val, dest_ty)
    }
755
    /// Emits a (non-saturating, UB-on-out-of-bounds) `fptoui`, with a
    /// wasm-specific fast path via raw trunc intrinsics (see below).
    fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        // On WebAssembly the `fptoui` and `fptosi` instructions currently have
        // poor codegen. The reason for this is that the corresponding wasm
        // instructions, `i32.trunc_f32_s` for example, will trap when the float
        // is out-of-bounds, infinity, or nan. This means that LLVM
        // automatically inserts control flow around `fptoui` and `fptosi`
        // because the LLVM instruction `fptoui` is defined as producing a
        // poison value, not having UB on out-of-bounds values.
        //
        // This method, however, is only used with non-saturating casts that
        // have UB on out-of-bounds values. This means that it's ok if we use
        // the raw wasm instruction since out-of-bounds values can do whatever
        // we like. To ensure that LLVM picks the right instruction we choose
        // the raw wasm intrinsic functions which avoid LLVM inserting all the
        // other control flow automatically.
        if self.sess().target.is_like_wasm {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                let name = match (int_width, float_width) {
                    (32, 32) => Some("llvm.wasm.trunc.unsigned.i32.f32"),
                    (32, 64) => Some("llvm.wasm.trunc.unsigned.i32.f64"),
                    (64, 32) => Some("llvm.wasm.trunc.unsigned.i64.f32"),
                    (64, 64) => Some("llvm.wasm.trunc.unsigned.i64.f64"),
                    _ => None,
                };
                if let Some(name) = name {
                    return self.call_intrinsic(name, &[val]);
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED) }
    }
790
    /// Emits a (non-saturating, UB-on-out-of-bounds) `fptosi`.
    fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        // see `fptoui` above for why wasm is different here
        if self.sess().target.is_like_wasm {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                let name = match (int_width, float_width) {
                    (32, 32) => Some("llvm.wasm.trunc.signed.i32.f32"),
                    (32, 64) => Some("llvm.wasm.trunc.signed.i32.f64"),
                    (64, 32) => Some("llvm.wasm.trunc.signed.i64.f32"),
                    (64, 64) => Some("llvm.wasm.trunc.signed.i64.f64"),
                    _ => None,
                };
                if let Some(name) = name {
                    return self.call_intrinsic(name, &[val]);
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, UNNAMED) }
    }
812
    /// Emits a `uitofp` (unsigned int to float) conversion.
    fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }
816
    /// Emits a `sitofp` (signed int to float) conversion.
    fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }
820
    /// Emits an `fptrunc` (float narrowing) conversion.
    fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }
824
    /// Emits an `fpext` (float widening) conversion.
    fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }
828
    /// Emits a `ptrtoint` conversion.
    fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED) }
    }
832
    /// Emits an `inttoptr` conversion.
    fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED) }
    }
836
    /// Emits a `bitcast` of `val` to `dest_ty`.
    fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }
840
    /// Emits an integer cast of `val` to `dest_ty`; `is_signed` selects
    /// whether widening sign-extends.
    fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildIntCast2(
                self.llbuilder,
                val,
                dest_ty,
                if is_signed { True } else { False },
                UNNAMED,
            )
        }
    }
852
    /// Emits a pointer cast of `val` to `dest_ty`.
    fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }
856
1a4d82fc 857 /* Comparisons */
    /// Emits an integer comparison (`icmp`) with the given predicate.
    fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        // Translate the backend-generic predicate to LLVM's enumeration.
        let op = llvm::IntPredicate::from_generic(op);
        unsafe { llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }
862
    /// Emits a floating-point comparison (`fcmp`) with the given predicate.
    fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        // Translate the backend-generic predicate to LLVM's enumeration.
        let op = llvm::RealPredicate::from_generic(op);
        unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }
867
    /* Miscellaneous instructions */
    // Emits a memcpy of `size` bytes from `src` to `dst` with the given alignments.
    // NONTEMPORAL is rejected up front; VOLATILE is forwarded to LLVM.
    fn memcpy(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
    ) {
        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memcpy not supported");
        // LLVM expects the length operand as a target-sized integer.
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        unsafe {
            llvm::LLVMRustBuildMemCpy(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            );
        }
    }

    // Like `memcpy`, but the ranges may overlap (lowers to llvm.memmove).
    fn memmove(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
    ) {
        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memmove not supported");
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        unsafe {
            llvm::LLVMRustBuildMemMove(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            );
        }
    }

    // Fills `size` bytes at `ptr` with `fill_byte` (llvm.memset).
    // Unlike memcpy/memmove above, NONTEMPORAL is not asserted against here.
    fn memset(
        &mut self,
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) {
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        unsafe {
            llvm::LLVMRustBuildMemSet(
                self.llbuilder,
                ptr,
                align.bytes() as c_uint,
                fill_byte,
                size,
                is_volatile,
            );
        }
    }
939
    // `cond ? then_val : else_val` as an LLVM select instruction.
    fn select(
        &mut self,
        cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED) }
    }

    // Reads the next variadic argument of type `ty` from a va_list.
    fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }

    // Extracts the element at (dynamic) index `idx` from vector `vec`.
    fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED) }
    }
956
    // Broadcasts `elt` into every lane of a `num_elts`-wide vector using the
    // standard insertelement + shufflevector-with-zero-mask idiom: `elt` is
    // inserted at lane 0 of an undef vector, then shuffled with an all-zero
    // mask so each result lane reads lane 0.
    fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
            let vec_i32_ty = self.type_vector(self.type_i32(), num_elts as u64);
            // const_null of an i32 vector is the all-zero shuffle mask.
            self.shuffle_vector(vec, undef, self.const_null(vec_i32_ty))
        }
    }
966
    // Extracts the field at constant index `idx` from an aggregate value.
    // The assert guards the narrowing u64 -> c_uint conversion.
    fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED) }
    }

    // Returns a copy of `agg_val` with the field at constant index `idx` replaced by `elt`.
    fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint, UNNAMED) }
    }
976
    // Sets the exception-handling personality function on the current function.
    fn set_personality_fn(&mut self, personality: &'ll Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }

    // Emits a cleanup-only landing pad ({ ptr, i32 } payload) and returns the
    // two extracted payload values (exception pointer and selector).
    fn cleanup_landing_pad(&mut self, pers_fn: &'ll Value) -> (&'ll Value, &'ll Value) {
        let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
        let landing_pad = self.landing_pad(ty, pers_fn, 0);
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
        (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1))
    }

    // Emits a landing pad with a single empty `filter` clause (catches nothing
    // via filter semantics) and returns the extracted payload values.
    fn filter_landing_pad(&mut self, pers_fn: &'ll Value) -> (&'ll Value, &'ll Value) {
        let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
        let landing_pad = self.landing_pad(ty, pers_fn, 1);
        self.add_clause(landing_pad, self.const_array(self.type_ptr(), &[]));
        (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1))
    }

    // Re-raises an in-flight exception: packs the two payload values back into
    // the { ptr, i32 } aggregate and emits a `resume` instruction.
    fn resume(&mut self, exn0: &'ll Value, exn1: &'ll Value) {
        let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
        let mut exn = self.const_poison(ty);
        exn = self.insert_value(exn, exn0, 0);
        exn = self.insert_value(exn, exn1, 1);
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn);
        }
    }
1008
    // Windows MSVC-style EH: emits a `cleanuppad` instruction and wraps it in a Funclet.
    // The builder API returns Option; None means the LLVM build lacks funclet support.
    fn cleanup_pad(&mut self, parent: Option<&'ll Value>, args: &[&'ll Value]) -> Funclet<'ll> {
        let ret = unsafe {
            llvm::LLVMBuildCleanupPad(
                self.llbuilder,
                parent,
                args.as_ptr(),
                args.len() as c_uint,
                c"cleanuppad".as_ptr(),
            )
        };
        Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
    }

    // Terminates a cleanup funclet, optionally continuing unwinding at `unwind`.
    fn cleanup_ret(&mut self, funclet: &Funclet<'ll>, unwind: Option<&'ll BasicBlock>) {
        unsafe {
            llvm::LLVMBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind)
                .expect("LLVM does not have support for cleanupret");
        }
    }

    // Emits a `catchpad` instruction (child of a catchswitch `parent`) wrapped in a Funclet.
    fn catch_pad(&mut self, parent: &'ll Value, args: &[&'ll Value]) -> Funclet<'ll> {
        let ret = unsafe {
            llvm::LLVMBuildCatchPad(
                self.llbuilder,
                parent,
                args.as_ptr(),
                args.len() as c_uint,
                c"catchpad".as_ptr(),
            )
        };
        Funclet::new(ret.expect("LLVM does not have support for catchpad"))
    }

    // Emits a `catchswitch` and registers each handler block on it.
    fn catch_switch(
        &mut self,
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
        handlers: &[&'ll BasicBlock],
    ) -> &'ll Value {
        let ret = unsafe {
            llvm::LLVMBuildCatchSwitch(
                self.llbuilder,
                parent,
                unwind,
                // Number of handlers is declared up front; they are attached below.
                handlers.len() as c_uint,
                c"catchswitch".as_ptr(),
            )
        };
        let ret = ret.expect("LLVM does not have support for catchswitch");
        for handler in handlers {
            unsafe {
                llvm::LLVMAddHandler(ret, handler);
            }
        }
        ret
    }
1065
    // Atomic Operations
    // Compare-and-exchange: atomically replaces `*dst` with `src` if it equals `cmp`.
    // `weak` selects the weak (spuriously-failing) form via LLVMSetWeak after creation,
    // since the build call itself has no weak parameter.
    fn atomic_cmpxchg(
        &mut self,
        dst: &'ll Value,
        cmp: &'ll Value,
        src: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        failure_order: rustc_codegen_ssa::common::AtomicOrdering,
        weak: bool,
    ) -> &'ll Value {
        let weak = if weak { llvm::True } else { llvm::False };
        unsafe {
            let value = llvm::LLVMBuildAtomicCmpXchg(
                self.llbuilder,
                dst,
                cmp,
                src,
                AtomicOrdering::from_generic(order),
                AtomicOrdering::from_generic(failure_order),
                llvm::False, // SingleThreaded
            );
            llvm::LLVMSetWeak(value, weak);
            value
        }
    }

    // Atomic read-modify-write (add, xchg, and, or, ...); returns the old value.
    fn atomic_rmw(
        &mut self,
        op: rustc_codegen_ssa::common::AtomicRmwBinOp,
        dst: &'ll Value,
        src: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
                AtomicOrdering::from_generic(order),
                llvm::False, // SingleThreaded
            )
        }
    }

    // Memory fence with the given ordering; `scope` maps to LLVM's
    // single-thread (signal-fence-like) vs. cross-thread fence flag.
    fn atomic_fence(
        &mut self,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        scope: SynchronizationScope,
    ) {
        let single_threaded = match scope {
            SynchronizationScope::SingleThread => llvm::True,
            SynchronizationScope::CrossThread => llvm::False,
        };
        unsafe {
            llvm::LLVMBuildFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
                single_threaded,
                UNNAMED,
            );
        }
    }
a7813a04 1128
    // Attaches `!invariant.load` metadata (an empty MD node) to a load
    // instruction, telling LLVM the loaded memory never changes.
    fn set_invariant_load(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(
                load,
                llvm::MD_invariant_load as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0),
            );
        }
    }

    // Marks the start of `ptr`'s live range for stack-slot reuse/optimization.
    fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start.p0i8", ptr, size);
    }

    // Marks the end of `ptr`'s live range.
    fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end.p0i8", ptr, size);
    }
1146
f035d41b
XL
1147 fn instrprof_increment(
1148 &mut self,
1149 fn_name: &'ll Value,
1150 hash: &'ll Value,
1151 num_counters: &'ll Value,
1152 index: &'ll Value,
3dfed10e 1153 ) {
f035d41b
XL
1154 debug!(
1155 "instrprof_increment() with args ({:?}, {:?}, {:?}, {:?})",
1156 fn_name, hash, num_counters, index
1157 );
1158
3dfed10e 1159 let llfn = unsafe { llvm::LLVMRustGetInstrProfIncrementIntrinsic(self.cx().llmod) };
94222f64 1160 let llty = self.cx.type_func(
add651ee 1161 &[self.cx.type_ptr(), self.cx.type_i64(), self.cx.type_i32(), self.cx.type_i32()],
94222f64
XL
1162 self.cx.type_void(),
1163 );
f035d41b 1164 let args = &[fn_name, hash, num_counters, index];
94222f64 1165 let args = self.check_call("call", llty, llfn, args);
f035d41b
XL
1166
1167 unsafe {
3dfed10e 1168 let _ = llvm::LLVMRustBuildCall(
f035d41b 1169 self.llbuilder,
94222f64 1170 llty,
f035d41b
XL
1171 llfn,
1172 args.as_ptr() as *const &llvm::Value,
1173 args.len() as c_uint,
9c376795
FG
1174 [].as_ptr(),
1175 0 as c_uint,
3dfed10e 1176 );
f035d41b
XL
1177 }
1178 }
1179
532ac7d7
XL
1180 fn call(
1181 &mut self,
94222f64 1182 llty: &'ll Type,
49aad941 1183 fn_attrs: Option<&CodegenFnAttrs>,
2b03887a 1184 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
532ac7d7
XL
1185 llfn: &'ll Value,
1186 args: &[&'ll Value],
1187 funclet: Option<&Funclet<'ll>>,
1188 ) -> &'ll Value {
dfeec247 1189 debug!("call {:?} with args ({:?})", llfn, args);
532ac7d7 1190
94222f64 1191 let args = self.check_call("call", llty, llfn, args);
9c376795
FG
1192 let funclet_bundle = funclet.map(|funclet| funclet.bundle());
1193 let funclet_bundle = funclet_bundle.as_ref().map(|b| &*b.raw);
fe692bf9
FG
1194 let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
1195 if let Some(funclet_bundle) = funclet_bundle {
1196 bundles.push(funclet_bundle);
1197 }
9c376795 1198
49aad941
FG
1199 // Emit CFI pointer type membership test
1200 self.cfi_type_test(fn_attrs, fn_abi, llfn);
1201
1202 // Emit KCFI operand bundle
1203 let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, llfn);
1204 let kcfi_bundle = kcfi_bundle.as_ref().map(|b| &*b.raw);
fe692bf9
FG
1205 if let Some(kcfi_bundle) = kcfi_bundle {
1206 bundles.push(kcfi_bundle);
1207 }
532ac7d7 1208
2b03887a 1209 let call = unsafe {
532ac7d7
XL
1210 llvm::LLVMRustBuildCall(
1211 self.llbuilder,
94222f64 1212 llty,
532ac7d7
XL
1213 llfn,
1214 args.as_ptr() as *const &llvm::Value,
1215 args.len() as c_uint,
9c376795
FG
1216 bundles.as_ptr(),
1217 bundles.len() as c_uint,
532ac7d7 1218 )
2b03887a
FG
1219 };
1220 if let Some(fn_abi) = fn_abi {
1221 fn_abi.apply_attrs_callsite(self, call);
32a655c1 1222 }
2b03887a 1223 call
32a655c1
SL
1224 }
1225
    // Zero-extends `val` to the wider integer type `dest_ty`.
    fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    // Tunes attributes on a call site inside a cleanup (unwind) path.
    fn apply_attrs_to_cleanup_callsite(&mut self, llret: &'ll Value) {
        if llvm_util::get_version() < (17, 0, 2) {
            // Work around https://github.com/llvm/llvm-project/issues/66984.
            let noinline = llvm::AttributeKind::NoInline.create_attr(self.llcx);
            attributes::apply_to_callsite(llret, llvm::AttributePlace::Function, &[noinline]);
        } else {
            // Cleanup is always the cold path.
            let cold_inline = llvm::AttributeKind::Cold.create_attr(self.llcx);
            attributes::apply_to_callsite(llret, llvm::AttributePlace::Function, &[cold_inline]);
        }
    }
1241}
1242
a2a8927a 1243impl<'ll> StaticBuilderMethods for Builder<'_, 'll, '_> {
dc9dc135 1244 fn get_static(&mut self, def_id: DefId) -> &'ll Value {
532ac7d7
XL
1245 // Forward to the `get_static` method of `CodegenCx`
1246 self.cx().get_static(def_id)
1247 }
532ac7d7
XL
1248}
1249
a2a8927a 1250impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
17df50a5
XL
1251 fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
1252 // Create a fresh builder from the crate context.
1253 let llbuilder = unsafe { llvm::LLVMCreateBuilderInContext(cx.llcx) };
1254 Builder { llbuilder, cx }
1255 }
1256
532ac7d7 1257 pub fn llfn(&self) -> &'ll Value {
dfeec247 1258 unsafe { llvm::LLVMGetBasicBlockParent(self.llbb()) }
532ac7d7
XL
1259 }
1260
532ac7d7
XL
1261 fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
1262 unsafe {
1263 llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
1264 }
1265 }
1266
5e7ed085
FG
1267 fn align_metadata(&mut self, load: &'ll Value, align: Align) {
1268 unsafe {
1269 let v = [self.cx.const_u64(align.bytes())];
1270
1271 llvm::LLVMSetMetadata(
1272 load,
1273 llvm::MD_align as c_uint,
1274 llvm::LLVMMDNodeInContext(self.cx.llcx, v.as_ptr(), v.len() as c_uint),
1275 );
1276 }
1277 }
1278
1279 fn noundef_metadata(&mut self, load: &'ll Value) {
1280 unsafe {
1281 llvm::LLVMSetMetadata(
1282 load,
1283 llvm::MD_noundef as c_uint,
1284 llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0),
1285 );
1286 }
1287 }
1288
    // Floating-point minimum via the backend's minnum builder.
    pub fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs) }
    }

    // Floating-point maximum via the backend's maxnum builder.
    pub fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs) }
    }

    // Returns a copy of `vec` with `elt` inserted at (dynamic) lane `idx`.
    pub fn insert_element(
        &mut self,
        vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED) }
    }

    // Lane permutation of `v1`/`v2` according to the constant `mask` vector.
    pub fn shuffle_vector(
        &mut self,
        v1: &'ll Value,
        v2: &'ll Value,
        mask: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED) }
    }
1314
416331ca
XL
1315 pub fn vector_reduce_fadd(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
1316 unsafe { llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src) }
1317 }
1318 pub fn vector_reduce_fmul(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
1319 unsafe { llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src) }
1320 }
532ac7d7 1321 pub fn vector_reduce_fadd_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
532ac7d7 1322 unsafe {
532ac7d7 1323 let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
cdc7bbd5 1324 llvm::LLVMRustSetFastMath(instr);
532ac7d7
XL
1325 instr
1326 }
1327 }
1328 pub fn vector_reduce_fmul_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
532ac7d7 1329 unsafe {
532ac7d7 1330 let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
cdc7bbd5 1331 llvm::LLVMRustSetFastMath(instr);
532ac7d7
XL
1332 instr
1333 }
1334 }
1335 pub fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
532ac7d7
XL
1336 unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
1337 }
1338 pub fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
532ac7d7
XL
1339 unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
1340 }
1341 pub fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
532ac7d7
XL
1342 unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
1343 }
1344 pub fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
532ac7d7
XL
1345 unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
1346 }
1347 pub fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
532ac7d7
XL
1348 unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
1349 }
1350 pub fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
dfeec247
XL
1351 unsafe {
1352 llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false)
1353 }
532ac7d7
XL
1354 }
1355 pub fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
dfeec247
XL
1356 unsafe {
1357 llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false)
1358 }
532ac7d7
XL
1359 }
1360 pub fn vector_reduce_fmin_fast(&mut self, src: &'ll Value) -> &'ll Value {
532ac7d7 1361 unsafe {
dfeec247
XL
1362 let instr =
1363 llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
cdc7bbd5 1364 llvm::LLVMRustSetFastMath(instr);
532ac7d7
XL
1365 instr
1366 }
1367 }
1368 pub fn vector_reduce_fmax_fast(&mut self, src: &'ll Value) -> &'ll Value {
532ac7d7 1369 unsafe {
dfeec247
XL
1370 let instr =
1371 llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
cdc7bbd5 1372 llvm::LLVMRustSetFastMath(instr);
532ac7d7
XL
1373 instr
1374 }
1375 }
1376 pub fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
532ac7d7
XL
1377 unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
1378 }
1379 pub fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
532ac7d7
XL
1380 unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
1381 }
1382
    // Adds a catch/filter clause to an existing landingpad instruction.
    pub fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    // Terminates a catch funclet, transferring control to `unwind`.
    pub fn catch_ret(&mut self, funclet: &Funclet<'ll>, unwind: &'ll BasicBlock) -> &'ll Value {
        let ret = unsafe { llvm::LLVMBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind) };
        ret.expect("LLVM does not have support for catchret")
    }
1393
dfeec247
XL
1394 fn check_call<'b>(
1395 &mut self,
1396 typ: &str,
94222f64 1397 fn_ty: &'ll Type,
dfeec247
XL
1398 llfn: &'ll Value,
1399 args: &'b [&'ll Value],
1400 ) -> Cow<'b, [&'ll Value]> {
dfeec247
XL
1401 assert!(
1402 self.cx.type_kind(fn_ty) == TypeKind::Function,
add651ee 1403 "builder::{typ} not passed a function, but {fn_ty:?}"
dfeec247 1404 );
a7813a04 1405
a1dfa0c6 1406 let param_tys = self.cx.func_params_types(fn_ty);
a7813a04 1407
cdc7bbd5 1408 let all_args_match = iter::zip(&param_tys, args.iter().map(|&v| self.val_ty(v)))
1bb2cb6e
SL
1409 .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1410
1411 if all_args_match {
1412 return Cow::Borrowed(args);
1413 }
1414
cdc7bbd5 1415 let casted_args: Vec<_> = iter::zip(param_tys, args)
1bb2cb6e
SL
1416 .enumerate()
1417 .map(|(i, (expected_ty, &actual_val))| {
a1dfa0c6 1418 let actual_ty = self.val_ty(actual_val);
1bb2cb6e 1419 if expected_ty != actual_ty {
dfeec247
XL
1420 debug!(
1421 "type mismatch in function call of {:?}. \
1bb2cb6e 1422 Expected {:?} for param {}, got {:?}; injecting bitcast",
dfeec247
XL
1423 llfn, expected_ty, i, actual_ty
1424 );
1bb2cb6e
SL
1425 self.bitcast(actual_val, expected_ty)
1426 } else {
1427 actual_val
1428 }
1429 })
1430 .collect();
a7813a04 1431
0bf4aa26 1432 Cow::Owned(casted_args)
a7813a04 1433 }
ff7c6d11 1434
    // Inherent (pub) counterpart of the trait `va_arg` above; identical body,
    // callable without the trait in scope.
    pub fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }

    // Looks up a named LLVM intrinsic in the context cache and calls it.
    // No fn-attrs/fn-abi are supplied, so no CFI test or ABI attributes apply.
    pub(crate) fn call_intrinsic(&mut self, intrinsic: &str, args: &[&'ll Value]) -> &'ll Value {
        let (ty, f) = self.cx.get_intrinsic(intrinsic);
        self.call(ty, None, None, f, args, None)
    }
1443
    // Shared helper for lifetime_start/lifetime_end: calls the named lifetime
    // intrinsic with (size, ptr). Skipped entirely for zero-sized ranges and
    // when the session has lifetime markers disabled.
    fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
        let size = size.bytes();
        if size == 0 {
            return;
        }

        if !self.cx().sess().emit_lifetime_markers() {
            return;
        }

        self.call_intrinsic(intrinsic, &[self.cx.const_u64(size), ptr]);
    }
532ac7d7 1456
3dfed10e
XL
1457 pub(crate) fn phi(
1458 &mut self,
1459 ty: &'ll Type,
1460 vals: &[&'ll Value],
1461 bbs: &[&'ll BasicBlock],
1462 ) -> &'ll Value {
532ac7d7 1463 assert_eq!(vals.len(), bbs.len());
dfeec247 1464 let phi = unsafe { llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED) };
532ac7d7 1465 unsafe {
dfeec247 1466 llvm::LLVMAddIncoming(phi, vals.as_ptr(), bbs.as_ptr(), vals.len() as c_uint);
532ac7d7
XL
1467 phi
1468 }
1469 }
1470
1471 fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
532ac7d7
XL
1472 unsafe {
1473 llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
1474 }
1475 }
3dfed10e 1476
f2b60f7d
FG
1477 fn fptoint_sat(&mut self, signed: bool, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
1478 let src_ty = self.cx.val_ty(val);
1479 let (float_ty, int_ty, vector_length) = if self.cx.type_kind(src_ty) == TypeKind::Vector {
1480 assert_eq!(self.cx.vector_length(src_ty), self.cx.vector_length(dest_ty));
1481 (
1482 self.cx.element_type(src_ty),
1483 self.cx.element_type(dest_ty),
1484 Some(self.cx.vector_length(src_ty)),
1485 )
5099ac24 1486 } else {
f2b60f7d
FG
1487 (src_ty, dest_ty, None)
1488 };
1489 let float_width = self.cx.float_width(float_ty);
1490 let int_width = self.cx.int_width(int_ty);
1491
1492 let instr = if signed { "fptosi" } else { "fptoui" };
1493 let name = if let Some(vector_length) = vector_length {
add651ee 1494 format!("llvm.{instr}.sat.v{vector_length}i{int_width}.v{vector_length}f{float_width}")
f2b60f7d 1495 } else {
add651ee 1496 format!("llvm.{instr}.sat.i{int_width}.f{float_width}")
f2b60f7d
FG
1497 };
1498 let f = self.declare_cfn(&name, llvm::UnnamedAddr::No, self.type_func(&[src_ty], dest_ty));
49aad941 1499 self.call(self.type_func(&[src_ty], dest_ty), None, None, f, &[val], None)
5099ac24
FG
1500 }
1501
    // Emits a landingpad instruction of type `ty` reserving room for
    // `num_clauses` clauses (added later via `add_clause`).
    pub(crate) fn landing_pad(
        &mut self,
        ty: &'ll Type,
        pers_fn: &'ll Value,
        num_clauses: usize,
    ) -> &'ll Value {
        // Use LLVMSetPersonalityFn to set the personality. It supports arbitrary Consts while,
        // LLVMBuildLandingPad requires the argument to be a Function (as of LLVM 12). The
        // personality lives on the parent function anyway.
        self.set_personality_fn(pers_fn);
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, None, num_clauses as c_uint, UNNAMED)
        }
    }
49aad941
FG
1516
1517 // Emits CFI pointer type membership tests.
1518 fn cfi_type_test(
1519 &mut self,
1520 fn_attrs: Option<&CodegenFnAttrs>,
1521 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1522 llfn: &'ll Value,
1523 ) {
add651ee 1524 let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
ed00b5ec
FG
1525 if self.tcx.sess.is_sanitizer_cfi_enabled()
1526 && let Some(fn_abi) = fn_abi
1527 && is_indirect_call
1528 {
1529 if let Some(fn_attrs) = fn_attrs
1530 && fn_attrs.no_sanitize.contains(SanitizerSet::CFI)
1531 {
49aad941
FG
1532 return;
1533 }
1534
1535 let mut options = TypeIdOptions::empty();
1536 if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() {
1537 options.insert(TypeIdOptions::GENERALIZE_POINTERS);
1538 }
1539 if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() {
1540 options.insert(TypeIdOptions::NORMALIZE_INTEGERS);
1541 }
1542
add651ee 1543 let typeid = typeid_for_fnabi(self.tcx, fn_abi, options);
49aad941
FG
1544 let typeid_metadata = self.cx.typeid_metadata(typeid).unwrap();
1545
1546 // Test whether the function pointer is associated with the type identifier.
1547 let cond = self.type_test(llfn, typeid_metadata);
1548 let bb_pass = self.append_sibling_block("type_test.pass");
1549 let bb_fail = self.append_sibling_block("type_test.fail");
1550 self.cond_br(cond, bb_pass, bb_fail);
1551
1552 self.switch_to_block(bb_fail);
1553 self.abort();
1554 self.unreachable();
1555
1556 self.switch_to_block(bb_pass);
1557 }
1558 }
1559
1560 // Emits KCFI operand bundles.
1561 fn kcfi_operand_bundle(
1562 &mut self,
1563 fn_attrs: Option<&CodegenFnAttrs>,
1564 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1565 llfn: &'ll Value,
1566 ) -> Option<llvm::OperandBundleDef<'ll>> {
add651ee 1567 let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
ed00b5ec
FG
1568 let kcfi_bundle = if self.tcx.sess.is_sanitizer_kcfi_enabled()
1569 && let Some(fn_abi) = fn_abi
1570 && is_indirect_call
1571 {
1572 if let Some(fn_attrs) = fn_attrs
1573 && fn_attrs.no_sanitize.contains(SanitizerSet::KCFI)
1574 {
1575 return None;
1576 }
49aad941 1577
ed00b5ec
FG
1578 let mut options = TypeIdOptions::empty();
1579 if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() {
1580 options.insert(TypeIdOptions::GENERALIZE_POINTERS);
1581 }
1582 if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() {
1583 options.insert(TypeIdOptions::NORMALIZE_INTEGERS);
1584 }
49aad941 1585
ed00b5ec
FG
1586 let kcfi_typeid = kcfi_typeid_for_fnabi(self.tcx, fn_abi, options);
1587 Some(llvm::OperandBundleDef::new("kcfi", &[self.const_u32(kcfi_typeid)]))
1588 } else {
1589 None
1590 };
49aad941
FG
1591 kcfi_bundle
1592 }
a7813a04 1593}