use crate::common::Funclet;
use crate::context::CodegenCx;
use crate::llvm::{self, BasicBlock, False};
use crate::llvm::{AtomicOrdering, AtomicRmwBinOp, SynchronizationScope};
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
use libc::{c_char, c_uint};
use rustc_codegen_ssa::common::{IntPredicate, RealPredicate, TypeKind};
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::MemFlags;
use rustc_data_structures::const_cstr;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::{sym, Span};
use rustc_target::abi::{self, Align, Size};
use rustc_target::spec::{HasTargetSpec, Target};
use std::borrow::Cow;
use std::ffi::CStr;
use std::ops::{Deref, Range};
use std::ptr;
use tracing::debug;

// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll, 'tcx> {
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    pub cx: &'a CodegenCx<'ll, 'tcx>,
}

impl Drop for Builder<'a, 'll, 'tcx> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}

// FIXME(eddyb) use a checked constructor when they become `const fn`.
const EMPTY_C_STR: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"\0") };

/// Empty string, to be used where LLVM expects an instruction name, indicating
/// that the instruction is to be left unnamed (i.e. numbered, in textual IR).
// FIXME(eddyb) pass `&CStr` directly to FFI once it's a thin pointer.
const UNNAMED: *const c_char = EMPTY_C_STR.as_ptr();

impl BackendTypes for Builder<'_, 'll, 'tcx> {
    type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
    type Function = <CodegenCx<'ll, 'tcx> as BackendTypes>::Function;
    type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
    type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
    type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;

    type DIScope = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIScope;
    type DILocation = <CodegenCx<'ll, 'tcx> as BackendTypes>::DILocation;
    type DIVariable = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIVariable;
}

impl abi::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &abi::TargetDataLayout {
        self.cx.data_layout()
    }
}

impl ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.cx.tcx
    }
}

impl ty::layout::HasParamEnv<'tcx> for Builder<'_, '_, 'tcx> {
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.cx.param_env()
    }
}

impl HasTargetSpec for Builder<'_, '_, 'tcx> {
    fn target_spec(&self) -> &Target {
        &self.cx.target_spec()
    }
}

impl abi::LayoutOf for Builder<'_, '_, 'tcx> {
    type Ty = Ty<'tcx>;
    type TyAndLayout = TyAndLayout<'tcx>;

    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyAndLayout {
        self.cx.layout_of(ty)
    }
}

impl Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.cx
    }
}

impl HasCodegen<'tcx> for Builder<'_, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;
}

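// Generates, for each `name(args...) => LLVMBuildX` pair below, a
// `BuilderMethods` wrapper that forwards its operands directly to the
// corresponding LLVM-C instruction-building function.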
macro_rules! builder_methods_for_value_instructions {
    ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
            }
        })+
    }
}

impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
    fn new_block<'b>(cx: &'a CodegenCx<'ll, 'tcx>, llfn: &'ll Value, name: &'b str) -> Self {
        let mut bx = Builder::with_cx(cx);
        let llbb = unsafe {
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(cx.llcx, llfn, name.as_ptr())
        };
        bx.position_at_end(llbb);
        bx
    }

    fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe { llvm::LLVMCreateBuilderInContext(cx.llcx) };
        Builder { llbuilder, cx }
    }

    fn build_sibling_block(&self, name: &str) -> Self {
        Builder::new_block(self.cx, self.llfn(), name)
    }

    fn llbb(&self) -> &'ll BasicBlock {
        unsafe { llvm::LLVMGetInsertBlock(self.llbuilder) }
    }

    fn set_span(&mut self, _span: Span) {}

    fn position_at_end(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    fn ret_void(&mut self) {
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    fn br(&mut self, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    fn cond_br(
        &mut self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    fn switch(
        &mut self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)>,
    ) {
        let switch =
            unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
        for (on_val, dest) in cases {
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
        }
    }

    fn invoke(
        &mut self,
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("invoke {:?} with args ({:?})", llfn, args);

        let args = self.check_call("invoke", llfn, args);
        let bundle = funclet.map(|funclet| funclet.bundle());
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(
                self.llbuilder,
                llfn,
                args.as_ptr(),
                args.len() as c_uint,
                then,
                catch,
                bundle,
                UNNAMED,
            )
        }
    }

    fn unreachable(&mut self) {
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    builder_methods_for_value_instructions! {
        add(a, b) => LLVMBuildAdd,
        fadd(a, b) => LLVMBuildFAdd,
        sub(a, b) => LLVMBuildSub,
        fsub(a, b) => LLVMBuildFSub,
        mul(a, b) => LLVMBuildMul,
        fmul(a, b) => LLVMBuildFMul,
        udiv(a, b) => LLVMBuildUDiv,
        exactudiv(a, b) => LLVMBuildExactUDiv,
        sdiv(a, b) => LLVMBuildSDiv,
        exactsdiv(a, b) => LLVMBuildExactSDiv,
        fdiv(a, b) => LLVMBuildFDiv,
        urem(a, b) => LLVMBuildURem,
        srem(a, b) => LLVMBuildSRem,
        frem(a, b) => LLVMBuildFRem,
        shl(a, b) => LLVMBuildShl,
        lshr(a, b) => LLVMBuildLShr,
        ashr(a, b) => LLVMBuildAShr,
        and(a, b) => LLVMBuildAnd,
        or(a, b) => LLVMBuildOr,
        xor(a, b) => LLVMBuildXor,
        neg(x) => LLVMBuildNeg,
        fneg(x) => LLVMBuildFNeg,
        not(x) => LLVMBuildNot,
        unchecked_sadd(x, y) => LLVMBuildNSWAdd,
        unchecked_uadd(x, y) => LLVMBuildNUWAdd,
        unchecked_ssub(x, y) => LLVMBuildNSWSub,
        unchecked_usub(x, y) => LLVMBuildNUWSub,
        unchecked_smul(x, y) => LLVMBuildNSWMul,
        unchecked_umul(x, y) => LLVMBuildNUWMul,
    }

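    // The `*_fast` variants below additionally set the fast-math ("unsafe
    // algebra") flags on the emitted instruction, licensing LLVM to
    // reassociate and otherwise optimize as if the operation were exact.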
    fn fadd_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fsub_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fmul_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fdiv_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn frem_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

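    // Lowers a checked integer binop to the matching
    // `llvm.{s,u}{add,sub,mul}.with.overflow.*` intrinsic, first normalizing
    // `isize`/`usize` to the target's pointer-width integer type. The
    // intrinsic returns an aggregate of (result, overflow flag), which is
    // unpacked below via `extract_value`.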
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'_>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        use rustc_ast::IntTy::*;
        use rustc_ast::UintTy::*;
        use rustc_middle::ty::{Int, Uint};

        let new_kind = match ty.kind() {
            Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.pointer_width)),
            Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.pointer_width)),
            t @ (Uint(_) | Int(_)) => t.clone(),
            _ => panic!("tried to get overflow intrinsic for op applied to non-int type"),
        };

        let name = match oop {
            OverflowOp::Add => match new_kind {
                Int(I8) => "llvm.sadd.with.overflow.i8",
                Int(I16) => "llvm.sadd.with.overflow.i16",
                Int(I32) => "llvm.sadd.with.overflow.i32",
                Int(I64) => "llvm.sadd.with.overflow.i64",
                Int(I128) => "llvm.sadd.with.overflow.i128",

                Uint(U8) => "llvm.uadd.with.overflow.i8",
                Uint(U16) => "llvm.uadd.with.overflow.i16",
                Uint(U32) => "llvm.uadd.with.overflow.i32",
                Uint(U64) => "llvm.uadd.with.overflow.i64",
                Uint(U128) => "llvm.uadd.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Sub => match new_kind {
                Int(I8) => "llvm.ssub.with.overflow.i8",
                Int(I16) => "llvm.ssub.with.overflow.i16",
                Int(I32) => "llvm.ssub.with.overflow.i32",
                Int(I64) => "llvm.ssub.with.overflow.i64",
                Int(I128) => "llvm.ssub.with.overflow.i128",

                Uint(U8) => "llvm.usub.with.overflow.i8",
                Uint(U16) => "llvm.usub.with.overflow.i16",
                Uint(U32) => "llvm.usub.with.overflow.i32",
                Uint(U64) => "llvm.usub.with.overflow.i64",
                Uint(U128) => "llvm.usub.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Mul => match new_kind {
                Int(I8) => "llvm.smul.with.overflow.i8",
                Int(I16) => "llvm.smul.with.overflow.i16",
                Int(I32) => "llvm.smul.with.overflow.i32",
                Int(I64) => "llvm.smul.with.overflow.i64",
                Int(I128) => "llvm.smul.with.overflow.i128",

                Uint(U8) => "llvm.umul.with.overflow.i8",
                Uint(U16) => "llvm.umul.with.overflow.i16",
                Uint(U32) => "llvm.umul.with.overflow.i32",
                Uint(U64) => "llvm.umul.with.overflow.i64",
                Uint(U128) => "llvm.umul.with.overflow.i128",

                _ => unreachable!(),
            },
        };

        let intrinsic = self.get_intrinsic(&name);
        let res = self.call(intrinsic, &[lhs, rhs], None);
        (self.extract_value(res, 0), self.extract_value(res, 1))
    }

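    // Rust `bool`s are stored in memory as `i8` but used as `i1` immediates;
    // these two helpers convert between the two representations at load and
    // store boundaries.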
    fn from_immediate(&mut self, val: Self::Value) -> Self::Value {
        if self.cx().val_ty(val) == self.cx().type_i1() {
            self.zext(val, self.cx().type_i8())
        } else {
            val
        }
    }
    fn to_immediate_scalar(&mut self, val: Self::Value, scalar: &abi::Scalar) -> Self::Value {
        if scalar.is_bool() {
            return self.trunc(val, self.cx().type_i1());
        }
        val
    }

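    // `alloca` always emits the slot at the start of the function's first
    // basic block, where LLVM's mem2reg pass can promote it to a register;
    // `dynamic_alloca` emits at the current insertion point instead.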
    fn alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
        bx.dynamic_alloca(ty, align)
    }

    fn dynamic_alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
        unsafe {
            let alloca = llvm::LLVMBuildAlloca(self.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

    fn array_alloca(&mut self, ty: &'ll Type, len: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let alloca = llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

    fn load(&mut self, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }

    fn volatile_load(&mut self, ptr: &'ll Value) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
            llvm::LLVMSetVolatile(load, llvm::True);
            load
        }
    }

    fn atomic_load(
        &mut self,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                UNNAMED,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }

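    // Loads a place as an `OperandRef`, picking the cheapest faithful
    // representation: the constant initializer when the place refers to a
    // global constant, an immediate for scalar layouts (with range/nonnull
    // metadata attached), a pair of immediates for scalar pairs, or a
    // by-reference operand otherwise.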
    fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx, &'ll Value> {
        debug!("PlaceRef::load: {:?}", place);

        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());

        if place.layout.is_zst() {
            return OperandRef::new_zst(self, place.layout);
        }

        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: &abi::Scalar,
        ) {
            let vr = scalar.valid_range.clone();
            match scalar.value {
                abi::Int(..) => {
                    let range = scalar.valid_range_exclusive(bx);
                    if range.start != range.end {
                        bx.range_metadata(load, range);
                    }
                }
                abi::Pointer if vr.start() < vr.end() && !vr.contains(&0) => {
                    bx.nonnull_metadata(load);
                }
                _ => {}
            }
        }

        let val = if let Some(llextra) = place.llextra {
            OperandValue::Ref(place.llval, Some(llextra), place.align)
        } else if place.layout.is_llvm_immediate() {
            let mut const_llval = None;
            unsafe {
                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
                    if llvm::LLVMIsGlobalConstant(global) == llvm::True {
                        const_llval = llvm::LLVMGetInitializer(global);
                    }
                }
            }
            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(place.llval, place.align);
                if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
                    scalar_load_metadata(self, load, scalar);
                }
                load
            });
            OperandValue::Immediate(self.to_immediate(llval, place.layout))
        } else if let abi::Abi::ScalarPair(ref a, ref b) = place.layout.abi {
            let b_offset = a.value.size(self).align_to(b.value.align(self).abi);

            let mut load = |i, scalar: &abi::Scalar, align| {
                let llptr = self.struct_gep(place.llval, i as u64);
                let load = self.load(llptr, align);
                scalar_load_metadata(self, load, scalar);
                self.to_immediate_scalar(load, scalar)
            };

            OperandValue::Pair(
                load(0, a, place.align),
                load(1, b, place.align.restrict_for_offset(b_offset)),
            )
        } else {
            OperandValue::Ref(place.llval, None, place.align)
        };

        OperandRef { val, layout: place.layout }
    }

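    // Emits a counted loop (header / body / next blocks) that stores
    // `cg_elem` into each of the `count` elements of `dest`, advancing an
    // element pointer through a phi in the header; returns the builder
    // positioned in the exit block.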
    fn write_operand_repeatedly(
        mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) -> Self {
        let zero = self.const_usize(0);
        let count = self.const_usize(count);
        let start = dest.project_index(&mut self, zero).llval;
        let end = dest.project_index(&mut self, count).llval;

        let mut header_bx = self.build_sibling_block("repeat_loop_header");
        let mut body_bx = self.build_sibling_block("repeat_loop_body");
        let next_bx = self.build_sibling_block("repeat_loop_next");

        self.br(header_bx.llbb());
        let current = header_bx.phi(self.val_ty(start), &[start], &[self.llbb()]);

        let keep_going = header_bx.icmp(IntPredicate::IntNE, current, end);
        header_bx.cond_br(keep_going, body_bx.llbb(), next_bx.llbb());

        let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
        cg_elem
            .val
            .store(&mut body_bx, PlaceRef::new_sized_aligned(current, cg_elem.layout, align));

        let next = body_bx.inbounds_gep(current, &[self.const_usize(1)]);
        body_bx.br(header_bx.llbb());
        header_bx.add_incoming_to_phi(current, next, body_bx.llbb());

        next_bx
    }

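    // Attaches `!range` metadata to an integer load, telling LLVM which
    // values the loaded scalar may take.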
    fn range_metadata(&mut self, load: &'ll Value, range: Range<u128>) {
        if self.sess().target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks an i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            let llty = self.cx.val_ty(load);
            let v = [
                self.cx.const_uint_big(llty, range.start),
                self.cx.const_uint_big(llty, range.end),
            ];

            llvm::LLVMSetMetadata(
                load,
                llvm::MD_range as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, v.as_ptr(), v.len() as c_uint),
            );
        }
    }

    fn nonnull_metadata(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(
                load,
                llvm::MD_nonnull as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0),
            );
        }
    }

    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

    fn store_with_flags(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align =
                if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.const_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

    fn atomic_store(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) {
        debug!("Store {:?} -> {:?}", val, ptr);
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }

    fn gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEP(
                self.llbuilder,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
            )
        }
    }

    fn inbounds_gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
            )
        }
    }

    fn struct_gep(&mut self, ptr: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, UNNAMED) }
    }

    /* Casts */
    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptoui_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> Option<&'ll Value> {
        // WebAssembly has saturating floating point to integer casts if the
        // `nontrapping-fptoint` target feature is activated. We'll use those if
        // they are available.
        if self.sess().target.arch == "wasm32"
            && self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
        {
            let src_ty = self.cx.val_ty(val);
            let float_width = self.cx.float_width(src_ty);
            let int_width = self.cx.int_width(dest_ty);
            let name = match (int_width, float_width) {
                (32, 32) => Some("llvm.wasm.trunc.saturate.unsigned.i32.f32"),
                (32, 64) => Some("llvm.wasm.trunc.saturate.unsigned.i32.f64"),
                (64, 32) => Some("llvm.wasm.trunc.saturate.unsigned.i64.f32"),
                (64, 64) => Some("llvm.wasm.trunc.saturate.unsigned.i64.f64"),
                _ => None,
            };
            if let Some(name) = name {
                let intrinsic = self.get_intrinsic(name);
                return Some(self.call(intrinsic, &[val], None));
            }
        }
        None
    }

    fn fptosi_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> Option<&'ll Value> {
        // WebAssembly has saturating floating point to integer casts if the
        // `nontrapping-fptoint` target feature is activated. We'll use those if
        // they are available.
        if self.sess().target.arch == "wasm32"
            && self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
        {
            let src_ty = self.cx.val_ty(val);
            let float_width = self.cx.float_width(src_ty);
            let int_width = self.cx.int_width(dest_ty);
            let name = match (int_width, float_width) {
                (32, 32) => Some("llvm.wasm.trunc.saturate.signed.i32.f32"),
                (32, 64) => Some("llvm.wasm.trunc.saturate.signed.i32.f64"),
                (64, 32) => Some("llvm.wasm.trunc.saturate.signed.i64.f32"),
                (64, 64) => Some("llvm.wasm.trunc.saturate.signed.i64.f64"),
                _ => None,
            };
            if let Some(name) = name {
                let intrinsic = self.get_intrinsic(name);
                return Some(self.call(intrinsic, &[val], None));
            }
        }
        None
    }

    fn fptosui_may_trap(&self, val: &'ll Value, dest_ty: &'ll Type) -> bool {
        // Most of the time we'll be generating the `fptosi` or `fptoui`
        // instruction for floating-point-to-integer conversions. These
        // instructions by definition in LLVM do not trap. For the WebAssembly
        // target, however, we'll lower in some cases to intrinsic calls instead
        // which may trap. If we detect that this is a situation where we'll be
        // using the intrinsics then we report that the call may trap, which
        // callers might need to handle.
        if !self.wasm_and_missing_nontrapping_fptoint() {
            return false;
        }
        let src_ty = self.cx.val_ty(val);
        let float_width = self.cx.float_width(src_ty);
        let int_width = self.cx.int_width(dest_ty);
        matches!((int_width, float_width), (32, 32) | (32, 64) | (64, 32) | (64, 64))
    }

    fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        // When we can, use the native wasm intrinsics which have tighter
        // codegen. Note that this has a semantic difference in that the
        // intrinsic can trap whereas `fptoui` never traps. That difference,
        // however, is handled by `fptosui_may_trap` above.
        //
        // Note that we skip the wasm intrinsics for vector types where `fptoui`
        // must be used instead.
        if self.wasm_and_missing_nontrapping_fptoint() {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                let name = match (int_width, float_width) {
                    (32, 32) => Some("llvm.wasm.trunc.unsigned.i32.f32"),
                    (32, 64) => Some("llvm.wasm.trunc.unsigned.i32.f64"),
                    (64, 32) => Some("llvm.wasm.trunc.unsigned.i64.f32"),
                    (64, 64) => Some("llvm.wasm.trunc.unsigned.i64.f64"),
                    _ => None,
                };
                if let Some(name) = name {
                    let intrinsic = self.get_intrinsic(name);
                    return self.call(intrinsic, &[val], None);
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        if self.wasm_and_missing_nontrapping_fptoint() {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                let name = match (int_width, float_width) {
                    (32, 32) => Some("llvm.wasm.trunc.signed.i32.f32"),
                    (32, 64) => Some("llvm.wasm.trunc.signed.i32.f64"),
                    (64, 32) => Some("llvm.wasm.trunc.signed.i64.f32"),
                    (64, 64) => Some("llvm.wasm.trunc.signed.i64.f64"),
                    _ => None,
                };
                if let Some(name) = name {
                    let intrinsic = self.get_intrinsic(name);
                    return self.call(intrinsic, &[val], None);
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed) }
    }

    fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    /* Comparisons */
    fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let op = llvm::IntPredicate::from_generic(op);
        unsafe { llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }

    fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }

    /* Miscellaneous instructions */
    fn memcpy(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
    ) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memcpy.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.type_i8p());
        let src = self.pointercast(src, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemCpy(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            );
        }
    }

    fn memmove(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
    ) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memmove.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.type_i8p());
        let src = self.pointercast(src, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemMove(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            );
        }
    }

    fn memset(
        &mut self,
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) {
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let ptr = self.pointercast(ptr, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemSet(
                self.llbuilder,
                ptr,
                align.bytes() as c_uint,
                fill_byte,
                size,
                is_volatile,
            );
        }
    }

    fn select(
        &mut self,
        cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED) }
    }

    fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }

    fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED) }
    }

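    // Splats a scalar across a vector using the standard LLVM idiom: insert
    // the element into lane 0 of an undef vector, then shufflevector with an
    // all-zeros mask to broadcast lane 0 into every lane.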
    fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
            let vec_i32_ty = self.type_vector(self.type_i32(), num_elts as u64);
            self.shuffle_vector(vec, undef, self.const_null(vec_i32_ty))
        }
    }

    fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED) }
    }

    fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint, UNNAMED) }
    }

    fn landing_pad(
        &mut self,
        ty: &'ll Type,
        pers_fn: &'ll Value,
        num_clauses: usize,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn, num_clauses as c_uint, UNNAMED)
        }
    }

    fn set_cleanup(&mut self, landing_pad: &'ll Value) {
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }

    fn resume(&mut self, exn: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildResume(self.llbuilder, exn) }
    }

    fn cleanup_pad(&mut self, parent: Option<&'ll Value>, args: &[&'ll Value]) -> Funclet<'ll> {
        let name = const_cstr!("cleanuppad");
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(
                self.llbuilder,
                parent,
                args.len() as c_uint,
                args.as_ptr(),
                name.as_ptr(),
            )
        };
        Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
    }

    fn cleanup_ret(
        &mut self,
        funclet: &Funclet<'ll>,
        unwind: Option<&'ll BasicBlock>,
    ) -> &'ll Value {
        let ret =
            unsafe { llvm::LLVMRustBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind) };
        ret.expect("LLVM does not have support for cleanupret")
    }

    fn catch_pad(&mut self, parent: &'ll Value, args: &[&'ll Value]) -> Funclet<'ll> {
        let name = const_cstr!("catchpad");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(
                self.llbuilder,
                parent,
                args.len() as c_uint,
                args.as_ptr(),
                name.as_ptr(),
            )
        };
        Funclet::new(ret.expect("LLVM does not have support for catchpad"))
    }

    fn catch_switch(
        &mut self,
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
        num_handlers: usize,
    ) -> &'ll Value {
        let name = const_cstr!("catchswitch");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(
                self.llbuilder,
                parent,
                unwind,
                num_handlers as c_uint,
                name.as_ptr(),
            )
        };
        ret.expect("LLVM does not have support for catchswitch")
    }

    fn add_handler(&mut self, catch_switch: &'ll Value, handler: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }

    fn set_personality_fn(&mut self, personality: &'ll Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }

    // Atomic Operations
    fn atomic_cmpxchg(
        &mut self,
        dst: &'ll Value,
        cmp: &'ll Value,
        src: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        failure_order: rustc_codegen_ssa::common::AtomicOrdering,
        weak: bool,
    ) -> &'ll Value {
        let weak = if weak { llvm::True } else { llvm::False };
        unsafe {
            llvm::LLVMRustBuildAtomicCmpXchg(
                self.llbuilder,
                dst,
                cmp,
                src,
                AtomicOrdering::from_generic(order),
                AtomicOrdering::from_generic(failure_order),
                weak,
            )
        }
    }
    fn atomic_rmw(
        &mut self,
        op: rustc_codegen_ssa::common::AtomicRmwBinOp,
        dst: &'ll Value,
        src: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
                AtomicOrdering::from_generic(order),
                False,
            )
        }
    }

    fn atomic_fence(
        &mut self,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        scope: rustc_codegen_ssa::common::SynchronizationScope,
    ) {
        unsafe {
            llvm::LLVMRustBuildAtomicFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
                SynchronizationScope::from_generic(scope),
            );
        }
    }

    fn set_invariant_load(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(
                load,
                llvm::MD_invariant_load as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0),
            );
        }
    }

    fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start.p0i8", ptr, size);
    }

    fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end.p0i8", ptr, size);
    }

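    // Emits a call to `llvm.instrprof.increment`, bumping the `index`-th of
    // `num_counters` coverage counters for the function identified by
    // `fn_name`/`hash`.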
    fn instrprof_increment(
        &mut self,
        fn_name: &'ll Value,
        hash: &'ll Value,
        num_counters: &'ll Value,
        index: &'ll Value,
    ) {
        debug!(
            "instrprof_increment() with args ({:?}, {:?}, {:?}, {:?})",
            fn_name, hash, num_counters, index
        );

        let llfn = unsafe { llvm::LLVMRustGetInstrProfIncrementIntrinsic(self.cx().llmod) };
        let args = &[fn_name, hash, num_counters, index];
        let args = self.check_call("call", llfn, args);

        unsafe {
            let _ = llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                None,
            );
        }
    }

    fn call(
        &mut self,
        llfn: &'ll Value,
        args: &[&'ll Value],
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("call {:?} with args ({:?})", llfn, args);

        let args = self.check_call("call", llfn, args);
        let bundle = funclet.map(|funclet| funclet.bundle());
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundle,
            )
        }
    }

    fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }

    unsafe fn delete_basic_block(&mut self, bb: &'ll BasicBlock) {
        llvm::LLVMDeleteBasicBlock(bb);
    }

    fn do_not_inline(&mut self, llret: &'ll Value) {
        llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
    }
}

impl StaticBuilderMethods for Builder<'a, 'll, 'tcx> {
    fn get_static(&mut self, def_id: DefId) -> &'ll Value {
        // Forward to the `get_static` method of `CodegenCx`
        self.cx().get_static(def_id)
    }
}

impl Builder<'a, 'll, 'tcx> {
    pub fn llfn(&self) -> &'ll Value {
        unsafe { llvm::LLVMGetBasicBlockParent(self.llbb()) }
    }

    fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }

    pub fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs) }
    }

    pub fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs) }
    }

    pub fn insert_element(
        &mut self,
        vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED) }
    }

    pub fn shuffle_vector(
        &mut self,
        v1: &'ll Value,
        v2: &'ll Value,
        mask: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED) }
    }

    pub fn vector_reduce_fadd(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src) }
    }
    pub fn vector_reduce_fmul(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src) }
    }
    pub fn vector_reduce_fadd_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmul_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
    }
    pub fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
    }
    pub fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
    }
    pub fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
    }
    pub fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
    }
    pub fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false)
        }
    }
    pub fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false)
        }
    }
    pub fn vector_reduce_fmin_fast(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr =
                llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmax_fast(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr =
                llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
    }
    pub fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
    }

    pub fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    pub fn catch_ret(&mut self, funclet: &Funclet<'ll>, unwind: &'ll BasicBlock) -> &'ll Value {
        let ret =
            unsafe { llvm::LLVMRustBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind) };
        ret.expect("LLVM does not have support for catchret")
    }

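    // Checks that the destination pointer's pointee type matches the type of
    // the value being stored; if not, bitcasts the pointer so the store
    // type-checks (the mismatch is logged at `debug` level).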
    fn check_store(&mut self, val: &'ll Value, ptr: &'ll Value) -> &'ll Value {
        let dest_ptr_ty = self.cx.val_ty(ptr);
        let stored_ty = self.cx.val_ty(val);
        let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);

        assert_eq!(self.cx.type_kind(dest_ptr_ty), TypeKind::Pointer);

        if dest_ptr_ty == stored_ptr_ty {
            ptr
        } else {
            debug!(
                "type mismatch in store. \
                    Expected {:?}, got {:?}; inserting bitcast",
                dest_ptr_ty, stored_ptr_ty
            );
            self.bitcast(ptr, stored_ptr_ty)
        }
    }

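    // Compares each argument's type against the callee's parameter types and
    // bitcasts any mismatched argument, returning the original slice
    // untouched (borrowed) when everything already matches.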
    fn check_call<'b>(
        &mut self,
        typ: &str,
        llfn: &'ll Value,
        args: &'b [&'ll Value],
    ) -> Cow<'b, [&'ll Value]> {
        let mut fn_ty = self.cx.val_ty(llfn);
        // Strip off pointers
        while self.cx.type_kind(fn_ty) == TypeKind::Pointer {
            fn_ty = self.cx.element_type(fn_ty);
        }

        assert!(
            self.cx.type_kind(fn_ty) == TypeKind::Function,
            "builder::{} not passed a function, but {:?}",
            typ,
            fn_ty
        );

        let param_tys = self.cx.func_params_types(fn_ty);

        let all_args_match = param_tys
            .iter()
            .zip(args.iter().map(|&v| self.val_ty(v)))
            .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);

        if all_args_match {
            return Cow::Borrowed(args);
        }

        let casted_args: Vec<_> = param_tys
            .into_iter()
            .zip(args.iter())
            .enumerate()
            .map(|(i, (expected_ty, &actual_val))| {
                let actual_ty = self.val_ty(actual_val);
                if expected_ty != actual_ty {
                    debug!(
                        "type mismatch in function call of {:?}. \
                            Expected {:?} for param {}, got {:?}; injecting bitcast",
                        llfn, expected_ty, i, actual_ty
                    );
                    self.bitcast(actual_val, expected_ty)
                } else {
                    actual_val
                }
            })
            .collect();

        Cow::Owned(casted_args)
    }

    pub fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }

    fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
        let size = size.bytes();
        if size == 0 {
            return;
        }

        if !self.cx().sess().emit_lifetime_markers() {
            return;
        }

        let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);

        let ptr = self.pointercast(ptr, self.cx.type_i8p());
        self.call(lifetime_intrinsic, &[self.cx.const_u64(size), ptr], None);
    }

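    // Builds a phi node with one incoming (value, predecessor-block) pair per
    // entry in `vals`/`bbs`; further edges can be added later with
    // `add_incoming_to_phi`, as `write_operand_repeatedly` does for its loop
    // back-edge.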
    pub(crate) fn phi(
        &mut self,
        ty: &'ll Type,
        vals: &[&'ll Value],
        bbs: &[&'ll BasicBlock],
    ) -> &'ll Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = unsafe { llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED) };
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(), bbs.as_ptr(), vals.len() as c_uint);
            phi
        }
    }

    fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }

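    // True when targeting wasm32 without the `nontrapping-fptoint` feature,
    // i.e. when the trapping `llvm.wasm.trunc.*` intrinsics (rather than
    // plain `fptoui`/`fptosi`) are used for float-to-int casts.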
    fn wasm_and_missing_nontrapping_fptoint(&self) -> bool {
        self.sess().target.arch == "wasm32"
            && !self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
    }
}