// src/librustc_codegen_llvm/builder.rs (upstream rustc 1.47.0)

use crate::common::Funclet;
use crate::context::CodegenCx;
use crate::llvm::{self, BasicBlock, False};
use crate::llvm::{AtomicOrdering, AtomicRmwBinOp, SynchronizationScope};
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
use libc::{c_char, c_uint};
use rustc_codegen_ssa::base::to_immediate;
use rustc_codegen_ssa::common::{IntPredicate, RealPredicate, TypeKind};
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::MemFlags;
use rustc_data_structures::const_cstr;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::sym;
use rustc_target::abi::{self, Align, Size};
use rustc_target::spec::{HasTargetSpec, Target};
use std::borrow::Cow;
use std::ffi::CStr;
use std::iter::TrustedLen;
use std::ops::{Deref, Range};
use std::ptr;
use tracing::debug;

// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll, 'tcx> {
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    pub cx: &'a CodegenCx<'ll, 'tcx>,
}

impl Drop for Builder<'a, 'll, 'tcx> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}

// FIXME(eddyb) use a checked constructor when they become `const fn`.
const EMPTY_C_STR: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"\0") };

/// Empty string, to be used where LLVM expects an instruction name, indicating
/// that the instruction is to be left unnamed (i.e. numbered, in textual IR).
// FIXME(eddyb) pass `&CStr` directly to FFI once it's a thin pointer.
const UNNAMED: *const c_char = EMPTY_C_STR.as_ptr();

impl BackendTypes for Builder<'_, 'll, 'tcx> {
    type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
    type Function = <CodegenCx<'ll, 'tcx> as BackendTypes>::Function;
    type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
    type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
    type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;

    type DIScope = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIScope;
    type DIVariable = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIVariable;
}

impl abi::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &abi::TargetDataLayout {
        self.cx.data_layout()
    }
}

impl ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.cx.tcx
    }
}

impl ty::layout::HasParamEnv<'tcx> for Builder<'_, '_, 'tcx> {
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.cx.param_env()
    }
}

impl HasTargetSpec for Builder<'_, '_, 'tcx> {
    fn target_spec(&self) -> &Target {
        &self.cx.target_spec()
    }
}

impl abi::LayoutOf for Builder<'_, '_, 'tcx> {
    type Ty = Ty<'tcx>;
    type TyAndLayout = TyAndLayout<'tcx>;

    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyAndLayout {
        self.cx.layout_of(ty)
    }
}

impl Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.cx
    }
}

impl HasCodegen<'tcx> for Builder<'_, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;
}

macro_rules! builder_methods_for_value_instructions {
    ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
            }
        })+
    }
}
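
// Illustrative only: for an entry like `add(a, b) => LLVMBuildAdd` in the
// invocation further below, the macro expands to roughly:
//
//     fn add(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
//         unsafe { llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED) }
//     }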

impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
    fn new_block<'b>(cx: &'a CodegenCx<'ll, 'tcx>, llfn: &'ll Value, name: &'b str) -> Self {
        let mut bx = Builder::with_cx(cx);
        let llbb = unsafe {
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(cx.llcx, llfn, name.as_ptr())
        };
        bx.position_at_end(llbb);
        bx
    }

    fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe { llvm::LLVMCreateBuilderInContext(cx.llcx) };
        Builder { llbuilder, cx }
    }

    fn build_sibling_block(&self, name: &str) -> Self {
        Builder::new_block(self.cx, self.llfn(), name)
    }

    fn llbb(&self) -> &'ll BasicBlock {
        unsafe { llvm::LLVMGetInsertBlock(self.llbuilder) }
    }

    fn position_at_end(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    fn ret_void(&mut self) {
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    fn br(&mut self, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    fn cond_br(
        &mut self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    fn switch(
        &mut self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)> + TrustedLen,
    ) {
        let switch =
            unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
        for (on_val, dest) in cases {
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
        }
    }

    fn invoke(
        &mut self,
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("invoke {:?} with args ({:?})", llfn, args);

        let args = self.check_call("invoke", llfn, args);
        let bundle = funclet.map(|funclet| funclet.bundle());
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(
                self.llbuilder,
                llfn,
                args.as_ptr(),
                args.len() as c_uint,
                then,
                catch,
                bundle,
                UNNAMED,
            )
        }
    }

    fn unreachable(&mut self) {
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    builder_methods_for_value_instructions! {
        add(a, b) => LLVMBuildAdd,
        fadd(a, b) => LLVMBuildFAdd,
        sub(a, b) => LLVMBuildSub,
        fsub(a, b) => LLVMBuildFSub,
        mul(a, b) => LLVMBuildMul,
        fmul(a, b) => LLVMBuildFMul,
        udiv(a, b) => LLVMBuildUDiv,
        exactudiv(a, b) => LLVMBuildExactUDiv,
        sdiv(a, b) => LLVMBuildSDiv,
        exactsdiv(a, b) => LLVMBuildExactSDiv,
        fdiv(a, b) => LLVMBuildFDiv,
        urem(a, b) => LLVMBuildURem,
        srem(a, b) => LLVMBuildSRem,
        frem(a, b) => LLVMBuildFRem,
        shl(a, b) => LLVMBuildShl,
        lshr(a, b) => LLVMBuildLShr,
        ashr(a, b) => LLVMBuildAShr,
        and(a, b) => LLVMBuildAnd,
        or(a, b) => LLVMBuildOr,
        xor(a, b) => LLVMBuildXor,
        neg(x) => LLVMBuildNeg,
        fneg(x) => LLVMBuildFNeg,
        not(x) => LLVMBuildNot,
        unchecked_sadd(x, y) => LLVMBuildNSWAdd,
        unchecked_uadd(x, y) => LLVMBuildNUWAdd,
        unchecked_ssub(x, y) => LLVMBuildNSWSub,
        unchecked_usub(x, y) => LLVMBuildNUWSub,
        unchecked_smul(x, y) => LLVMBuildNSWMul,
        unchecked_umul(x, y) => LLVMBuildNUWMul,
    }

    fn fadd_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fsub_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fmul_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fdiv_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn frem_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'_>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        use rustc_ast::IntTy::*;
        use rustc_ast::UintTy::*;
        use rustc_middle::ty::{Int, Uint};

        let new_kind = match ty.kind {
            Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.ptr_width)),
            Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.ptr_width)),
            ref t @ (Uint(_) | Int(_)) => t.clone(),
            _ => panic!("tried to get overflow intrinsic for op applied to non-int type"),
        };

        let name = match oop {
            OverflowOp::Add => match new_kind {
                Int(I8) => "llvm.sadd.with.overflow.i8",
                Int(I16) => "llvm.sadd.with.overflow.i16",
                Int(I32) => "llvm.sadd.with.overflow.i32",
                Int(I64) => "llvm.sadd.with.overflow.i64",
                Int(I128) => "llvm.sadd.with.overflow.i128",

                Uint(U8) => "llvm.uadd.with.overflow.i8",
                Uint(U16) => "llvm.uadd.with.overflow.i16",
                Uint(U32) => "llvm.uadd.with.overflow.i32",
                Uint(U64) => "llvm.uadd.with.overflow.i64",
                Uint(U128) => "llvm.uadd.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Sub => match new_kind {
                Int(I8) => "llvm.ssub.with.overflow.i8",
                Int(I16) => "llvm.ssub.with.overflow.i16",
                Int(I32) => "llvm.ssub.with.overflow.i32",
                Int(I64) => "llvm.ssub.with.overflow.i64",
                Int(I128) => "llvm.ssub.with.overflow.i128",

                Uint(U8) => "llvm.usub.with.overflow.i8",
                Uint(U16) => "llvm.usub.with.overflow.i16",
                Uint(U32) => "llvm.usub.with.overflow.i32",
                Uint(U64) => "llvm.usub.with.overflow.i64",
                Uint(U128) => "llvm.usub.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Mul => match new_kind {
                Int(I8) => "llvm.smul.with.overflow.i8",
                Int(I16) => "llvm.smul.with.overflow.i16",
                Int(I32) => "llvm.smul.with.overflow.i32",
                Int(I64) => "llvm.smul.with.overflow.i64",
                Int(I128) => "llvm.smul.with.overflow.i128",

                Uint(U8) => "llvm.umul.with.overflow.i8",
                Uint(U16) => "llvm.umul.with.overflow.i16",
                Uint(U32) => "llvm.umul.with.overflow.i32",
                Uint(U64) => "llvm.umul.with.overflow.i64",
                Uint(U128) => "llvm.umul.with.overflow.i128",

                _ => unreachable!(),
            },
        };

        let intrinsic = self.get_intrinsic(&name);
        let res = self.call(intrinsic, &[lhs, rhs], None);
        (self.extract_value(res, 0), self.extract_value(res, 1))
    }
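
    // NB: each `llvm.*.with.overflow.*` intrinsic used above returns a
    // `{ iN, i1 }` pair, so `checked_binop` hands back `(result, overflow_bit)`;
    // the `i1` overflow bit is what MIR-level overflow checks branch on.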

    fn alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
        bx.dynamic_alloca(ty, align)
    }

    fn dynamic_alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
        unsafe {
            let alloca = llvm::LLVMBuildAlloca(self.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

    fn array_alloca(&mut self, ty: &'ll Type, len: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let alloca = llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

    fn load(&mut self, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }

    fn volatile_load(&mut self, ptr: &'ll Value) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
            llvm::LLVMSetVolatile(load, llvm::True);
            load
        }
    }

    fn atomic_load(
        &mut self,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                UNNAMED,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }

    fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx, &'ll Value> {
        debug!("PlaceRef::load: {:?}", place);

        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());

        if place.layout.is_zst() {
            return OperandRef::new_zst(self, place.layout);
        }

        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: &abi::Scalar,
        ) {
            let vr = scalar.valid_range.clone();
            match scalar.value {
                abi::Int(..) => {
                    let range = scalar.valid_range_exclusive(bx);
                    if range.start != range.end {
                        bx.range_metadata(load, range);
                    }
                }
                abi::Pointer if vr.start() < vr.end() && !vr.contains(&0) => {
                    bx.nonnull_metadata(load);
                }
                _ => {}
            }
        }

        let val = if let Some(llextra) = place.llextra {
            OperandValue::Ref(place.llval, Some(llextra), place.align)
        } else if place.layout.is_llvm_immediate() {
            let mut const_llval = None;
            unsafe {
                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
                    if llvm::LLVMIsGlobalConstant(global) == llvm::True {
                        const_llval = llvm::LLVMGetInitializer(global);
                    }
                }
            }
            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(place.llval, place.align);
                if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
                    scalar_load_metadata(self, load, scalar);
                }
                load
            });
            OperandValue::Immediate(to_immediate(self, llval, place.layout))
        } else if let abi::Abi::ScalarPair(ref a, ref b) = place.layout.abi {
            let b_offset = a.value.size(self).align_to(b.value.align(self).abi);

            let mut load = |i, scalar: &abi::Scalar, align| {
                let llptr = self.struct_gep(place.llval, i as u64);
                let load = self.load(llptr, align);
                scalar_load_metadata(self, load, scalar);
                if scalar.is_bool() { self.trunc(load, self.type_i1()) } else { load }
            };

            OperandValue::Pair(
                load(0, a, place.align),
                load(1, b, place.align.restrict_for_offset(b_offset)),
            )
        } else {
            OperandValue::Ref(place.llval, None, place.align)
        };

        OperandRef { val, layout: place.layout }
    }

    fn write_operand_repeatedly(
        mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) -> Self {
        let zero = self.const_usize(0);
        let count = self.const_usize(count);
        let start = dest.project_index(&mut self, zero).llval;
        let end = dest.project_index(&mut self, count).llval;

        let mut header_bx = self.build_sibling_block("repeat_loop_header");
        let mut body_bx = self.build_sibling_block("repeat_loop_body");
        let next_bx = self.build_sibling_block("repeat_loop_next");

        self.br(header_bx.llbb());
        let current = header_bx.phi(self.val_ty(start), &[start], &[self.llbb()]);

        let keep_going = header_bx.icmp(IntPredicate::IntNE, current, end);
        header_bx.cond_br(keep_going, body_bx.llbb(), next_bx.llbb());

        let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
        cg_elem
            .val
            .store(&mut body_bx, PlaceRef::new_sized_aligned(current, cg_elem.layout, align));

        let next = body_bx.inbounds_gep(current, &[self.const_usize(1)]);
        body_bx.br(header_bx.llbb());
        header_bx.add_incoming_to_phi(current, next, body_bx.llbb());

        next_bx
    }
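
    // Rough textual-IR sketch of the control flow `write_operand_repeatedly`
    // builds above (block names match the sibling blocks created there):
    //
    //         br %repeat_loop_header
    //     repeat_loop_header:
    //         %current = phi [ %start, %entry ], [ %next, %repeat_loop_body ]
    //         %keep_going = icmp ne %current, %end
    //         br %keep_going, %repeat_loop_body, %repeat_loop_next
    //     repeat_loop_body:
    //         ; store cg_elem at %current
    //         %next = getelementptr inbounds %current, 1
    //         br %repeat_loop_header
    //     repeat_loop_next:
    //         ; codegen continues here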

    fn range_metadata(&mut self, load: &'ll Value, range: Range<u128>) {
        if self.sess().target.target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks an i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            let llty = self.cx.val_ty(load);
            let v = [
                self.cx.const_uint_big(llty, range.start),
                self.cx.const_uint_big(llty, range.end),
            ];

            llvm::LLVMSetMetadata(
                load,
                llvm::MD_range as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, v.as_ptr(), v.len() as c_uint),
            );
        }
    }
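
    // For example, loading a `bool` (layout-wise an i8 that is valid in 0..=1)
    // gets `!range` metadata with the half-open bounds `[0, 2)`, letting LLVM
    // assume the loaded byte is 0 or 1.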

    fn nonnull_metadata(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(
                load,
                llvm::MD_nonnull as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0),
            );
        }
    }

    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

    fn store_with_flags(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align =
                if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1], the `!nontemporal` metadata on a store
                // must *always* point to a metadata node holding the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.const_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

    fn atomic_store(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) {
        debug!("Store {:?} -> {:?}", val, ptr);
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }

    fn gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEP(
                self.llbuilder,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
            )
        }
    }

    fn inbounds_gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
            )
        }
    }

    fn struct_gep(&mut self, ptr: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, UNNAMED) }
    }

    /* Casts */
    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptoui_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> Option<&'ll Value> {
        // WebAssembly has saturating floating point to integer casts if the
        // `nontrapping-fptoint` target feature is activated. We'll use those if
        // they are available.
        if self.sess().target.target.arch == "wasm32"
            && self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
        {
            let src_ty = self.cx.val_ty(val);
            let float_width = self.cx.float_width(src_ty);
            let int_width = self.cx.int_width(dest_ty);
            let name = match (int_width, float_width) {
                (32, 32) => Some("llvm.wasm.trunc.saturate.unsigned.i32.f32"),
                (32, 64) => Some("llvm.wasm.trunc.saturate.unsigned.i32.f64"),
                (64, 32) => Some("llvm.wasm.trunc.saturate.unsigned.i64.f32"),
                (64, 64) => Some("llvm.wasm.trunc.saturate.unsigned.i64.f64"),
                _ => None,
            };
            if let Some(name) = name {
                let intrinsic = self.get_intrinsic(name);
                return Some(self.call(intrinsic, &[val], None));
            }
        }
        None
    }

    fn fptosi_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> Option<&'ll Value> {
        // WebAssembly has saturating floating point to integer casts if the
        // `nontrapping-fptoint` target feature is activated. We'll use those if
        // they are available.
        if self.sess().target.target.arch == "wasm32"
            && self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
        {
            let src_ty = self.cx.val_ty(val);
            let float_width = self.cx.float_width(src_ty);
            let int_width = self.cx.int_width(dest_ty);
            let name = match (int_width, float_width) {
                (32, 32) => Some("llvm.wasm.trunc.saturate.signed.i32.f32"),
                (32, 64) => Some("llvm.wasm.trunc.saturate.signed.i32.f64"),
                (64, 32) => Some("llvm.wasm.trunc.saturate.signed.i64.f32"),
                (64, 64) => Some("llvm.wasm.trunc.saturate.signed.i64.f64"),
                _ => None,
            };
            if let Some(name) = name {
                let intrinsic = self.get_intrinsic(name);
                return Some(self.call(intrinsic, &[val], None));
            }
        }
        None
    }

    fn fptosui_may_trap(&self, val: &'ll Value, dest_ty: &'ll Type) -> bool {
        // Most of the time we'll be generating the `fptosi` or `fptoui`
        // instruction for floating-point-to-integer conversions. These
        // instructions by definition in LLVM do not trap. For the WebAssembly
        // target, however, we'll lower in some cases to intrinsic calls instead
        // which may trap. If we detect that this is a situation where we'll be
        // using the intrinsics then we report that the call may trap, which
        // callers might need to handle.
        if !self.wasm_and_missing_nontrapping_fptoint() {
            return false;
        }
        let src_ty = self.cx.val_ty(val);
        let float_width = self.cx.float_width(src_ty);
        let int_width = self.cx.int_width(dest_ty);
        match (int_width, float_width) {
            (32, 32) | (32, 64) | (64, 32) | (64, 64) => true,
            _ => false,
        }
    }

    fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        // When we can, use the native wasm intrinsics which have tighter
        // codegen. Note that this has a semantic difference in that the
        // intrinsic can trap whereas `fptoui` never traps. That difference,
        // however, is handled by `fptosui_may_trap` above.
        //
        // Note that we skip the wasm intrinsics for vector types where `fptoui`
        // must be used instead.
        if self.wasm_and_missing_nontrapping_fptoint() {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                let name = match (int_width, float_width) {
                    (32, 32) => Some("llvm.wasm.trunc.unsigned.i32.f32"),
                    (32, 64) => Some("llvm.wasm.trunc.unsigned.i32.f64"),
                    (64, 32) => Some("llvm.wasm.trunc.unsigned.i64.f32"),
                    (64, 64) => Some("llvm.wasm.trunc.unsigned.i64.f64"),
                    _ => None,
                };
                if let Some(name) = name {
                    let intrinsic = self.get_intrinsic(name);
                    return self.call(intrinsic, &[val], None);
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        if self.wasm_and_missing_nontrapping_fptoint() {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                let name = match (int_width, float_width) {
                    (32, 32) => Some("llvm.wasm.trunc.signed.i32.f32"),
                    (32, 64) => Some("llvm.wasm.trunc.signed.i32.f64"),
                    (64, 32) => Some("llvm.wasm.trunc.signed.i64.f32"),
                    (64, 64) => Some("llvm.wasm.trunc.signed.i64.f64"),
                    _ => None,
                };
                if let Some(name) = name {
                    let intrinsic = self.get_intrinsic(name);
                    return self.call(intrinsic, &[val], None);
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed) }
    }

    fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    /* Comparisons */
    fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let op = llvm::IntPredicate::from_generic(op);
        unsafe { llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }

    fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }

    /* Miscellaneous instructions */
    fn memcpy(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
    ) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memcpy.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.type_i8p());
        let src = self.pointercast(src, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemCpy(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            );
        }
    }

    fn memmove(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
    ) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memmove.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.type_i8p());
        let src = self.pointercast(src, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemMove(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            );
        }
    }

    fn memset(
        &mut self,
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) {
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let ptr = self.pointercast(ptr, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemSet(
                self.llbuilder,
                ptr,
                align.bytes() as c_uint,
                fill_byte,
                size,
                is_volatile,
            );
        }
    }

    fn select(
        &mut self,
        cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED) }
    }

    #[allow(dead_code)]
    fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }

    fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED) }
    }

    fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
            let vec_i32_ty = self.type_vector(self.type_i32(), num_elts as u64);
            self.shuffle_vector(vec, undef, self.const_null(vec_i32_ty))
        }
    }
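
    // `vector_splat` above uses the standard LLVM splat idiom: insert `elt`
    // into lane 0 of an undef vector, then `shufflevector` with an all-zero
    // mask so every result lane reads lane 0 of the first operand.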

    fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED) }
    }

    fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint, UNNAMED) }
    }

    fn landing_pad(
        &mut self,
        ty: &'ll Type,
        pers_fn: &'ll Value,
        num_clauses: usize,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn, num_clauses as c_uint, UNNAMED)
        }
    }

    fn set_cleanup(&mut self, landing_pad: &'ll Value) {
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }

    fn resume(&mut self, exn: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildResume(self.llbuilder, exn) }
    }

    fn cleanup_pad(&mut self, parent: Option<&'ll Value>, args: &[&'ll Value]) -> Funclet<'ll> {
        let name = const_cstr!("cleanuppad");
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(
                self.llbuilder,
                parent,
                args.len() as c_uint,
                args.as_ptr(),
                name.as_ptr(),
            )
        };
        Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
    }

    fn cleanup_ret(
        &mut self,
        funclet: &Funclet<'ll>,
        unwind: Option<&'ll BasicBlock>,
    ) -> &'ll Value {
        let ret =
            unsafe { llvm::LLVMRustBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind) };
        ret.expect("LLVM does not have support for cleanupret")
    }

    fn catch_pad(&mut self, parent: &'ll Value, args: &[&'ll Value]) -> Funclet<'ll> {
        let name = const_cstr!("catchpad");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(
                self.llbuilder,
                parent,
                args.len() as c_uint,
                args.as_ptr(),
                name.as_ptr(),
            )
        };
        Funclet::new(ret.expect("LLVM does not have support for catchpad"))
    }

    fn catch_switch(
        &mut self,
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
        num_handlers: usize,
    ) -> &'ll Value {
        let name = const_cstr!("catchswitch");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(
                self.llbuilder,
                parent,
                unwind,
                num_handlers as c_uint,
                name.as_ptr(),
            )
        };
        ret.expect("LLVM does not have support for catchswitch")
    }

    fn add_handler(&mut self, catch_switch: &'ll Value, handler: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }

    fn set_personality_fn(&mut self, personality: &'ll Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }

    // Atomic Operations
    fn atomic_cmpxchg(
        &mut self,
        dst: &'ll Value,
        cmp: &'ll Value,
        src: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        failure_order: rustc_codegen_ssa::common::AtomicOrdering,
        weak: bool,
    ) -> &'ll Value {
        let weak = if weak { llvm::True } else { llvm::False };
        unsafe {
            llvm::LLVMRustBuildAtomicCmpXchg(
                self.llbuilder,
                dst,
                cmp,
                src,
                AtomicOrdering::from_generic(order),
                AtomicOrdering::from_generic(failure_order),
                weak,
            )
        }
    }

    fn atomic_rmw(
        &mut self,
        op: rustc_codegen_ssa::common::AtomicRmwBinOp,
        dst: &'ll Value,
        src: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
                AtomicOrdering::from_generic(order),
                False,
            )
        }
    }

    fn atomic_fence(
        &mut self,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        scope: rustc_codegen_ssa::common::SynchronizationScope,
    ) {
        unsafe {
            llvm::LLVMRustBuildAtomicFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
                SynchronizationScope::from_generic(scope),
            );
        }
    }

    fn set_invariant_load(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(
                load,
                llvm::MD_invariant_load as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0),
            );
        }
    }

    fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start.p0i8", ptr, size);
    }

    fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end.p0i8", ptr, size);
    }

    fn instrprof_increment(
        &mut self,
        fn_name: &'ll Value,
        hash: &'ll Value,
        num_counters: &'ll Value,
        index: &'ll Value,
    ) {
        debug!(
            "instrprof_increment() with args ({:?}, {:?}, {:?}, {:?})",
            fn_name, hash, num_counters, index
        );

        let llfn = unsafe { llvm::LLVMRustGetInstrProfIncrementIntrinsic(self.cx().llmod) };
        let args = &[fn_name, hash, num_counters, index];
        let args = self.check_call("call", llfn, args);

        unsafe {
            let _ = llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                None,
            );
        }
    }

    fn call(
        &mut self,
        llfn: &'ll Value,
        args: &[&'ll Value],
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("call {:?} with args ({:?})", llfn, args);

        let args = self.check_call("call", llfn, args);
        let bundle = funclet.map(|funclet| funclet.bundle());
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundle,
            )
        }
    }

    fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }

    unsafe fn delete_basic_block(&mut self, bb: &'ll BasicBlock) {
        llvm::LLVMDeleteBasicBlock(bb);
    }

    fn do_not_inline(&mut self, llret: &'ll Value) {
        llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
    }
}

impl StaticBuilderMethods for Builder<'a, 'll, 'tcx> {
    fn get_static(&mut self, def_id: DefId) -> &'ll Value {
        // Forward to the `get_static` method of `CodegenCx`
        self.cx().get_static(def_id)
    }
}

impl Builder<'a, 'll, 'tcx> {
    pub fn llfn(&self) -> &'ll Value {
        unsafe { llvm::LLVMGetBasicBlockParent(self.llbb()) }
    }

    fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }

    pub fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs) }
    }

    pub fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs) }
    }

    pub fn insert_element(
        &mut self,
        vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED) }
    }

    pub fn shuffle_vector(
        &mut self,
        v1: &'ll Value,
        v2: &'ll Value,
        mask: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED) }
    }

    pub fn vector_reduce_fadd(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src) }
    }
    pub fn vector_reduce_fmul(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src) }
    }
    pub fn vector_reduce_fadd_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmul_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
    }
    pub fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
    }
    pub fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
    }
    pub fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
    }
    pub fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
    }
    pub fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false)
        }
    }
    pub fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false)
        }
    }
    pub fn vector_reduce_fmin_fast(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr =
                llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmax_fast(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr =
                llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
    }
    pub fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
    }

    pub fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    pub fn catch_ret(&mut self, funclet: &Funclet<'ll>, unwind: &'ll BasicBlock) -> &'ll Value {
        let ret =
            unsafe { llvm::LLVMRustBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind) };
        ret.expect("LLVM does not have support for catchret")
    }

    fn check_store(&mut self, val: &'ll Value, ptr: &'ll Value) -> &'ll Value {
        let dest_ptr_ty = self.cx.val_ty(ptr);
        let stored_ty = self.cx.val_ty(val);
        let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);

        assert_eq!(self.cx.type_kind(dest_ptr_ty), TypeKind::Pointer);

        if dest_ptr_ty == stored_ptr_ty {
            ptr
        } else {
            debug!(
                "type mismatch in store. \
                    Expected {:?}, got {:?}; inserting bitcast",
                dest_ptr_ty, stored_ptr_ty
            );
            self.bitcast(ptr, stored_ptr_ty)
        }
    }

    fn check_call<'b>(
        &mut self,
        typ: &str,
        llfn: &'ll Value,
        args: &'b [&'ll Value],
    ) -> Cow<'b, [&'ll Value]> {
        let mut fn_ty = self.cx.val_ty(llfn);
        // Strip off pointers
        while self.cx.type_kind(fn_ty) == TypeKind::Pointer {
            fn_ty = self.cx.element_type(fn_ty);
        }

        assert!(
            self.cx.type_kind(fn_ty) == TypeKind::Function,
            "builder::{} not passed a function, but {:?}",
            typ,
            fn_ty
        );

        let param_tys = self.cx.func_params_types(fn_ty);

        let all_args_match = param_tys
            .iter()
            .zip(args.iter().map(|&v| self.val_ty(v)))
            .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);

        if all_args_match {
            return Cow::Borrowed(args);
        }

        let casted_args: Vec<_> = param_tys
            .into_iter()
            .zip(args.iter())
            .enumerate()
            .map(|(i, (expected_ty, &actual_val))| {
                let actual_ty = self.val_ty(actual_val);
                if expected_ty != actual_ty {
                    debug!(
                        "type mismatch in function call of {:?}. \
                            Expected {:?} for param {}, got {:?}; injecting bitcast",
                        llfn, expected_ty, i, actual_ty
                    );
                    self.bitcast(actual_val, expected_ty)
                } else {
                    actual_val
                }
            })
            .collect();

        Cow::Owned(casted_args)
    }
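
    // Note on `check_store`/`check_call` above: the injected `bitcast`s are
    // value-preserving type adjustments (a bitcast changes no bits at runtime);
    // they paper over cases where rustc's notion of a pointee or parameter type
    // differs from the LLVM type already attached to the value.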

    pub fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }

    fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
        let size = size.bytes();
        if size == 0 {
            return;
        }

        if !self.cx().sess().emit_lifetime_markers() {
            return;
        }

        let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);

        let ptr = self.pointercast(ptr, self.cx.type_i8p());
        self.call(lifetime_intrinsic, &[self.cx.const_u64(size), ptr], None);
    }

    pub(crate) fn phi(
        &mut self,
        ty: &'ll Type,
        vals: &[&'ll Value],
        bbs: &[&'ll BasicBlock],
    ) -> &'ll Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = unsafe { llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED) };
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(), bbs.as_ptr(), vals.len() as c_uint);
            phi
        }
    }

    fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }

    fn wasm_and_missing_nontrapping_fptoint(&self) -> bool {
        self.sess().target.target.arch == "wasm32"
            && !self.sess().target_features.contains(&sym::nontrapping_dash_fptoint)
    }
}