]>
Commit | Line | Data |
---|---|---|
9fa01778 XL |
1 | use crate::common::Funclet; |
2 | use crate::context::CodegenCx; | |
dfeec247 XL |
3 | use crate::llvm::{self, BasicBlock, False}; |
4 | use crate::llvm::{AtomicOrdering, AtomicRmwBinOp, SynchronizationScope}; | |
cdc7bbd5 | 5 | use crate::llvm_util; |
9fa01778 XL |
6 | use crate::type_::Type; |
7 | use crate::type_of::LayoutLlvmExt; | |
8 | use crate::value::Value; | |
6a06907d | 9 | use cstr::cstr; |
dfeec247 | 10 | use libc::{c_char, c_uint}; |
dfeec247 XL |
11 | use rustc_codegen_ssa::common::{IntPredicate, RealPredicate, TypeKind}; |
12 | use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue}; | |
a1dfa0c6 | 13 | use rustc_codegen_ssa::mir::place::PlaceRef; |
dfeec247 XL |
14 | use rustc_codegen_ssa::traits::*; |
15 | use rustc_codegen_ssa::MemFlags; | |
dfeec247 XL |
16 | use rustc_data_structures::small_c_str::SmallCStr; |
17 | use rustc_hir::def_id::DefId; | |
c295e0f8 XL |
18 | use rustc_middle::ty::layout::{ |
19 | FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOfHelpers, TyAndLayout, | |
20 | }; | |
ba9703b0 | 21 | use rustc_middle::ty::{self, Ty, TyCtxt}; |
cdc7bbd5 | 22 | use rustc_span::Span; |
c295e0f8 | 23 | use rustc_target::abi::{self, call::FnAbi, Align, Size, WrappingRange}; |
48663c56 | 24 | use rustc_target::spec::{HasTargetSpec, Target}; |
1bb2cb6e | 25 | use std::borrow::Cow; |
dc9dc135 | 26 | use std::ffi::CStr; |
cdc7bbd5 | 27 | use std::iter; |
c295e0f8 | 28 | use std::ops::Deref; |
85aaf69f | 29 | use std::ptr; |
3dfed10e | 30 | use tracing::debug; |
1a4d82fc | 31 | |
32a655c1 SL |
// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll, 'tcx> {
    // Owned LLVM IR builder handle; released exactly once in `Drop`.
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    // Borrowed codegen context (module, types, constants) this builder emits into.
    pub cx: &'a CodegenCx<'ll, 'tcx>,
}
38 | ||
impl Drop for Builder<'_, '_, '_> {
    fn drop(&mut self) {
        // SAFETY: `llbuilder` is the sole handle to this LLVM builder; the raw
        // pointer round-trip hands it to LLVM for disposal exactly once.
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}
46 | ||
// FIXME(eddyb) use a checked constructor when they become `const fn`.
// SAFETY: the byte string is exactly one NUL terminator, a valid C string.
const EMPTY_C_STR: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"\0") };

/// Empty string, to be used where LLVM expects an instruction name, indicating
/// that the instruction is to be left unnamed (i.e. numbered, in textual IR).
// FIXME(eddyb) pass `&CStr` directly to FFI once it's a thin pointer.
const UNNAMED: *const c_char = EMPTY_C_STR.as_ptr();
1a4d82fc | 54 | |
// Forward every backend-level associated type to the ones on `CodegenCx`, so a
// `Builder` and its context always agree on LLVM value/type/debug-info handles.
impl<'ll, 'tcx> BackendTypes for Builder<'_, 'll, 'tcx> {
    type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
    type Function = <CodegenCx<'ll, 'tcx> as BackendTypes>::Function;
    type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
    type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
    type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;

    type DIScope = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIScope;
    type DILocation = <CodegenCx<'ll, 'tcx> as BackendTypes>::DILocation;
    type DIVariable = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIVariable;
}
66 | ||
// Delegate target data-layout queries to the codegen context.
impl abi::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &abi::TargetDataLayout {
        self.cx.data_layout()
    }
}
72 | ||
// Delegate `TyCtxt` access to the codegen context.
impl<'tcx> ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.cx.tcx
    }
}
79 | ||
// Delegate the parameter environment to the codegen context.
impl<'tcx> ty::layout::HasParamEnv<'tcx> for Builder<'_, '_, 'tcx> {
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.cx.param_env()
    }
}
85 | ||
// Delegate target-spec lookup to the codegen context.
impl HasTargetSpec for Builder<'_, '_, '_> {
    #[inline]
    fn target_spec(&self) -> &Target {
        self.cx.target_spec()
    }
}
92 | ||
// Layout queries go through the context; layout errors are fatal here
// (the handler diverges), so `LayoutOfResult` is the plain `TyAndLayout`.
impl<'tcx> LayoutOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
    type LayoutOfResult = TyAndLayout<'tcx>;

    #[inline]
    fn handle_layout_err(&self, err: LayoutError<'tcx>, span: Span, ty: Ty<'tcx>) -> ! {
        self.cx.handle_layout_err(err, span, ty)
    }
}
101 | ||
// Fn-ABI queries go through the context; ABI errors are fatal here
// (the handler diverges), so the result type is the plain `&FnAbi`.
impl<'tcx> FnAbiOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
    type FnAbiOfResult = &'tcx FnAbi<'tcx, Ty<'tcx>>;

    #[inline]
    fn handle_fn_abi_err(
        &self,
        err: FnAbiError<'tcx>,
        span: Span,
        fn_abi_request: FnAbiRequest<'tcx>,
    ) -> ! {
        self.cx.handle_fn_abi_err(err, span, fn_abi_request)
    }
}
115 | ||
// Deref to the codegen context so `CodegenCx` methods can be called
// directly on a `Builder` (e.g. `self.const_usize(..)`).
impl<'ll, 'tcx> Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.cx
    }
}
124 | ||
// Tie this builder to its concrete codegen-context type for the SSA traits.
impl<'ll, 'tcx> HasCodegen<'tcx> for Builder<'_, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;
}
128 | ||
// Expands to the simple value-producing `BuilderMethods` wrappers (add, sub,
// mul, ...): each generated method forwards its operands to the named LLVM C
// API builder function and leaves the resulting instruction unnamed.
macro_rules! builder_methods_for_value_instructions {
    ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
            }
        })+
    }
}
138 | ||
a2a8927a | 139 | impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { |
    // Create a builder positioned at the end of basic block `llbb`.
    fn build(cx: &'a CodegenCx<'ll, 'tcx>, llbb: &'ll BasicBlock) -> Self {
        let bx = Builder::with_cx(cx);
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
        }
        bx
    }
147 | ||
    // Accessor for the borrowed codegen context.
    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }
151 | ||
    // The basic block this builder is currently inserting into.
    fn llbb(&self) -> &'ll BasicBlock {
        unsafe { llvm::LLVMGetInsertBlock(self.llbuilder) }
    }
155 | ||
    // No-op: the LLVM backend tracks source locations through debug-info
    // instead of a per-builder span.
    fn set_span(&mut self, _span: Span) {}
157 | ||
    // Append a new, named basic block to the end of function `llfn`.
    fn append_block(cx: &'a CodegenCx<'ll, 'tcx>, llfn: &'ll Value, name: &str) -> &'ll BasicBlock {
        unsafe {
            // LLVM wants a NUL-terminated name.
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(cx.llcx, llfn, name.as_ptr())
        }
    }
164 | ||
    // Append a new block to the function this builder is currently inside.
    fn append_sibling_block(&mut self, name: &str) -> &'ll BasicBlock {
        Self::append_block(self.cx, self.llfn(), name)
    }
168 | ||
    // Append a sibling block and return a fresh builder positioned in it.
    fn build_sibling_block(&mut self, name: &str) -> Self {
        let llbb = self.append_sibling_block(name);
        Self::build(self.cx, llbb)
    }
173 | ||
    // Emit `ret void`, terminating the current block.
    fn ret_void(&mut self) {
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }
179 | ||
    // Emit `ret <v>`, terminating the current block.
    fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }
185 | ||
    // Emit an unconditional branch to `dest`, terminating the current block.
    fn br(&mut self, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }
191 | ||
    // Emit a conditional branch on `cond` to `then_llbb` / `else_llbb`.
    fn cond_br(
        &mut self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }
202 | ||
    // Emit a `switch` on `v` with the given `(value, destination)` cases and a
    // default destination. Case values are materialized as constants of `v`'s
    // LLVM type (hence `const_uint_big`, which handles up-to-128-bit values).
    fn switch(
        &mut self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)>,
    ) {
        let switch =
            unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
        for (on_val, dest) in cases {
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
        }
    }
216 | ||
    // Emit an `invoke` of `llfn` (of function type `llty`): like `call`, but
    // control flows to `then` on normal return and to `catch` on unwind.
    // `funclet` supplies the operand bundle required inside MSVC-style EH.
    fn invoke(
        &mut self,
        llty: &'ll Type,
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("invoke {:?} with args ({:?})", llfn, args);

        // Casts arguments as needed to match the callee's signature.
        let args = self.check_call("invoke", llty, llfn, args);
        let bundle = funclet.map(|funclet| funclet.bundle());
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(
                self.llbuilder,
                llty,
                llfn,
                args.as_ptr(),
                args.len() as c_uint,
                then,
                catch,
                bundle,
                UNNAMED,
            )
        }
    }
246 | ||
    // Emit `unreachable`, terminating the current block.
    fn unreachable(&mut self) {
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }
252 | ||
    // Generate all the plain arithmetic/bitwise/unary wrappers; each maps
    // 1:1 onto the named LLVM C API builder function.
    builder_methods_for_value_instructions! {
        add(a, b) => LLVMBuildAdd,
        fadd(a, b) => LLVMBuildFAdd,
        sub(a, b) => LLVMBuildSub,
        fsub(a, b) => LLVMBuildFSub,
        mul(a, b) => LLVMBuildMul,
        fmul(a, b) => LLVMBuildFMul,
        udiv(a, b) => LLVMBuildUDiv,
        exactudiv(a, b) => LLVMBuildExactUDiv,
        sdiv(a, b) => LLVMBuildSDiv,
        exactsdiv(a, b) => LLVMBuildExactSDiv,
        fdiv(a, b) => LLVMBuildFDiv,
        urem(a, b) => LLVMBuildURem,
        srem(a, b) => LLVMBuildSRem,
        frem(a, b) => LLVMBuildFRem,
        shl(a, b) => LLVMBuildShl,
        lshr(a, b) => LLVMBuildLShr,
        ashr(a, b) => LLVMBuildAShr,
        and(a, b) => LLVMBuildAnd,
        or(a, b) => LLVMBuildOr,
        xor(a, b) => LLVMBuildXor,
        neg(x) => LLVMBuildNeg,
        fneg(x) => LLVMBuildFNeg,
        not(x) => LLVMBuildNot,
        unchecked_sadd(x, y) => LLVMBuildNSWAdd,
        unchecked_uadd(x, y) => LLVMBuildNUWAdd,
        unchecked_ssub(x, y) => LLVMBuildNSWSub,
        unchecked_usub(x, y) => LLVMBuildNUWSub,
        unchecked_smul(x, y) => LLVMBuildNSWMul,
        unchecked_umul(x, y) => LLVMBuildNUWMul,
    }
284 | ||
    // `fadd` with the fast-math flags set on the resulting instruction.
    fn fadd_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetFastMath(instr);
            instr
        }
    }
292 | ||
    // `fsub` with the fast-math flags set on the resulting instruction.
    fn fsub_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetFastMath(instr);
            instr
        }
    }
300 | ||
    // `fmul` with the fast-math flags set on the resulting instruction.
    fn fmul_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetFastMath(instr);
            instr
        }
    }
308 | ||
    // `fdiv` with the fast-math flags set on the resulting instruction.
    fn fdiv_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetFastMath(instr);
            instr
        }
    }
316 | ||
    // `frem` with the fast-math flags set on the resulting instruction.
    fn frem_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetFastMath(instr);
            instr
        }
    }
324 | ||
    // Emit an overflow-checked integer binop by calling the matching
    // `llvm.{s,u}{add,sub,mul}.with.overflow.iN` intrinsic; returns the
    // `(result, overflowed)` pair extracted from the intrinsic's aggregate.
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'_>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        use rustc_middle::ty::{Int, Uint};
        use rustc_middle::ty::{IntTy::*, UintTy::*};

        // `isize`/`usize` are normalized to the pointer-width fixed type so a
        // concrete `iN` intrinsic name can be chosen below.
        let new_kind = match ty.kind() {
            Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.pointer_width)),
            Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.pointer_width)),
            t @ (Uint(_) | Int(_)) => t.clone(),
            _ => panic!("tried to get overflow intrinsic for op applied to non-int type"),
        };

        let name = match oop {
            OverflowOp::Add => match new_kind {
                Int(I8) => "llvm.sadd.with.overflow.i8",
                Int(I16) => "llvm.sadd.with.overflow.i16",
                Int(I32) => "llvm.sadd.with.overflow.i32",
                Int(I64) => "llvm.sadd.with.overflow.i64",
                Int(I128) => "llvm.sadd.with.overflow.i128",

                Uint(U8) => "llvm.uadd.with.overflow.i8",
                Uint(U16) => "llvm.uadd.with.overflow.i16",
                Uint(U32) => "llvm.uadd.with.overflow.i32",
                Uint(U64) => "llvm.uadd.with.overflow.i64",
                Uint(U128) => "llvm.uadd.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Sub => match new_kind {
                Int(I8) => "llvm.ssub.with.overflow.i8",
                Int(I16) => "llvm.ssub.with.overflow.i16",
                Int(I32) => "llvm.ssub.with.overflow.i32",
                Int(I64) => "llvm.ssub.with.overflow.i64",
                Int(I128) => "llvm.ssub.with.overflow.i128",

                Uint(U8) => "llvm.usub.with.overflow.i8",
                Uint(U16) => "llvm.usub.with.overflow.i16",
                Uint(U32) => "llvm.usub.with.overflow.i32",
                Uint(U64) => "llvm.usub.with.overflow.i64",
                Uint(U128) => "llvm.usub.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Mul => match new_kind {
                Int(I8) => "llvm.smul.with.overflow.i8",
                Int(I16) => "llvm.smul.with.overflow.i16",
                Int(I32) => "llvm.smul.with.overflow.i32",
                Int(I64) => "llvm.smul.with.overflow.i64",
                Int(I128) => "llvm.smul.with.overflow.i128",

                Uint(U8) => "llvm.umul.with.overflow.i8",
                Uint(U16) => "llvm.umul.with.overflow.i16",
                Uint(U32) => "llvm.umul.with.overflow.i32",
                Uint(U64) => "llvm.umul.with.overflow.i64",
                Uint(U128) => "llvm.umul.with.overflow.i128",

                _ => unreachable!(),
            },
        };

        let res = self.call_intrinsic(name, &[lhs, rhs]);
        (self.extract_value(res, 0), self.extract_value(res, 1))
    }
393 | ||
1b1a35ee XL |
394 | fn from_immediate(&mut self, val: Self::Value) -> Self::Value { |
395 | if self.cx().val_ty(val) == self.cx().type_i1() { | |
396 | self.zext(val, self.cx().type_i8()) | |
397 | } else { | |
398 | val | |
399 | } | |
400 | } | |
c295e0f8 | 401 | fn to_immediate_scalar(&mut self, val: Self::Value, scalar: abi::Scalar) -> Self::Value { |
1b1a35ee XL |
402 | if scalar.is_bool() { |
403 | return self.trunc(val, self.cx().type_i1()); | |
404 | } | |
405 | val | |
406 | } | |
407 | ||
    // Emit an alloca in the function's *entry* block (LLVM only promotes
    // entry-block allocas to registers), regardless of where `self` is
    // currently positioned.
    fn alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
        bx.dynamic_alloca(ty, align)
    }
413 | ||
    // Emit an alloca at the current insertion point with the given alignment.
    fn dynamic_alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
        unsafe {
            let alloca = llvm::LLVMBuildAlloca(self.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }
421 | ||
    // Emit an alloca of a runtime-sized array of `len` elements of `ty`.
    fn array_alloca(&mut self, ty: &'ll Type, len: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let alloca = llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }
429 | ||
    // Emit a typed load from `ptr` with explicit alignment.
    fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }
437 | ||
    // Emit a volatile load from `ptr` (no explicit alignment is set here).
    fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetVolatile(load, llvm::True);
            load
        }
    }
445 | ||
    // Emit an atomic load with the given ordering. `size` is the type's size,
    // used for the alignment LLVM demands on atomics (see comment below).
    fn atomic_load(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ty,
                ptr,
                UNNAMED,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }
466 | ||
    // Load a `PlaceRef` into an `OperandRef`, choosing the operand form from
    // the layout: unsized -> by-ref with extra, LLVM-immediate -> single
    // immediate, scalar pair -> two immediates, otherwise by-ref.
    fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx, &'ll Value> {
        debug!("PlaceRef::load: {:?}", place);

        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());

        // ZSTs have no bytes to load.
        if place.layout.is_zst() {
            return OperandRef::new_zst(self, place.layout);
        }

        // Attach value-range / nonnull metadata to a scalar load, derived from
        // the scalar's valid range, so LLVM can optimize on it.
        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: abi::Scalar,
        ) {
            match scalar.value {
                abi::Int(..) => {
                    if !scalar.is_always_valid(bx) {
                        bx.range_metadata(load, scalar.valid_range);
                    }
                }
                abi::Pointer if !scalar.valid_range.contains(0) => {
                    bx.nonnull_metadata(load);
                }
                _ => {}
            }
        }

        let val = if let Some(llextra) = place.llextra {
            OperandValue::Ref(place.llval, Some(llextra), place.align)
        } else if place.layout.is_llvm_immediate() {
            // If the place is a constant global, use its initializer directly
            // instead of emitting a load.
            let mut const_llval = None;
            unsafe {
                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
                    if llvm::LLVMIsGlobalConstant(global) == llvm::True {
                        const_llval = llvm::LLVMGetInitializer(global);
                    }
                }
            }
            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(place.layout.llvm_type(self), place.llval, place.align);
                if let abi::Abi::Scalar(scalar) = place.layout.abi {
                    scalar_load_metadata(self, load, scalar);
                }
                load
            });
            OperandValue::Immediate(self.to_immediate(llval, place.layout))
        } else if let abi::Abi::ScalarPair(a, b) = place.layout.abi {
            // Load each half of the pair separately; the second half's
            // alignment is restricted by its offset within the pair.
            let b_offset = a.value.size(self).align_to(b.value.align(self).abi);
            let pair_ty = place.layout.llvm_type(self);

            let mut load = |i, scalar: abi::Scalar, align| {
                let llptr = self.struct_gep(pair_ty, place.llval, i as u64);
                let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
                let load = self.load(llty, llptr, align);
                scalar_load_metadata(self, load, scalar);
                self.to_immediate_scalar(load, scalar)
            };

            OperandValue::Pair(
                load(0, a, place.align),
                load(1, b, place.align.restrict_for_offset(b_offset)),
            )
        } else {
            OperandValue::Ref(place.llval, None, place.align)
        };

        OperandRef { val, layout: place.layout }
    }
535 | ||
    // Store `cg_elem` into each of the `count` elements of array place `dest`
    // by emitting a header/body/next loop (phi over the current element
    // pointer). Consumes `self` and returns a builder positioned in the block
    // after the loop.
    fn write_operand_repeatedly(
        mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) -> Self {
        let zero = self.const_usize(0);
        let count = self.const_usize(count);
        let start = dest.project_index(&mut self, zero).llval;
        let end = dest.project_index(&mut self, count).llval;

        let mut header_bx = self.build_sibling_block("repeat_loop_header");
        let mut body_bx = self.build_sibling_block("repeat_loop_body");
        let next_bx = self.build_sibling_block("repeat_loop_next");

        self.br(header_bx.llbb());
        // `current` is the element pointer, advanced each iteration.
        let current = header_bx.phi(self.val_ty(start), &[start], &[self.llbb()]);

        // Loop while `current != end`.
        let keep_going = header_bx.icmp(IntPredicate::IntNE, current, end);
        header_bx.cond_br(keep_going, body_bx.llbb(), next_bx.llbb());

        // Element alignment is the array's alignment restricted by the
        // element size (offsets are multiples of it).
        let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
        cg_elem
            .val
            .store(&mut body_bx, PlaceRef::new_sized_aligned(current, cg_elem.layout, align));

        let next = body_bx.inbounds_gep(
            self.backend_type(cg_elem.layout),
            current,
            &[self.const_usize(1)],
        );
        body_bx.br(header_bx.llbb());
        header_bx.add_incoming_to_phi(current, next, body_bx.llbb());

        next_bx
    }
1a4d82fc | 572 | |
    // Attach `!range` metadata to `load`. LLVM's range metadata is a
    // half-open interval, hence the `wrapping_add(1)` on the inclusive end.
    fn range_metadata(&mut self, load: &'ll Value, range: WrappingRange) {
        if self.sess().target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks an i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            let llty = self.cx.val_ty(load);
            let v = [
                self.cx.const_uint_big(llty, range.start),
                self.cx.const_uint_big(llty, range.end.wrapping_add(1)),
            ];

            llvm::LLVMSetMetadata(
                load,
                llvm::MD_range as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, v.as_ptr(), v.len() as c_uint),
            );
        }
    }
596 | ||
    // Attach `!nonnull` metadata (an empty MD node) to a pointer load.
    fn nonnull_metadata(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(
                load,
                llvm::MD_nonnull as c_uint,
                llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0),
            );
        }
    }
606 | ||
    // Attach `!type` metadata `(offset 0, typeid)` to `function`; used by
    // control-flow-integrity checks to identify the function's type.
    fn type_metadata(&mut self, function: &'ll Value, typeid: String) {
        let typeid_metadata = self.typeid_metadata(typeid);
        let v = [self.const_usize(0), typeid_metadata];
        unsafe {
            llvm::LLVMGlobalSetMetadata(
                function,
                llvm::MD_type as c_uint,
                llvm::LLVMValueAsMetadata(llvm::LLVMMDNodeInContext(
                    self.cx.llcx,
                    v.as_ptr(),
                    v.len() as c_uint,
                )),
            )
        }
    }
622 | ||
    // Build an LLVM metadata string from `typeid` (not NUL-terminated; the
    // explicit length is passed instead).
    fn typeid_metadata(&mut self, typeid: String) -> Self::Value {
        unsafe {
            llvm::LLVMMDStringInContext(
                self.cx.llcx,
                typeid.as_ptr() as *const c_char,
                typeid.as_bytes().len() as c_uint,
            )
        }
    }
632 | ||
    // Plain store: delegates to `store_with_flags` with no flags.
    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }
636 | ||
    // Emit a store of `val` to `ptr`, honoring the UNALIGNED / VOLATILE /
    // NONTEMPORAL memory flags. Returns the store instruction.
    fn store_with_flags(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        // Casts the destination pointer as needed to match `val`'s type.
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            // UNALIGNED is expressed in LLVM as alignment 1.
            let align =
                if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: https://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.const_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }
666 | ||
    // Emit an atomic store with the given ordering; `size` is the type's size,
    // used for the alignment LLVM demands on atomics.
    fn atomic_store(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) {
        debug!("Store {:?} -> {:?}", val, ptr);
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }
687 | ||
    // Emit a typed `getelementptr` with the given indices.
    fn gep(&mut self, ty: &'ll Type, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEP2(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
            )
        }
    }
700 | ||
    // Emit a typed `getelementptr inbounds` with the given indices.
    fn inbounds_gep(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        indices: &[&'ll Value],
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildInBoundsGEP2(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
            )
        }
    }
718 | ||
    // Emit a struct-field GEP; the assert guards the u64 -> c_uint narrowing.
    fn struct_gep(&mut self, ty: &'ll Type, ptr: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildStructGEP2(self.llbuilder, ty, ptr, idx as c_uint, UNNAMED) }
    }
723 | ||
    /* Casts */
    // Integer truncation to a narrower type.
    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }
728 | ||
a1dfa0c6 | 729 | fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 730 | unsafe { llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
731 | } |
732 | ||
f035d41b | 733 | fn fptoui_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> Option<&'ll Value> { |
5099ac24 | 734 | self.fptoint_sat(false, val, dest_ty) |
f035d41b XL |
735 | } |
736 | ||
    // Saturating float -> signed-int cast (None if unsupported).
    fn fptosi_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> Option<&'ll Value> {
        self.fptoint_sat(true, val, dest_ty)
    }
740 | ||
a1dfa0c6 | 741 | fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
cdc7bbd5 XL |
742 | // On WebAssembly the `fptoui` and `fptosi` instructions currently have |
743 | // poor codegen. The reason for this is that the corresponding wasm | |
744 | // instructions, `i32.trunc_f32_s` for example, will trap when the float | |
745 | // is out-of-bounds, infinity, or nan. This means that LLVM | |
746 | // automatically inserts control flow around `fptoui` and `fptosi` | |
747 | // because the LLVM instruction `fptoui` is defined as producing a | |
748 | // poison value, not having UB on out-of-bounds values. | |
3dfed10e | 749 | // |
cdc7bbd5 XL |
750 | // This method, however, is only used with non-saturating casts that |
751 | // have UB on out-of-bounds values. This means that it's ok if we use | |
752 | // the raw wasm instruction since out-of-bounds values can do whatever | |
753 | // we like. To ensure that LLVM picks the right instruction we choose | |
754 | // the raw wasm intrinsic functions which avoid LLVM inserting all the | |
755 | // other control flow automatically. | |
3c0e092e | 756 | if self.sess().target.is_like_wasm { |
3dfed10e XL |
757 | let src_ty = self.cx.val_ty(val); |
758 | if self.cx.type_kind(src_ty) != TypeKind::Vector { | |
759 | let float_width = self.cx.float_width(src_ty); | |
760 | let int_width = self.cx.int_width(dest_ty); | |
761 | let name = match (int_width, float_width) { | |
762 | (32, 32) => Some("llvm.wasm.trunc.unsigned.i32.f32"), | |
763 | (32, 64) => Some("llvm.wasm.trunc.unsigned.i32.f64"), | |
764 | (64, 32) => Some("llvm.wasm.trunc.unsigned.i64.f32"), | |
765 | (64, 64) => Some("llvm.wasm.trunc.unsigned.i64.f64"), | |
766 | _ => None, | |
767 | }; | |
768 | if let Some(name) = name { | |
94222f64 | 769 | return self.call_intrinsic(name, &[val]); |
3dfed10e XL |
770 | } |
771 | } | |
772 | } | |
dfeec247 | 773 | unsafe { llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
774 | } |
775 | ||
a1dfa0c6 | 776 | fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
cdc7bbd5 | 777 | // see `fptoui` above for why wasm is different here |
3c0e092e | 778 | if self.sess().target.is_like_wasm { |
3dfed10e XL |
779 | let src_ty = self.cx.val_ty(val); |
780 | if self.cx.type_kind(src_ty) != TypeKind::Vector { | |
781 | let float_width = self.cx.float_width(src_ty); | |
782 | let int_width = self.cx.int_width(dest_ty); | |
783 | let name = match (int_width, float_width) { | |
784 | (32, 32) => Some("llvm.wasm.trunc.signed.i32.f32"), | |
785 | (32, 64) => Some("llvm.wasm.trunc.signed.i32.f64"), | |
786 | (64, 32) => Some("llvm.wasm.trunc.signed.i64.f32"), | |
787 | (64, 64) => Some("llvm.wasm.trunc.signed.i64.f64"), | |
788 | _ => None, | |
789 | }; | |
790 | if let Some(name) = name { | |
94222f64 | 791 | return self.call_intrinsic(name, &[val]); |
3dfed10e XL |
792 | } |
793 | } | |
794 | } | |
dfeec247 | 795 | unsafe { llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
796 | } |
797 | ||
a1dfa0c6 | 798 | fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 799 | unsafe { llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
800 | } |
801 | ||
a1dfa0c6 | 802 | fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 803 | unsafe { llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
804 | } |
805 | ||
a1dfa0c6 | 806 | fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 807 | unsafe { llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
808 | } |
809 | ||
a1dfa0c6 | 810 | fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 811 | unsafe { llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
812 | } |
813 | ||
a1dfa0c6 | 814 | fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 815 | unsafe { llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
816 | } |
817 | ||
a1dfa0c6 | 818 | fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 819 | unsafe { llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
820 | } |
821 | ||
a1dfa0c6 | 822 | fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 823 | unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
824 | } |
825 | ||
a1dfa0c6 | 826 | fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value { |
dfeec247 | 827 | unsafe { llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed) } |
1a4d82fc JJ |
828 | } |
829 | ||
a1dfa0c6 | 830 | fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 831 | unsafe { llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED) } |
1a4d82fc JJ |
832 | } |
833 | ||
1a4d82fc | 834 | /* Comparisons */ |
a1dfa0c6 | 835 | fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value { |
a1dfa0c6 | 836 | let op = llvm::IntPredicate::from_generic(op); |
dfeec247 | 837 | unsafe { llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) } |
1a4d82fc JJ |
838 | } |
839 | ||
a1dfa0c6 | 840 | fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value { |
c295e0f8 | 841 | let op = llvm::RealPredicate::from_generic(op); |
dfeec247 | 842 | unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) } |
1a4d82fc JJ |
843 | } |
844 | ||
845 | /* Miscellaneous instructions */ | |
dfeec247 XL |
846 | fn memcpy( |
847 | &mut self, | |
848 | dst: &'ll Value, | |
849 | dst_align: Align, | |
850 | src: &'ll Value, | |
851 | src_align: Align, | |
852 | size: &'ll Value, | |
853 | flags: MemFlags, | |
854 | ) { | |
136023e0 | 855 | assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memcpy not supported"); |
a1dfa0c6 XL |
856 | let size = self.intcast(size, self.type_isize(), false); |
857 | let is_volatile = flags.contains(MemFlags::VOLATILE); | |
858 | let dst = self.pointercast(dst, self.type_i8p()); | |
859 | let src = self.pointercast(src, self.type_i8p()); | |
860 | unsafe { | |
dfeec247 XL |
861 | llvm::LLVMRustBuildMemCpy( |
862 | self.llbuilder, | |
863 | dst, | |
864 | dst_align.bytes() as c_uint, | |
865 | src, | |
866 | src_align.bytes() as c_uint, | |
867 | size, | |
868 | is_volatile, | |
869 | ); | |
a1dfa0c6 XL |
870 | } |
871 | } | |
7453a54e | 872 | |
dfeec247 XL |
873 | fn memmove( |
874 | &mut self, | |
875 | dst: &'ll Value, | |
876 | dst_align: Align, | |
877 | src: &'ll Value, | |
878 | src_align: Align, | |
879 | size: &'ll Value, | |
880 | flags: MemFlags, | |
881 | ) { | |
136023e0 | 882 | assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memmove not supported"); |
a1dfa0c6 XL |
883 | let size = self.intcast(size, self.type_isize(), false); |
884 | let is_volatile = flags.contains(MemFlags::VOLATILE); | |
885 | let dst = self.pointercast(dst, self.type_i8p()); | |
886 | let src = self.pointercast(src, self.type_i8p()); | |
1a4d82fc | 887 | unsafe { |
dfeec247 XL |
888 | llvm::LLVMRustBuildMemMove( |
889 | self.llbuilder, | |
890 | dst, | |
891 | dst_align.bytes() as c_uint, | |
892 | src, | |
893 | src_align.bytes() as c_uint, | |
894 | size, | |
895 | is_volatile, | |
896 | ); | |
1a4d82fc JJ |
897 | } |
898 | } | |
899 | ||
a1dfa0c6 XL |
900 | fn memset( |
901 | &mut self, | |
902 | ptr: &'ll Value, | |
903 | fill_byte: &'ll Value, | |
904 | size: &'ll Value, | |
905 | align: Align, | |
906 | flags: MemFlags, | |
907 | ) { | |
74b04a01 | 908 | let is_volatile = flags.contains(MemFlags::VOLATILE); |
a1dfa0c6 | 909 | let ptr = self.pointercast(ptr, self.type_i8p()); |
74b04a01 XL |
910 | unsafe { |
911 | llvm::LLVMRustBuildMemSet( | |
912 | self.llbuilder, | |
913 | ptr, | |
914 | align.bytes() as c_uint, | |
915 | fill_byte, | |
916 | size, | |
917 | is_volatile, | |
918 | ); | |
919 | } | |
a1dfa0c6 XL |
920 | } |
921 | ||
a1dfa0c6 | 922 | fn select( |
dfeec247 XL |
923 | &mut self, |
924 | cond: &'ll Value, | |
b7449926 XL |
925 | then_val: &'ll Value, |
926 | else_val: &'ll Value, | |
927 | ) -> &'ll Value { | |
dfeec247 | 928 | unsafe { llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED) } |
1a4d82fc JJ |
929 | } |
930 | ||
a1dfa0c6 | 931 | fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value { |
dfeec247 | 932 | unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) } |
1a4d82fc JJ |
933 | } |
934 | ||
a1dfa0c6 | 935 | fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value { |
dfeec247 | 936 | unsafe { llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED) } |
1a4d82fc JJ |
937 | } |
938 | ||
a1dfa0c6 | 939 | fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value { |
1a4d82fc | 940 | unsafe { |
a1dfa0c6 XL |
941 | let elt_ty = self.cx.val_ty(elt); |
942 | let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64)); | |
943 | let vec = self.insert_element(undef, elt, self.cx.const_i32(0)); | |
944 | let vec_i32_ty = self.type_vector(self.type_i32(), num_elts as u64); | |
945 | self.shuffle_vector(vec, undef, self.const_null(vec_i32_ty)) | |
1a4d82fc JJ |
946 | } |
947 | } | |
948 | ||
a1dfa0c6 | 949 | fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value { |
ff7c6d11 | 950 | assert_eq!(idx as c_uint as u64, idx); |
dfeec247 | 951 | unsafe { llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED) } |
1a4d82fc JJ |
952 | } |
953 | ||
dfeec247 | 954 | fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value, idx: u64) -> &'ll Value { |
ff7c6d11 | 955 | assert_eq!(idx as c_uint as u64, idx); |
dfeec247 | 956 | unsafe { llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint, UNNAMED) } |
1a4d82fc JJ |
957 | } |
958 | ||
5099ac24 | 959 | fn set_personality_fn(&mut self, personality: &'ll Value) { |
1a4d82fc | 960 | unsafe { |
5099ac24 | 961 | llvm::LLVMSetPersonalityFn(self.llfn(), personality); |
c1a9b12d SL |
962 | } |
963 | } | |
964 | ||
5099ac24 FG |
965 | fn cleanup_landing_pad(&mut self, ty: &'ll Type, pers_fn: &'ll Value) -> &'ll Value { |
966 | let landing_pad = self.landing_pad(ty, pers_fn, 1 /* FIXME should this be 0? */); | |
1a4d82fc JJ |
967 | unsafe { |
968 | llvm::LLVMSetCleanup(landing_pad, llvm::True); | |
969 | } | |
5099ac24 | 970 | landing_pad |
1a4d82fc JJ |
971 | } |
972 | ||
5099ac24 FG |
973 | fn resume(&mut self, exn: &'ll Value) { |
974 | unsafe { | |
975 | llvm::LLVMBuildResume(self.llbuilder, exn); | |
976 | } | |
1a4d82fc JJ |
977 | } |
978 | ||
dfeec247 | 979 | fn cleanup_pad(&mut self, parent: Option<&'ll Value>, args: &[&'ll Value]) -> Funclet<'ll> { |
6a06907d | 980 | let name = cstr!("cleanuppad"); |
7453a54e | 981 | let ret = unsafe { |
dfeec247 XL |
982 | llvm::LLVMRustBuildCleanupPad( |
983 | self.llbuilder, | |
984 | parent, | |
985 | args.len() as c_uint, | |
986 | args.as_ptr(), | |
987 | name.as_ptr(), | |
988 | ) | |
7453a54e | 989 | }; |
a1dfa0c6 | 990 | Funclet::new(ret.expect("LLVM does not have support for cleanuppad")) |
7453a54e SL |
991 | } |
992 | ||
5099ac24 FG |
993 | fn cleanup_ret(&mut self, funclet: &Funclet<'ll>, unwind: Option<&'ll BasicBlock>) { |
994 | unsafe { | |
995 | llvm::LLVMRustBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind) | |
996 | .expect("LLVM does not have support for cleanupret"); | |
997 | } | |
7453a54e SL |
998 | } |
999 | ||
dfeec247 | 1000 | fn catch_pad(&mut self, parent: &'ll Value, args: &[&'ll Value]) -> Funclet<'ll> { |
6a06907d | 1001 | let name = cstr!("catchpad"); |
7453a54e | 1002 | let ret = unsafe { |
dfeec247 XL |
1003 | llvm::LLVMRustBuildCatchPad( |
1004 | self.llbuilder, | |
1005 | parent, | |
1006 | args.len() as c_uint, | |
1007 | args.as_ptr(), | |
1008 | name.as_ptr(), | |
1009 | ) | |
7453a54e | 1010 | }; |
a1dfa0c6 | 1011 | Funclet::new(ret.expect("LLVM does not have support for catchpad")) |
7453a54e SL |
1012 | } |
1013 | ||
a1dfa0c6 XL |
1014 | fn catch_switch( |
1015 | &mut self, | |
b7449926 XL |
1016 | parent: Option<&'ll Value>, |
1017 | unwind: Option<&'ll BasicBlock>, | |
5099ac24 | 1018 | handlers: &[&'ll BasicBlock], |
b7449926 | 1019 | ) -> &'ll Value { |
6a06907d | 1020 | let name = cstr!("catchswitch"); |
7453a54e | 1021 | let ret = unsafe { |
dfeec247 XL |
1022 | llvm::LLVMRustBuildCatchSwitch( |
1023 | self.llbuilder, | |
1024 | parent, | |
1025 | unwind, | |
5099ac24 | 1026 | handlers.len() as c_uint, |
dfeec247 XL |
1027 | name.as_ptr(), |
1028 | ) | |
7453a54e | 1029 | }; |
5099ac24 FG |
1030 | let ret = ret.expect("LLVM does not have support for catchswitch"); |
1031 | for handler in handlers { | |
1032 | unsafe { | |
1033 | llvm::LLVMRustAddHandler(ret, handler); | |
1034 | } | |
7453a54e | 1035 | } |
5099ac24 | 1036 | ret |
7453a54e SL |
1037 | } |
1038 | ||
1a4d82fc | 1039 | // Atomic Operations |
a1dfa0c6 XL |
1040 | fn atomic_cmpxchg( |
1041 | &mut self, | |
b7449926 XL |
1042 | dst: &'ll Value, |
1043 | cmp: &'ll Value, | |
1044 | src: &'ll Value, | |
a1dfa0c6 XL |
1045 | order: rustc_codegen_ssa::common::AtomicOrdering, |
1046 | failure_order: rustc_codegen_ssa::common::AtomicOrdering, | |
1047 | weak: bool, | |
b7449926 | 1048 | ) -> &'ll Value { |
a1dfa0c6 XL |
1049 | let weak = if weak { llvm::True } else { llvm::False }; |
1050 | unsafe { | |
1051 | llvm::LLVMRustBuildAtomicCmpXchg( | |
1052 | self.llbuilder, | |
1053 | dst, | |
1054 | cmp, | |
1055 | src, | |
1056 | AtomicOrdering::from_generic(order), | |
1057 | AtomicOrdering::from_generic(failure_order), | |
dfeec247 | 1058 | weak, |
a1dfa0c6 | 1059 | ) |
1a4d82fc JJ |
1060 | } |
1061 | } | |
a1dfa0c6 XL |
1062 | fn atomic_rmw( |
1063 | &mut self, | |
1064 | op: rustc_codegen_ssa::common::AtomicRmwBinOp, | |
b7449926 XL |
1065 | dst: &'ll Value, |
1066 | src: &'ll Value, | |
a1dfa0c6 | 1067 | order: rustc_codegen_ssa::common::AtomicOrdering, |
b7449926 | 1068 | ) -> &'ll Value { |
1a4d82fc | 1069 | unsafe { |
a1dfa0c6 XL |
1070 | llvm::LLVMBuildAtomicRMW( |
1071 | self.llbuilder, | |
1072 | AtomicRmwBinOp::from_generic(op), | |
1073 | dst, | |
1074 | src, | |
1075 | AtomicOrdering::from_generic(order), | |
dfeec247 XL |
1076 | False, |
1077 | ) | |
1a4d82fc JJ |
1078 | } |
1079 | } | |
1080 | ||
a1dfa0c6 XL |
1081 | fn atomic_fence( |
1082 | &mut self, | |
1083 | order: rustc_codegen_ssa::common::AtomicOrdering, | |
dfeec247 | 1084 | scope: rustc_codegen_ssa::common::SynchronizationScope, |
a1dfa0c6 | 1085 | ) { |
1a4d82fc | 1086 | unsafe { |
a1dfa0c6 XL |
1087 | llvm::LLVMRustBuildAtomicFence( |
1088 | self.llbuilder, | |
1089 | AtomicOrdering::from_generic(order), | |
dfeec247 | 1090 | SynchronizationScope::from_generic(scope), |
a1dfa0c6 | 1091 | ); |
1a4d82fc JJ |
1092 | } |
1093 | } | |
a7813a04 | 1094 | |
532ac7d7 | 1095 | fn set_invariant_load(&mut self, load: &'ll Value) { |
32a655c1 | 1096 | unsafe { |
dfeec247 XL |
1097 | llvm::LLVMSetMetadata( |
1098 | load, | |
1099 | llvm::MD_invariant_load as c_uint, | |
1100 | llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0), | |
1101 | ); | |
32a655c1 SL |
1102 | } |
1103 | } | |
1104 | ||
532ac7d7 | 1105 | fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) { |
74b04a01 | 1106 | self.call_lifetime_intrinsic("llvm.lifetime.start.p0i8", ptr, size); |
532ac7d7 XL |
1107 | } |
1108 | ||
1109 | fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) { | |
74b04a01 | 1110 | self.call_lifetime_intrinsic("llvm.lifetime.end.p0i8", ptr, size); |
532ac7d7 XL |
1111 | } |
1112 | ||
f035d41b XL |
1113 | fn instrprof_increment( |
1114 | &mut self, | |
1115 | fn_name: &'ll Value, | |
1116 | hash: &'ll Value, | |
1117 | num_counters: &'ll Value, | |
1118 | index: &'ll Value, | |
3dfed10e | 1119 | ) { |
f035d41b XL |
1120 | debug!( |
1121 | "instrprof_increment() with args ({:?}, {:?}, {:?}, {:?})", | |
1122 | fn_name, hash, num_counters, index | |
1123 | ); | |
1124 | ||
3dfed10e | 1125 | let llfn = unsafe { llvm::LLVMRustGetInstrProfIncrementIntrinsic(self.cx().llmod) }; |
94222f64 XL |
1126 | let llty = self.cx.type_func( |
1127 | &[self.cx.type_i8p(), self.cx.type_i64(), self.cx.type_i32(), self.cx.type_i32()], | |
1128 | self.cx.type_void(), | |
1129 | ); | |
f035d41b | 1130 | let args = &[fn_name, hash, num_counters, index]; |
94222f64 | 1131 | let args = self.check_call("call", llty, llfn, args); |
f035d41b XL |
1132 | |
1133 | unsafe { | |
3dfed10e | 1134 | let _ = llvm::LLVMRustBuildCall( |
f035d41b | 1135 | self.llbuilder, |
94222f64 | 1136 | llty, |
f035d41b XL |
1137 | llfn, |
1138 | args.as_ptr() as *const &llvm::Value, | |
1139 | args.len() as c_uint, | |
1140 | None, | |
3dfed10e | 1141 | ); |
f035d41b XL |
1142 | } |
1143 | } | |
1144 | ||
532ac7d7 XL |
1145 | fn call( |
1146 | &mut self, | |
94222f64 | 1147 | llty: &'ll Type, |
532ac7d7 XL |
1148 | llfn: &'ll Value, |
1149 | args: &[&'ll Value], | |
1150 | funclet: Option<&Funclet<'ll>>, | |
1151 | ) -> &'ll Value { | |
dfeec247 | 1152 | debug!("call {:?} with args ({:?})", llfn, args); |
532ac7d7 | 1153 | |
94222f64 | 1154 | let args = self.check_call("call", llty, llfn, args); |
532ac7d7 XL |
1155 | let bundle = funclet.map(|funclet| funclet.bundle()); |
1156 | let bundle = bundle.as_ref().map(|b| &*b.raw); | |
1157 | ||
32a655c1 | 1158 | unsafe { |
532ac7d7 XL |
1159 | llvm::LLVMRustBuildCall( |
1160 | self.llbuilder, | |
94222f64 | 1161 | llty, |
532ac7d7 XL |
1162 | llfn, |
1163 | args.as_ptr() as *const &llvm::Value, | |
1164 | args.len() as c_uint, | |
dfeec247 | 1165 | bundle, |
532ac7d7 | 1166 | ) |
32a655c1 SL |
1167 | } |
1168 | } | |
1169 | ||
532ac7d7 | 1170 | fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value { |
dfeec247 | 1171 | unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) } |
8bb4bdeb XL |
1172 | } |
1173 | ||
a2a8927a XL |
1174 | fn apply_attrs_to_cleanup_callsite(&mut self, llret: &'ll Value) { |
1175 | // Cleanup is always the cold path. | |
1176 | llvm::Attribute::Cold.apply_callsite(llvm::AttributePlace::Function, llret); | |
1177 | ||
1178 | // In LLVM versions with deferred inlining (currently, system LLVM < 14), | |
1179 | // inlining drop glue can lead to exponential size blowup, see #41696 and #92110. | |
1180 | if !llvm_util::is_rust_llvm() && llvm_util::get_version() < (14, 0, 0) { | |
1181 | llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret); | |
1182 | } | |
532ac7d7 XL |
1183 | } |
1184 | } | |
1185 | ||
a2a8927a | 1186 | impl<'ll> StaticBuilderMethods for Builder<'_, 'll, '_> { |
dc9dc135 | 1187 | fn get_static(&mut self, def_id: DefId) -> &'ll Value { |
532ac7d7 XL |
1188 | // Forward to the `get_static` method of `CodegenCx` |
1189 | self.cx().get_static(def_id) | |
1190 | } | |
532ac7d7 XL |
1191 | } |
1192 | ||
a2a8927a | 1193 | impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> { |
17df50a5 XL |
1194 | fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self { |
1195 | // Create a fresh builder from the crate context. | |
1196 | let llbuilder = unsafe { llvm::LLVMCreateBuilderInContext(cx.llcx) }; | |
1197 | Builder { llbuilder, cx } | |
1198 | } | |
1199 | ||
532ac7d7 | 1200 | pub fn llfn(&self) -> &'ll Value { |
dfeec247 | 1201 | unsafe { llvm::LLVMGetBasicBlockParent(self.llbb()) } |
532ac7d7 XL |
1202 | } |
1203 | ||
532ac7d7 XL |
1204 | fn position_at_start(&mut self, llbb: &'ll BasicBlock) { |
1205 | unsafe { | |
1206 | llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb); | |
1207 | } | |
1208 | } | |
1209 | ||
1210 | pub fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value { | |
532ac7d7 XL |
1211 | unsafe { llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs) } |
1212 | } | |
1213 | ||
1214 | pub fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value { | |
532ac7d7 XL |
1215 | unsafe { llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs) } |
1216 | } | |
1217 | ||
1218 | pub fn insert_element( | |
dfeec247 XL |
1219 | &mut self, |
1220 | vec: &'ll Value, | |
532ac7d7 XL |
1221 | elt: &'ll Value, |
1222 | idx: &'ll Value, | |
1223 | ) -> &'ll Value { | |
dfeec247 | 1224 | unsafe { llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED) } |
532ac7d7 XL |
1225 | } |
1226 | ||
1227 | pub fn shuffle_vector( | |
1228 | &mut self, | |
1229 | v1: &'ll Value, | |
1230 | v2: &'ll Value, | |
1231 | mask: &'ll Value, | |
1232 | ) -> &'ll Value { | |
dfeec247 | 1233 | unsafe { llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED) } |
532ac7d7 XL |
1234 | } |
1235 | ||
416331ca XL |
1236 | pub fn vector_reduce_fadd(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value { |
1237 | unsafe { llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src) } | |
1238 | } | |
1239 | pub fn vector_reduce_fmul(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value { | |
1240 | unsafe { llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src) } | |
1241 | } | |
532ac7d7 | 1242 | pub fn vector_reduce_fadd_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value { |
532ac7d7 | 1243 | unsafe { |
532ac7d7 | 1244 | let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src); |
cdc7bbd5 | 1245 | llvm::LLVMRustSetFastMath(instr); |
532ac7d7 XL |
1246 | instr |
1247 | } | |
1248 | } | |
1249 | pub fn vector_reduce_fmul_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value { | |
532ac7d7 | 1250 | unsafe { |
532ac7d7 | 1251 | let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src); |
cdc7bbd5 | 1252 | llvm::LLVMRustSetFastMath(instr); |
532ac7d7 XL |
1253 | instr |
1254 | } | |
1255 | } | |
1256 | pub fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value { | |
532ac7d7 XL |
1257 | unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) } |
1258 | } | |
1259 | pub fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value { | |
532ac7d7 XL |
1260 | unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) } |
1261 | } | |
1262 | pub fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value { | |
532ac7d7 XL |
1263 | unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) } |
1264 | } | |
1265 | pub fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value { | |
532ac7d7 XL |
1266 | unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) } |
1267 | } | |
1268 | pub fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value { | |
532ac7d7 XL |
1269 | unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) } |
1270 | } | |
1271 | pub fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value { | |
dfeec247 XL |
1272 | unsafe { |
1273 | llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false) | |
1274 | } | |
532ac7d7 XL |
1275 | } |
1276 | pub fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value { | |
dfeec247 XL |
1277 | unsafe { |
1278 | llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false) | |
1279 | } | |
532ac7d7 XL |
1280 | } |
1281 | pub fn vector_reduce_fmin_fast(&mut self, src: &'ll Value) -> &'ll Value { | |
532ac7d7 | 1282 | unsafe { |
dfeec247 XL |
1283 | let instr = |
1284 | llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true); | |
cdc7bbd5 | 1285 | llvm::LLVMRustSetFastMath(instr); |
532ac7d7 XL |
1286 | instr |
1287 | } | |
1288 | } | |
1289 | pub fn vector_reduce_fmax_fast(&mut self, src: &'ll Value) -> &'ll Value { | |
532ac7d7 | 1290 | unsafe { |
dfeec247 XL |
1291 | let instr = |
1292 | llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true); | |
cdc7bbd5 | 1293 | llvm::LLVMRustSetFastMath(instr); |
532ac7d7 XL |
1294 | instr |
1295 | } | |
1296 | } | |
1297 | pub fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value { | |
532ac7d7 XL |
1298 | unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) } |
1299 | } | |
1300 | pub fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value { | |
532ac7d7 XL |
1301 | unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) } |
1302 | } | |
1303 | ||
1304 | pub fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) { | |
1305 | unsafe { | |
1306 | llvm::LLVMAddClause(landing_pad, clause); | |
1307 | } | |
1308 | } | |
1309 | ||
1310 | pub fn catch_ret(&mut self, funclet: &Funclet<'ll>, unwind: &'ll BasicBlock) -> &'ll Value { | |
dfeec247 XL |
1311 | let ret = |
1312 | unsafe { llvm::LLVMRustBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind) }; | |
532ac7d7 XL |
1313 | ret.expect("LLVM does not have support for catchret") |
1314 | } | |
1315 | ||
dc9dc135 | 1316 | fn check_store(&mut self, val: &'ll Value, ptr: &'ll Value) -> &'ll Value { |
a1dfa0c6 XL |
1317 | let dest_ptr_ty = self.cx.val_ty(ptr); |
1318 | let stored_ty = self.cx.val_ty(val); | |
1319 | let stored_ptr_ty = self.cx.type_ptr_to(stored_ty); | |
1bb2cb6e | 1320 | |
a1dfa0c6 | 1321 | assert_eq!(self.cx.type_kind(dest_ptr_ty), TypeKind::Pointer); |
1bb2cb6e SL |
1322 | |
1323 | if dest_ptr_ty == stored_ptr_ty { | |
1324 | ptr | |
1325 | } else { | |
dfeec247 XL |
1326 | debug!( |
1327 | "type mismatch in store. \ | |
1bb2cb6e | 1328 | Expected {:?}, got {:?}; inserting bitcast", |
dfeec247 XL |
1329 | dest_ptr_ty, stored_ptr_ty |
1330 | ); | |
1bb2cb6e SL |
1331 | self.bitcast(ptr, stored_ptr_ty) |
1332 | } | |
1333 | } | |
1334 | ||
dfeec247 XL |
1335 | fn check_call<'b>( |
1336 | &mut self, | |
1337 | typ: &str, | |
94222f64 | 1338 | fn_ty: &'ll Type, |
dfeec247 XL |
1339 | llfn: &'ll Value, |
1340 | args: &'b [&'ll Value], | |
1341 | ) -> Cow<'b, [&'ll Value]> { | |
dfeec247 XL |
1342 | assert!( |
1343 | self.cx.type_kind(fn_ty) == TypeKind::Function, | |
1344 | "builder::{} not passed a function, but {:?}", | |
1345 | typ, | |
1346 | fn_ty | |
1347 | ); | |
a7813a04 | 1348 | |
a1dfa0c6 | 1349 | let param_tys = self.cx.func_params_types(fn_ty); |
a7813a04 | 1350 | |
cdc7bbd5 | 1351 | let all_args_match = iter::zip(¶m_tys, args.iter().map(|&v| self.val_ty(v))) |
1bb2cb6e SL |
1352 | .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty); |
1353 | ||
1354 | if all_args_match { | |
1355 | return Cow::Borrowed(args); | |
1356 | } | |
1357 | ||
cdc7bbd5 | 1358 | let casted_args: Vec<_> = iter::zip(param_tys, args) |
1bb2cb6e SL |
1359 | .enumerate() |
1360 | .map(|(i, (expected_ty, &actual_val))| { | |
a1dfa0c6 | 1361 | let actual_ty = self.val_ty(actual_val); |
1bb2cb6e | 1362 | if expected_ty != actual_ty { |
dfeec247 XL |
1363 | debug!( |
1364 | "type mismatch in function call of {:?}. \ | |
1bb2cb6e | 1365 | Expected {:?} for param {}, got {:?}; injecting bitcast", |
dfeec247 XL |
1366 | llfn, expected_ty, i, actual_ty |
1367 | ); | |
1bb2cb6e SL |
1368 | self.bitcast(actual_val, expected_ty) |
1369 | } else { | |
1370 | actual_val | |
1371 | } | |
1372 | }) | |
1373 | .collect(); | |
a7813a04 | 1374 | |
0bf4aa26 | 1375 | Cow::Owned(casted_args) |
a7813a04 | 1376 | } |
ff7c6d11 | 1377 | |
532ac7d7 | 1378 | pub fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value { |
dfeec247 | 1379 | unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) } |
a1dfa0c6 XL |
1380 | } |
1381 | ||
94222f64 XL |
1382 | crate fn call_intrinsic(&mut self, intrinsic: &str, args: &[&'ll Value]) -> &'ll Value { |
1383 | let (ty, f) = self.cx.get_intrinsic(intrinsic); | |
1384 | self.call(ty, f, args, None) | |
1385 | } | |
1386 | ||
a1dfa0c6 | 1387 | fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) { |
74b04a01 XL |
1388 | let size = size.bytes(); |
1389 | if size == 0 { | |
ff7c6d11 XL |
1390 | return; |
1391 | } | |
1392 | ||
f9f354fc | 1393 | if !self.cx().sess().emit_lifetime_markers() { |
ff7c6d11 XL |
1394 | return; |
1395 | } | |
1396 | ||
a1dfa0c6 | 1397 | let ptr = self.pointercast(ptr, self.cx.type_i8p()); |
94222f64 | 1398 | self.call_intrinsic(intrinsic, &[self.cx.const_u64(size), ptr]); |
ff7c6d11 | 1399 | } |
532ac7d7 | 1400 | |
3dfed10e XL |
1401 | pub(crate) fn phi( |
1402 | &mut self, | |
1403 | ty: &'ll Type, | |
1404 | vals: &[&'ll Value], | |
1405 | bbs: &[&'ll BasicBlock], | |
1406 | ) -> &'ll Value { | |
532ac7d7 | 1407 | assert_eq!(vals.len(), bbs.len()); |
dfeec247 | 1408 | let phi = unsafe { llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED) }; |
532ac7d7 | 1409 | unsafe { |
dfeec247 | 1410 | llvm::LLVMAddIncoming(phi, vals.as_ptr(), bbs.as_ptr(), vals.len() as c_uint); |
532ac7d7 XL |
1411 | phi |
1412 | } | |
1413 | } | |
1414 | ||
1415 | fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) { | |
532ac7d7 XL |
1416 | unsafe { |
1417 | llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint); | |
1418 | } | |
1419 | } | |
3dfed10e | 1420 | |
cdc7bbd5 XL |
1421 | fn fptoint_sat_broken_in_llvm(&self) -> bool { |
1422 | match self.tcx.sess.target.arch.as_str() { | |
1423 | // FIXME - https://bugs.llvm.org/show_bug.cgi?id=50083 | |
1424 | "riscv64" => llvm_util::get_version() < (13, 0, 0), | |
1425 | _ => false, | |
1426 | } | |
3dfed10e | 1427 | } |
5099ac24 FG |
1428 | |
1429 | fn fptoint_sat( | |
1430 | &mut self, | |
1431 | signed: bool, | |
1432 | val: &'ll Value, | |
1433 | dest_ty: &'ll Type, | |
1434 | ) -> Option<&'ll Value> { | |
1435 | if !self.fptoint_sat_broken_in_llvm() { | |
1436 | let src_ty = self.cx.val_ty(val); | |
1437 | let (float_ty, int_ty, vector_length) = if self.cx.type_kind(src_ty) == TypeKind::Vector | |
1438 | { | |
1439 | assert_eq!(self.cx.vector_length(src_ty), self.cx.vector_length(dest_ty)); | |
1440 | ( | |
1441 | self.cx.element_type(src_ty), | |
1442 | self.cx.element_type(dest_ty), | |
1443 | Some(self.cx.vector_length(src_ty)), | |
1444 | ) | |
1445 | } else { | |
1446 | (src_ty, dest_ty, None) | |
1447 | }; | |
1448 | let float_width = self.cx.float_width(float_ty); | |
1449 | let int_width = self.cx.int_width(int_ty); | |
1450 | ||
1451 | let instr = if signed { "fptosi" } else { "fptoui" }; | |
1452 | let name = if let Some(vector_length) = vector_length { | |
1453 | format!( | |
1454 | "llvm.{}.sat.v{}i{}.v{}f{}", | |
1455 | instr, vector_length, int_width, vector_length, float_width | |
1456 | ) | |
1457 | } else { | |
1458 | format!("llvm.{}.sat.i{}.f{}", instr, int_width, float_width) | |
1459 | }; | |
1460 | let f = | |
1461 | self.declare_cfn(&name, llvm::UnnamedAddr::No, self.type_func(&[src_ty], dest_ty)); | |
1462 | Some(self.call(self.type_func(&[src_ty], dest_ty), f, &[val], None)) | |
1463 | } else { | |
1464 | None | |
1465 | } | |
1466 | } | |
1467 | ||
1468 | pub(crate) fn landing_pad( | |
1469 | &mut self, | |
1470 | ty: &'ll Type, | |
1471 | pers_fn: &'ll Value, | |
1472 | num_clauses: usize, | |
1473 | ) -> &'ll Value { | |
1474 | // Use LLVMSetPersonalityFn to set the personality. It supports arbitrary Consts while, | |
1475 | // LLVMBuildLandingPad requires the argument to be a Function (as of LLVM 12). The | |
1476 | // personality lives on the parent function anyway. | |
1477 | self.set_personality_fn(pers_fn); | |
1478 | unsafe { | |
1479 | llvm::LLVMBuildLandingPad(self.llbuilder, ty, None, num_clauses as c_uint, UNNAMED) | |
1480 | } | |
1481 | } | |
a7813a04 | 1482 | } |