// compiler/rustc_codegen_ssa/src/mir/rvalue.rs (upstream rustc 1.67.1)

use super::operand::{OperandRef, OperandValue};
use super::place::PlaceRef;
use super::{FunctionCx, LocalRef};

use crate::base;
use crate::common::{self, IntPredicate};
use crate::traits::*;
use crate::MemFlags;

use rustc_middle::mir;
use rustc_middle::mir::Operand;
use rustc_middle::ty::cast::{CastTy, IntTy};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
use rustc_middle::ty::{self, adjustment::PointerCast, Instance, Ty, TyCtxt};
use rustc_span::source_map::{Span, DUMMY_SP};

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
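    /// Codegen `rvalue` and store the result into the memory backing `dest`.
    ///
    /// Most rvalues are handled by building an operand via `codegen_rvalue_operand`
    /// and storing it; the arms below special-case the rvalues that want to write
    /// into the destination directly (unsizing coercions, array repeats, and
    /// aggregates).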
    #[instrument(level = "trace", skip(self, bx))]
    pub fn codegen_rvalue(
        &mut self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, Bx::Value>,
        rvalue: &mir::Rvalue<'tcx>,
    ) {
        match *rvalue {
            mir::Rvalue::Use(ref operand) => {
                let cg_operand = self.codegen_operand(bx, operand);
                // FIXME: consider not copying constants through stack. (Fixable by codegen'ing
                // constants into `OperandValue::Ref`; why don’t we do that yet if we don’t?)
                cg_operand.val.store(bx, dest);
            }

            mir::Rvalue::Cast(mir::CastKind::Pointer(PointerCast::Unsize), ref source, _) => {
                // The destination necessarily contains a fat pointer, so if
                // it's a scalar pair, it's a fat pointer or newtype thereof.
                if bx.cx().is_backend_scalar_pair(dest.layout) {
                    // Into-coerce of a thin pointer to a fat pointer -- just
                    // use the operand path.
                    let temp = self.codegen_rvalue_operand(bx, rvalue);
                    temp.val.store(bx, dest);
                    return;
                }

                // Unsize of a nontrivial struct. I would prefer for
                // this to be eliminated by MIR building, but
                // `CoerceUnsized` can be passed by a where-clause,
                // so the (generic) MIR may not be able to expand it.
                let operand = self.codegen_operand(bx, source);
                match operand.val {
                    OperandValue::Pair(..) | OperandValue::Immediate(_) => {
                        // Unsize from an immediate structure. We don't
                        // really need a temporary alloca here, but
                        // avoiding it would require us to have
                        // `coerce_unsized_into` use `extractvalue` to
                        // index into the struct, and this case isn't
                        // important enough for it.
                        debug!("codegen_rvalue: creating ugly alloca");
                        let scratch = PlaceRef::alloca(bx, operand.layout);
                        scratch.storage_live(bx);
                        operand.val.store(bx, scratch);
                        base::coerce_unsized_into(bx, scratch, dest);
                        scratch.storage_dead(bx);
                    }
                    OperandValue::Ref(llref, None, align) => {
                        let source = PlaceRef::new_sized_aligned(llref, operand.layout, align);
                        base::coerce_unsized_into(bx, source, dest);
                    }
                    OperandValue::Ref(_, Some(_), _) => {
                        bug!("unsized coercion on an unsized rvalue");
                    }
                }
            }

            mir::Rvalue::Repeat(ref elem, count) => {
                let cg_elem = self.codegen_operand(bx, elem);

                // Do not generate the loop for zero-sized elements or empty arrays.
                if dest.layout.is_zst() {
                    return;
                }

                if let OperandValue::Immediate(v) = cg_elem.val {
                    let zero = bx.const_usize(0);
                    let start = dest.project_index(bx, zero).llval;
                    let size = bx.const_usize(dest.layout.size.bytes());

                    // Use llvm.memset.p0i8.* to initialize all zero arrays
                    if bx.cx().const_to_opt_u128(v, false) == Some(0) {
                        let fill = bx.cx().const_u8(0);
                        bx.memset(start, fill, size, dest.align, MemFlags::empty());
                        return;
                    }

                    // Use llvm.memset.p0i8.* to initialize byte arrays
                    let v = bx.from_immediate(v);
                    if bx.cx().val_ty(v) == bx.cx().type_i8() {
                        bx.memset(start, v, size, dest.align, MemFlags::empty());
                        return;
                    }
                }

                let count =
                    self.monomorphize(count).eval_usize(bx.cx().tcx(), ty::ParamEnv::reveal_all());

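                // General case: have the builder emit a loop that stores `cg_elem`
                // into each of the `count` elements of `dest`.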
                bx.write_operand_repeatedly(cg_elem, count, dest);
            }

            mir::Rvalue::Aggregate(ref kind, ref operands) => {
                let (dest, active_field_index) = match **kind {
                    mir::AggregateKind::Adt(adt_did, variant_index, _, _, active_field_index) => {
                        dest.codegen_set_discr(bx, variant_index);
                        if bx.tcx().adt_def(adt_did).is_enum() {
                            (dest.project_downcast(bx, variant_index), active_field_index)
                        } else {
                            (dest, active_field_index)
                        }
                    }
                    _ => (dest, None),
                };
                for (i, operand) in operands.iter().enumerate() {
                    let op = self.codegen_operand(bx, operand);
                    // Do not generate stores and GEPis for zero-sized fields.
                    if !op.layout.is_zst() {
                        let field_index = active_field_index.unwrap_or(i);
                        let field = if let mir::AggregateKind::Array(_) = **kind {
                            let llindex = bx.cx().const_usize(field_index as u64);
                            dest.project_index(bx, llindex)
                        } else {
                            dest.project_field(bx, field_index)
                        };
                        op.val.store(bx, field);
                    }
                }
            }

            _ => {
                assert!(self.rvalue_creates_operand(rvalue, DUMMY_SP));
                let temp = self.codegen_rvalue_operand(bx, rvalue);
                temp.val.store(bx, dest);
            }
        }
    }

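    /// Codegen an rvalue of unsized type. The destination is given indirectly:
    /// `indirect_dest` is a place of pointer type through which the unsized
    /// value is written. Only `Rvalue::Use` can produce an unsized value here;
    /// anything else is a bug.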
    pub fn codegen_rvalue_unsized(
        &mut self,
        bx: &mut Bx,
        indirect_dest: PlaceRef<'tcx, Bx::Value>,
        rvalue: &mir::Rvalue<'tcx>,
    ) {
        debug!(
            "codegen_rvalue_unsized(indirect_dest.llval={:?}, rvalue={:?})",
            indirect_dest.llval, rvalue
        );

        match *rvalue {
            mir::Rvalue::Use(ref operand) => {
                let cg_operand = self.codegen_operand(bx, operand);
                cg_operand.val.store_unsized(bx, indirect_dest);
            }

            _ => bug!("unsized assignment other than `Rvalue::Use`"),
        }
    }

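    /// Codegen `rvalue` as an `OperandRef`, without needing a destination place.
    /// Callers are expected to have checked `rvalue_creates_operand` first; the
    /// assertion below enforces that.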
    pub fn codegen_rvalue_operand(
        &mut self,
        bx: &mut Bx,
        rvalue: &mir::Rvalue<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        assert!(
            self.rvalue_creates_operand(rvalue, DUMMY_SP),
            "cannot codegen {:?} to operand",
            rvalue,
        );

        match *rvalue {
            mir::Rvalue::Cast(ref kind, ref source, mir_cast_ty) => {
                let operand = self.codegen_operand(bx, source);
                debug!("cast operand is {:?}", operand);
                let cast = bx.cx().layout_of(self.monomorphize(mir_cast_ty));

                let val = match *kind {
                    mir::CastKind::PointerExposeAddress => {
                        assert!(bx.cx().is_backend_immediate(cast));
                        let llptr = operand.immediate();
                        let llcast_ty = bx.cx().immediate_backend_type(cast);
                        let lladdr = bx.ptrtoint(llptr, llcast_ty);
                        OperandValue::Immediate(lladdr)
                    }
                    mir::CastKind::Pointer(PointerCast::ReifyFnPointer) => {
                        match *operand.layout.ty.kind() {
                            ty::FnDef(def_id, substs) => {
                                let instance = ty::Instance::resolve_for_fn_ptr(
                                    bx.tcx(),
                                    ty::ParamEnv::reveal_all(),
                                    def_id,
                                    substs,
                                )
                                .unwrap()
                                .polymorphize(bx.cx().tcx());
                                OperandValue::Immediate(bx.get_fn_addr(instance))
                            }
                            _ => bug!("{} cannot be reified to a fn ptr", operand.layout.ty),
                        }
                    }
                    mir::CastKind::Pointer(PointerCast::ClosureFnPointer(_)) => {
                        match *operand.layout.ty.kind() {
                            ty::Closure(def_id, substs) => {
                                let instance = Instance::resolve_closure(
                                    bx.cx().tcx(),
                                    def_id,
                                    substs,
                                    ty::ClosureKind::FnOnce,
                                )
                                .expect("failed to normalize and resolve closure during codegen")
                                .polymorphize(bx.cx().tcx());
                                OperandValue::Immediate(bx.cx().get_fn_addr(instance))
                            }
                            _ => bug!("{} cannot be cast to a fn ptr", operand.layout.ty),
                        }
                    }
                    mir::CastKind::Pointer(PointerCast::UnsafeFnPointer) => {
                        // This is a no-op at the LLVM level.
                        operand.val
                    }
                    mir::CastKind::Pointer(PointerCast::Unsize) => {
                        assert!(bx.cx().is_backend_scalar_pair(cast));
                        let (lldata, llextra) = match operand.val {
                            OperandValue::Pair(lldata, llextra) => {
                                // unsize from a fat pointer -- this is a
                                // "trait-object-to-supertrait" coercion.
                                (lldata, Some(llextra))
                            }
                            OperandValue::Immediate(lldata) => {
                                // "standard" unsize
                                (lldata, None)
                            }
                            OperandValue::Ref(..) => {
                                bug!("by-ref operand {:?} in `codegen_rvalue_operand`", operand);
                            }
                        };
                        let (lldata, llextra) =
                            base::unsize_ptr(bx, lldata, operand.layout.ty, cast.ty, llextra);
                        OperandValue::Pair(lldata, llextra)
                    }
                    mir::CastKind::Pointer(PointerCast::MutToConstPointer)
                    | mir::CastKind::PtrToPtr
                        if bx.cx().is_backend_scalar_pair(operand.layout) =>
                    {
                        if let OperandValue::Pair(data_ptr, meta) = operand.val {
                            if bx.cx().is_backend_scalar_pair(cast) {
                                let data_cast = bx.pointercast(
                                    data_ptr,
                                    bx.cx().scalar_pair_element_backend_type(cast, 0, true),
                                );
                                OperandValue::Pair(data_cast, meta)
                            } else {
                                // cast to thin-ptr
                                // Cast of fat-ptr to thin-ptr is an extraction of data-ptr and
                                // pointer-cast of that pointer to desired pointer type.
                                let llcast_ty = bx.cx().immediate_backend_type(cast);
                                let llval = bx.pointercast(data_ptr, llcast_ty);
                                OperandValue::Immediate(llval)
                            }
                        } else {
                            bug!("unexpected non-pair operand");
                        }
                    }
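                    // `dyn*` casts: the value is packaged together with a vtable
                    // pointer into a scalar pair, much like an unsizing coercion
                    // to a fat pointer.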
                    mir::CastKind::DynStar => {
                        let (lldata, llextra) = match operand.val {
                            OperandValue::Ref(_, _, _) => todo!(),
                            OperandValue::Immediate(v) => (v, None),
                            OperandValue::Pair(v, l) => (v, Some(l)),
                        };
                        let (lldata, llextra) =
                            base::cast_to_dyn_star(bx, lldata, operand.layout, cast.ty, llextra);
                        OperandValue::Pair(lldata, llextra)
                    }
                    mir::CastKind::Pointer(
                        PointerCast::MutToConstPointer | PointerCast::ArrayToPointer,
                    )
                    | mir::CastKind::IntToInt
                    | mir::CastKind::FloatToInt
                    | mir::CastKind::FloatToFloat
                    | mir::CastKind::IntToFloat
                    | mir::CastKind::PtrToPtr
                    | mir::CastKind::FnPtrToPtr

                    // Since int2ptr can have arbitrary integer types as input (so we have to do
                    // sign extension and all that), it is currently best handled in the same code
                    // path as the other integer-to-X casts.
                    | mir::CastKind::PointerFromExposedAddress => {
                        assert!(bx.cx().is_backend_immediate(cast));
                        let ll_t_out = bx.cx().immediate_backend_type(cast);
                        if operand.layout.abi.is_uninhabited() {
                            let val = OperandValue::Immediate(bx.cx().const_undef(ll_t_out));
                            return OperandRef { val, layout: cast };
                        }
                        let r_t_in =
                            CastTy::from_ty(operand.layout.ty).expect("bad input type for cast");
                        let r_t_out = CastTy::from_ty(cast.ty).expect("bad output type for cast");
                        let ll_t_in = bx.cx().immediate_backend_type(operand.layout);
                        let llval = operand.immediate();

                        let newval = match (r_t_in, r_t_out) {
                            (CastTy::Int(i), CastTy::Int(_)) => {
                                bx.intcast(llval, ll_t_out, i.is_signed())
                            }
                            (CastTy::Float, CastTy::Float) => {
                                let srcsz = bx.cx().float_width(ll_t_in);
                                let dstsz = bx.cx().float_width(ll_t_out);
                                if dstsz > srcsz {
                                    bx.fpext(llval, ll_t_out)
                                } else if srcsz > dstsz {
                                    bx.fptrunc(llval, ll_t_out)
                                } else {
                                    llval
                                }
                            }
                            (CastTy::Int(i), CastTy::Float) => {
                                if i.is_signed() {
                                    bx.sitofp(llval, ll_t_out)
                                } else {
                                    bx.uitofp(llval, ll_t_out)
                                }
                            }
                            (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Ptr(_)) => {
                                bx.pointercast(llval, ll_t_out)
                            }
                            (CastTy::Int(i), CastTy::Ptr(_)) => {
                                let usize_llval =
                                    bx.intcast(llval, bx.cx().type_isize(), i.is_signed());
                                bx.inttoptr(usize_llval, ll_t_out)
                            }
                            (CastTy::Float, CastTy::Int(IntTy::I)) => {
                                bx.cast_float_to_int(true, llval, ll_t_out)
                            }
                            (CastTy::Float, CastTy::Int(_)) => {
                                bx.cast_float_to_int(false, llval, ll_t_out)
                            }
                            _ => bug!("unsupported cast: {:?} to {:?}", operand.layout.ty, cast.ty),
                        };
                        OperandValue::Immediate(newval)
                    }
                };
                OperandRef { val, layout: cast }
            }

            mir::Rvalue::Ref(_, bk, place) => {
                let mk_ref = move |tcx: TyCtxt<'tcx>, ty: Ty<'tcx>| {
                    tcx.mk_ref(
                        tcx.lifetimes.re_erased,
                        ty::TypeAndMut { ty, mutbl: bk.to_mutbl_lossy() },
                    )
                };
                self.codegen_place_to_pointer(bx, place, mk_ref)
            }

            mir::Rvalue::CopyForDeref(place) => self.codegen_operand(bx, &Operand::Copy(place)),
            mir::Rvalue::AddressOf(mutability, place) => {
                let mk_ptr = move |tcx: TyCtxt<'tcx>, ty: Ty<'tcx>| {
                    tcx.mk_ptr(ty::TypeAndMut { ty, mutbl: mutability })
                };
                self.codegen_place_to_pointer(bx, place, mk_ptr)
            }

            mir::Rvalue::Len(place) => {
                let size = self.evaluate_array_len(bx, place);
                OperandRef {
                    val: OperandValue::Immediate(size),
                    layout: bx.cx().layout_of(bx.tcx().types.usize),
                }
            }

            mir::Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) => {
                let lhs = self.codegen_operand(bx, lhs);
                let rhs = self.codegen_operand(bx, rhs);
                let llresult = match (lhs.val, rhs.val) {
                    (
                        OperandValue::Pair(lhs_addr, lhs_extra),
                        OperandValue::Pair(rhs_addr, rhs_extra),
                    ) => self.codegen_fat_ptr_binop(
                        bx,
                        op,
                        lhs_addr,
                        lhs_extra,
                        rhs_addr,
                        rhs_extra,
                        lhs.layout.ty,
                    ),

                    (OperandValue::Immediate(lhs_val), OperandValue::Immediate(rhs_val)) => {
                        self.codegen_scalar_binop(bx, op, lhs_val, rhs_val, lhs.layout.ty)
                    }

                    _ => bug!(),
                };
                OperandRef {
                    val: OperandValue::Immediate(llresult),
                    layout: bx.cx().layout_of(op.ty(bx.tcx(), lhs.layout.ty, rhs.layout.ty)),
                }
            }
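            // Checked arithmetic yields a pair of the result and an overflow flag,
            // laid out as the tuple `(T, bool)` built below.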
            mir::Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
                let lhs = self.codegen_operand(bx, lhs);
                let rhs = self.codegen_operand(bx, rhs);
                let result = self.codegen_scalar_checked_binop(
                    bx,
                    op,
                    lhs.immediate(),
                    rhs.immediate(),
                    lhs.layout.ty,
                );
                let val_ty = op.ty(bx.tcx(), lhs.layout.ty, rhs.layout.ty);
                let operand_ty = bx.tcx().intern_tup(&[val_ty, bx.tcx().types.bool]);
                OperandRef { val: result, layout: bx.cx().layout_of(operand_ty) }
            }

            mir::Rvalue::UnaryOp(op, ref operand) => {
                let operand = self.codegen_operand(bx, operand);
                let lloperand = operand.immediate();
                let is_float = operand.layout.ty.is_floating_point();
                let llval = match op {
                    mir::UnOp::Not => bx.not(lloperand),
                    mir::UnOp::Neg => {
                        if is_float {
                            bx.fneg(lloperand)
                        } else {
                            bx.neg(lloperand)
                        }
                    }
                };
                OperandRef { val: OperandValue::Immediate(llval), layout: operand.layout }
            }

            mir::Rvalue::Discriminant(ref place) => {
                let discr_ty = rvalue.ty(self.mir, bx.tcx());
                let discr_ty = self.monomorphize(discr_ty);
                let discr = self.codegen_place(bx, place.as_ref()).codegen_get_discr(bx, discr_ty);
                OperandRef {
                    val: OperandValue::Immediate(discr),
                    layout: self.cx.layout_of(discr_ty),
                }
            }

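            // `SizeOf`/`AlignOf` of a sized type are known at codegen time, so they
            // lower to a plain `usize` constant.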
            mir::Rvalue::NullaryOp(null_op, ty) => {
                let ty = self.monomorphize(ty);
                assert!(bx.cx().type_is_sized(ty));
                let layout = bx.cx().layout_of(ty);
                let val = match null_op {
                    mir::NullOp::SizeOf => layout.size.bytes(),
                    mir::NullOp::AlignOf => layout.align.abi.bytes(),
                };
                let val = bx.cx().const_usize(val);
                let tcx = self.cx.tcx();
                OperandRef {
                    val: OperandValue::Immediate(val),
                    layout: self.cx.layout_of(tcx.types.usize),
                }
            }

            mir::Rvalue::ThreadLocalRef(def_id) => {
                assert!(bx.cx().tcx().is_static(def_id));
                let static_ = bx.get_static(def_id);
                let layout = bx.layout_of(bx.cx().tcx().static_ptr_ty(def_id));
                OperandRef::from_immediate_or_packed_pair(bx, static_, layout)
            }
            mir::Rvalue::Use(ref operand) => self.codegen_operand(bx, operand),
            mir::Rvalue::Repeat(..) | mir::Rvalue::Aggregate(..) => {
                // According to `rvalue_creates_operand`, only ZST
                // aggregate rvalues are allowed to be operands.
                let ty = rvalue.ty(self.mir, self.cx.tcx());
                OperandRef::new_zst(bx, self.cx.layout_of(self.monomorphize(ty)))
            }
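            // `ShallowInitBox` reinterprets a raw pointer as a (not yet fully
            // initialized) `Box<T>`; at the backend level this is just a pointer
            // cast to the box's representation.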
            mir::Rvalue::ShallowInitBox(ref operand, content_ty) => {
                let operand = self.codegen_operand(bx, operand);
                let lloperand = operand.immediate();

                let content_ty = self.monomorphize(content_ty);
                let box_layout = bx.cx().layout_of(bx.tcx().mk_box(content_ty));
                let llty_ptr = bx.cx().backend_type(box_layout);

                let val = bx.pointercast(lloperand, llty_ptr);
                OperandRef { val: OperandValue::Immediate(val), layout: box_layout }
            }
        }
    }

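    /// Compute the value of `Rvalue::Len` for `place`. Locals kept as operands
    /// never materialize a place, so for those the array length is read straight
    /// out of the type; otherwise it comes from the codegen'd place (which also
    /// handles slices).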
    fn evaluate_array_len(&mut self, bx: &mut Bx, place: mir::Place<'tcx>) -> Bx::Value {
        // ZSTs are passed as operands and require special handling
        // because codegen_place() panics if Local is operand.
        if let Some(index) = place.as_local() {
            if let LocalRef::Operand(Some(op)) = self.locals[index] {
                if let ty::Array(_, n) = op.layout.ty.kind() {
                    let n = n.eval_usize(bx.cx().tcx(), ty::ParamEnv::reveal_all());
                    return bx.cx().const_usize(n);
                }
            }
        }
        // use common size calculation for non zero-sized types
        let cg_value = self.codegen_place(bx, place.as_ref());
        cg_value.len(bx.cx())
    }

    /// Codegen an `Rvalue::AddressOf` or `Rvalue::Ref`
    fn codegen_place_to_pointer(
        &mut self,
        bx: &mut Bx,
        place: mir::Place<'tcx>,
        mk_ptr_ty: impl FnOnce(TyCtxt<'tcx>, Ty<'tcx>) -> Ty<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        let cg_place = self.codegen_place(bx, place.as_ref());

        let ty = cg_place.layout.ty;

        // Note: places are indirect, so storing the `llval` into the
        // destination effectively creates a reference.
        let val = if !bx.cx().type_has_metadata(ty) {
            OperandValue::Immediate(cg_place.llval)
        } else {
            OperandValue::Pair(cg_place.llval, cg_place.llextra.unwrap())
        };
        OperandRef { val, layout: self.cx.layout_of(mk_ptr_ty(self.cx.tcx(), ty)) }
    }

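    /// Codegen a binary operation on two scalar immediates, choosing the
    /// float/signed/unsigned flavour of the backend instruction based on
    /// `input_ty`.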
    pub fn codegen_scalar_binop(
        &mut self,
        bx: &mut Bx,
        op: mir::BinOp,
        lhs: Bx::Value,
        rhs: Bx::Value,
        input_ty: Ty<'tcx>,
    ) -> Bx::Value {
        let is_float = input_ty.is_floating_point();
        let is_signed = input_ty.is_signed();
        match op {
            mir::BinOp::Add => {
                if is_float {
                    bx.fadd(lhs, rhs)
                } else {
                    bx.add(lhs, rhs)
                }
            }
            mir::BinOp::Sub => {
                if is_float {
                    bx.fsub(lhs, rhs)
                } else {
                    bx.sub(lhs, rhs)
                }
            }
            mir::BinOp::Mul => {
                if is_float {
                    bx.fmul(lhs, rhs)
                } else {
                    bx.mul(lhs, rhs)
                }
            }
            mir::BinOp::Div => {
                if is_float {
                    bx.fdiv(lhs, rhs)
                } else if is_signed {
                    bx.sdiv(lhs, rhs)
                } else {
                    bx.udiv(lhs, rhs)
                }
            }
            mir::BinOp::Rem => {
                if is_float {
                    bx.frem(lhs, rhs)
                } else if is_signed {
                    bx.srem(lhs, rhs)
                } else {
                    bx.urem(lhs, rhs)
                }
            }
            mir::BinOp::BitOr => bx.or(lhs, rhs),
            mir::BinOp::BitAnd => bx.and(lhs, rhs),
            mir::BinOp::BitXor => bx.xor(lhs, rhs),
            mir::BinOp::Offset => {
                let pointee_type = input_ty
                    .builtin_deref(true)
                    .unwrap_or_else(|| bug!("deref of non-pointer {:?}", input_ty))
                    .ty;
                let llty = bx.cx().backend_type(bx.cx().layout_of(pointee_type));
                bx.inbounds_gep(llty, lhs, &[rhs])
            }
            mir::BinOp::Shl => common::build_unchecked_lshift(bx, lhs, rhs),
            mir::BinOp::Shr => common::build_unchecked_rshift(bx, input_ty, lhs, rhs),
            mir::BinOp::Ne
            | mir::BinOp::Lt
            | mir::BinOp::Gt
            | mir::BinOp::Eq
            | mir::BinOp::Le
            | mir::BinOp::Ge => {
                if is_float {
                    bx.fcmp(base::bin_op_to_fcmp_predicate(op.to_hir_binop()), lhs, rhs)
                } else {
                    bx.icmp(base::bin_op_to_icmp_predicate(op.to_hir_binop(), is_signed), lhs, rhs)
                }
            }
        }
    }

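    /// Codegen a comparison of two fat pointers from their (data, extra) halves.
    /// Equality compares both halves; ordering compares the data pointers first
    /// and only consults the extra halves on a tie.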
    pub fn codegen_fat_ptr_binop(
        &mut self,
        bx: &mut Bx,
        op: mir::BinOp,
        lhs_addr: Bx::Value,
        lhs_extra: Bx::Value,
        rhs_addr: Bx::Value,
        rhs_extra: Bx::Value,
        _input_ty: Ty<'tcx>,
    ) -> Bx::Value {
        match op {
            mir::BinOp::Eq => {
                let lhs = bx.icmp(IntPredicate::IntEQ, lhs_addr, rhs_addr);
                let rhs = bx.icmp(IntPredicate::IntEQ, lhs_extra, rhs_extra);
                bx.and(lhs, rhs)
            }
            mir::BinOp::Ne => {
                let lhs = bx.icmp(IntPredicate::IntNE, lhs_addr, rhs_addr);
                let rhs = bx.icmp(IntPredicate::IntNE, lhs_extra, rhs_extra);
                bx.or(lhs, rhs)
            }
            mir::BinOp::Le | mir::BinOp::Lt | mir::BinOp::Ge | mir::BinOp::Gt => {
                // a OP b ~ a.0 STRICT(OP) b.0 | (a.0 == b.0 && a.1 OP b.1)
                let (op, strict_op) = match op {
                    mir::BinOp::Lt => (IntPredicate::IntULT, IntPredicate::IntULT),
                    mir::BinOp::Le => (IntPredicate::IntULE, IntPredicate::IntULT),
                    mir::BinOp::Gt => (IntPredicate::IntUGT, IntPredicate::IntUGT),
                    mir::BinOp::Ge => (IntPredicate::IntUGE, IntPredicate::IntUGT),
                    _ => bug!(),
                };
                let lhs = bx.icmp(strict_op, lhs_addr, rhs_addr);
                let and_lhs = bx.icmp(IntPredicate::IntEQ, lhs_addr, rhs_addr);
                let and_rhs = bx.icmp(op, lhs_extra, rhs_extra);
                let rhs = bx.and(and_lhs, and_rhs);
                bx.or(lhs, rhs)
            }
            _ => {
                bug!("unexpected fat ptr binop");
            }
        }
    }

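    /// Codegen an overflow-checked binary operation, producing an
    /// `OperandValue::Pair` of the result and an overflow flag. Add/Sub/Mul go
    /// through the backend's checked intrinsics; for shifts the flag instead
    /// records whether the shift amount is too large for the width of the
    /// left-hand side.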
    pub fn codegen_scalar_checked_binop(
        &mut self,
        bx: &mut Bx,
        op: mir::BinOp,
        lhs: Bx::Value,
        rhs: Bx::Value,
        input_ty: Ty<'tcx>,
    ) -> OperandValue<Bx::Value> {
        // This case can currently arise only from functions marked
        // with #[rustc_inherit_overflow_checks] and inlined from
        // another crate (mostly core::num generic/#[inline] fns),
        // while the current crate doesn't use overflow checks.
        if !bx.cx().check_overflow() {
            let val = self.codegen_scalar_binop(bx, op, lhs, rhs, input_ty);
            return OperandValue::Pair(val, bx.cx().const_bool(false));
        }

        let (val, of) = match op {
            // These are checked using intrinsics
            mir::BinOp::Add | mir::BinOp::Sub | mir::BinOp::Mul => {
                let oop = match op {
                    mir::BinOp::Add => OverflowOp::Add,
                    mir::BinOp::Sub => OverflowOp::Sub,
                    mir::BinOp::Mul => OverflowOp::Mul,
                    _ => unreachable!(),
                };
                bx.checked_binop(oop, input_ty, lhs, rhs)
            }
            mir::BinOp::Shl | mir::BinOp::Shr => {
                let lhs_llty = bx.cx().val_ty(lhs);
                let rhs_llty = bx.cx().val_ty(rhs);
                let invert_mask = common::shift_mask_val(bx, lhs_llty, rhs_llty, true);
                let outer_bits = bx.and(rhs, invert_mask);

                let of = bx.icmp(IntPredicate::IntNE, outer_bits, bx.cx().const_null(rhs_llty));
                let val = self.codegen_scalar_binop(bx, op, lhs, rhs, input_ty);

                (val, of)
            }
            _ => bug!("Operator `{:?}` is not a checkable operator", op),
        };

        OperandValue::Pair(val, of)
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
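    /// Returns `true` if `rvalue` can be codegen'd as an operand (via
    /// `codegen_rvalue_operand`) rather than needing a destination place.
    /// `Repeat` and `Aggregate` only qualify when the resulting type is a ZST.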
    pub fn rvalue_creates_operand(&self, rvalue: &mir::Rvalue<'tcx>, span: Span) -> bool {
        match *rvalue {
            mir::Rvalue::Ref(..) |
            mir::Rvalue::CopyForDeref(..) |
            mir::Rvalue::AddressOf(..) |
            mir::Rvalue::Len(..) |
            mir::Rvalue::Cast(..) | // (*)
            mir::Rvalue::ShallowInitBox(..) | // (*)
            mir::Rvalue::BinaryOp(..) |
            mir::Rvalue::CheckedBinaryOp(..) |
            mir::Rvalue::UnaryOp(..) |
            mir::Rvalue::Discriminant(..) |
            mir::Rvalue::NullaryOp(..) |
            mir::Rvalue::ThreadLocalRef(_) |
            mir::Rvalue::Use(..) => // (*)
                true,
            mir::Rvalue::Repeat(..) |
            mir::Rvalue::Aggregate(..) => {
                let ty = rvalue.ty(self.mir, self.cx.tcx());
                let ty = self.monomorphize(ty);
                self.cx.spanned_layout_of(ty, span).is_zst()
            }
        }

        // (*) this is only true if the type is suitable
    }
}