1 use super::place
::PlaceRef
;
2 use super::{FunctionCx, LocalRef}
;
9 use rustc_errors
::ErrorReported
;
10 use rustc_middle
::mir
;
11 use rustc_middle
::mir
::interpret
::{ConstValue, ErrorHandled, Pointer, Scalar}
;
12 use rustc_middle
::ty
::layout
::TyAndLayout
;
13 use rustc_middle
::ty
::Ty
;
14 use rustc_target
::abi
::{Abi, Align, LayoutOf, Size}
;
18 /// The representation of a Rust value. The enum variant is in fact
19 /// uniquely determined by the value's type, but is kept as a
21 #[derive(Copy, Clone, Debug)]
22 pub enum OperandValue
<V
> {
23 /// A reference to the actual operand. The data is guaranteed
24 /// to be valid for the operand's lifetime.
25 /// The second value, if any, is the extra data (vtable or length)
26 /// which indicates that it refers to an unsized rvalue.
27 Ref(V
, Option
<V
>, Align
),
28 /// A single LLVM value.
30 /// A pair of immediate LLVM values. Used by fat pointers too.
34 /// An `OperandRef` is an "SSA" reference to a Rust value, along with
37 /// NOTE: unless you know a value's type exactly, you should not
38 /// generate LLVM opcodes acting on it and instead act via methods,
39 /// to avoid nasty edge cases. In particular, using `Builder::store`
40 /// directly is sure to cause problems -- use `OperandRef::store`
42 #[derive(Copy, Clone)]
43 pub struct OperandRef
<'tcx
, V
> {
45 pub val
: OperandValue
<V
>,
47 // The layout of value, based on its Rust type.
48 pub layout
: TyAndLayout
<'tcx
>,
51 impl<V
: CodegenObject
> fmt
::Debug
for OperandRef
<'tcx
, V
> {
52 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
53 write
!(f
, "OperandRef({:?} @ {:?})", self.val
, self.layout
)
// Methods on `OperandRef<'tcx, V>`, generic over the backend value type `V`.
// NOTE(review): the tokens below are an unmodified line-mangled extraction.
57 impl<'a
, 'tcx
, V
: CodegenObject
> OperandRef
<'tcx
, V
> {
58 pub fn new_zst
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
60 layout
: TyAndLayout
<'tcx
>,
61 ) -> OperandRef
<'tcx
, V
> {
62 assert
!(layout
.is_zst());
64 val
: OperandValue
::Immediate(bx
.const_undef(bx
.immediate_backend_type(layout
))),
69 pub fn from_const
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
71 val
: ConstValue
<'tcx
>,
74 let layout
= bx
.layout_of(ty
);
77 return OperandRef
::new_zst(bx
, layout
);
81 ConstValue
::Scalar(x
) => {
82 let scalar
= match layout
.abi
{
83 Abi
::Scalar(ref x
) => x
,
84 _
=> bug
!("from_const: invalid ByVal layout: {:#?}", layout
),
86 let llval
= bx
.scalar_to_backend(x
, scalar
, bx
.immediate_backend_type(layout
));
87 OperandValue
::Immediate(llval
)
89 ConstValue
::Slice { data, start, end }
=> {
90 let a_scalar
= match layout
.abi
{
91 Abi
::ScalarPair(ref a
, _
) => a
,
92 _
=> bug
!("from_const: invalid ScalarPair layout: {:#?}", layout
),
94 let a
= Scalar
::from(Pointer
::new(
95 bx
.tcx().create_memory_alloc(data
),
96 Size
::from_bytes(start
),
98 let a_llval
= bx
.scalar_to_backend(
101 bx
.scalar_pair_element_backend_type(layout
, 0, true),
103 let b_llval
= bx
.const_usize((end
- start
) as u64);
104 OperandValue
::Pair(a_llval
, b_llval
)
106 ConstValue
::ByRef { alloc, offset }
=> {
107 return bx
.load_operand(bx
.from_const_alloc(layout
, alloc
, offset
));
111 OperandRef { val, layout }
114 /// Asserts that this operand refers to a scalar and returns
115 /// a reference to its value.
116 pub fn immediate(self) -> V
{
118 OperandValue
::Immediate(s
) => s
,
119 _
=> bug
!("not immediate: {:?}", self),
123 pub fn deref
<Cx
: LayoutTypeMethods
<'tcx
>>(self, cx
: &Cx
) -> PlaceRef
<'tcx
, V
> {
124 let projected_ty
= self
128 .unwrap_or_else(|| bug
!("deref of non-pointer {:?}", self))
130 let (llptr
, llextra
) = match self.val
{
131 OperandValue
::Immediate(llptr
) => (llptr
, None
),
132 OperandValue
::Pair(llptr
, llextra
) => (llptr
, Some(llextra
)),
133 OperandValue
::Ref(..) => bug
!("Deref of by-Ref operand {:?}", self),
135 let layout
= cx
.layout_of(projected_ty
);
136 PlaceRef { llval: llptr, llextra, layout, align: layout.align.abi }
139 /// If this operand is a `Pair`, we return an aggregate with the two values.
140 /// For other cases, see `immediate`.
141 pub fn immediate_or_packed_pair
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
145 if let OperandValue
::Pair(a
, b
) = self.val
{
146 let llty
= bx
.cx().backend_type(self.layout
);
147 debug
!("Operand::immediate_or_packed_pair: packing {:?} into {:?}", self, llty
);
148 // Reconstruct the immediate aggregate.
149 let mut llpair
= bx
.cx().const_undef(llty
);
150 let imm_a
= base
::from_immediate(bx
, a
);
151 let imm_b
= base
::from_immediate(bx
, b
);
152 llpair
= bx
.insert_value(llpair
, imm_a
, 0);
153 llpair
= bx
.insert_value(llpair
, imm_b
, 1);
160 /// If the type is a pair, we return a `Pair`, otherwise, an `Immediate`.
161 pub fn from_immediate_or_packed_pair
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
164 layout
: TyAndLayout
<'tcx
>,
166 let val
= if let Abi
::ScalarPair(ref a
, ref b
) = layout
.abi
{
167 debug
!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval
, layout
);
169 // Deconstruct the immediate aggregate.
170 let a_llval
= bx
.extract_value(llval
, 0);
171 let a_llval
= base
::to_immediate_scalar(bx
, a_llval
, a
);
172 let b_llval
= bx
.extract_value(llval
, 1);
173 let b_llval
= base
::to_immediate_scalar(bx
, b_llval
, b
);
174 OperandValue
::Pair(a_llval
, b_llval
)
176 OperandValue
::Immediate(llval
)
178 OperandRef { val, layout }
181 pub fn extract_field
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
186 let field
= self.layout
.field(bx
.cx(), i
);
187 let offset
= self.layout
.fields
.offset(i
);
189 let mut val
= match (self.val
, &self.layout
.abi
) {
190 // If the field is ZST, it has no data.
191 _
if field
.is_zst() => {
192 return OperandRef
::new_zst(bx
, field
);
195 // Newtype of a scalar, scalar pair or vector.
196 (OperandValue
::Immediate(_
) | OperandValue
::Pair(..), _
)
197 if field
.size
== self.layout
.size
=>
199 assert_eq
!(offset
.bytes(), 0);
203 // Extract a scalar component from a pair.
204 (OperandValue
::Pair(a_llval
, b_llval
), &Abi
::ScalarPair(ref a
, ref b
)) => {
205 if offset
.bytes() == 0 {
206 assert_eq
!(field
.size
, a
.value
.size(bx
.cx()));
207 OperandValue
::Immediate(a_llval
)
209 assert_eq
!(offset
, a
.value
.size(bx
.cx()).align_to(b
.value
.align(bx
.cx()).abi
));
210 assert_eq
!(field
.size
, b
.value
.size(bx
.cx()));
211 OperandValue
::Immediate(b_llval
)
215 // `#[repr(simd)]` types are also immediate.
216 (OperandValue
::Immediate(llval
), &Abi
::Vector { .. }
) => {
217 OperandValue
::Immediate(bx
.extract_element(llval
, bx
.cx().const_usize(i
as u64)))
220 _
=> bug
!("OperandRef::extract_field({:?}): not applicable", self),
223 // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types.
224 // Bools in union fields needs to be truncated.
225 let to_immediate_or_cast
= |bx
: &mut Bx
, val
, ty
| {
226 if ty
== bx
.cx().type_i1() { bx.trunc(val, ty) }
else { bx.bitcast(val, ty) }
230 OperandValue
::Immediate(ref mut llval
) => {
231 *llval
= to_immediate_or_cast(bx
, *llval
, bx
.cx().immediate_backend_type(field
));
233 OperandValue
::Pair(ref mut a
, ref mut b
) => {
234 *a
= to_immediate_or_cast(
237 bx
.cx().scalar_pair_element_backend_type(field
, 0, true),
239 *b
= to_immediate_or_cast(
242 bx
.cx().scalar_pair_element_backend_type(field
, 1, true),
245 OperandValue
::Ref(..) => bug
!(),
248 OperandRef { val, layout: field }
// Store operations on `OperandValue<V>`: writing an operand into a place.
// NOTE(review): the tokens below are an unmodified line-mangled extraction.
252 impl<'a
, 'tcx
, V
: CodegenObject
> OperandValue
<V
> {
253 pub fn store
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
256 dest
: PlaceRef
<'tcx
, V
>,
258 self.store_with_flags(bx
, dest
, MemFlags
::empty());
261 pub fn volatile_store
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
264 dest
: PlaceRef
<'tcx
, V
>,
266 self.store_with_flags(bx
, dest
, MemFlags
::VOLATILE
);
269 pub fn unaligned_volatile_store
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
272 dest
: PlaceRef
<'tcx
, V
>,
274 self.store_with_flags(bx
, dest
, MemFlags
::VOLATILE
| MemFlags
::UNALIGNED
);
277 pub fn nontemporal_store
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
280 dest
: PlaceRef
<'tcx
, V
>,
282 self.store_with_flags(bx
, dest
, MemFlags
::NONTEMPORAL
);
285 fn store_with_flags
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
288 dest
: PlaceRef
<'tcx
, V
>,
291 debug
!("OperandRef::store: operand={:?}, dest={:?}", self, dest
);
292 // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
293 // value is through `undef`, and store itself is useless.
294 if dest
.layout
.is_zst() {
298 OperandValue
::Ref(r
, None
, source_align
) => {
299 base
::memcpy_ty(bx
, dest
.llval
, dest
.align
, r
, source_align
, dest
.layout
, flags
)
301 OperandValue
::Ref(_
, Some(_
), _
) => {
302 bug
!("cannot directly store unsized values");
304 OperandValue
::Immediate(s
) => {
305 let val
= base
::from_immediate(bx
, s
);
306 bx
.store_with_flags(val
, dest
.llval
, dest
.align
, flags
);
308 OperandValue
::Pair(a
, b
) => {
309 let (a_scalar
, b_scalar
) = match dest
.layout
.abi
{
310 Abi
::ScalarPair(ref a
, ref b
) => (a
, b
),
311 _
=> bug
!("store_with_flags: invalid ScalarPair layout: {:#?}", dest
.layout
),
313 let b_offset
= a_scalar
.value
.size(bx
).align_to(b_scalar
.value
.align(bx
).abi
);
315 let llptr
= bx
.struct_gep(dest
.llval
, 0);
316 let val
= base
::from_immediate(bx
, a
);
317 let align
= dest
.align
;
318 bx
.store_with_flags(val
, llptr
, align
, flags
);
320 let llptr
= bx
.struct_gep(dest
.llval
, 1);
321 let val
= base
::from_immediate(bx
, b
);
322 let align
= dest
.align
.restrict_for_offset(b_offset
);
323 bx
.store_with_flags(val
, llptr
, align
, flags
);
328 pub fn store_unsized
<Bx
: BuilderMethods
<'a
, 'tcx
, Value
= V
>>(
331 indirect_dest
: PlaceRef
<'tcx
, V
>,
333 debug
!("OperandRef::store_unsized: operand={:?}, indirect_dest={:?}", self, indirect_dest
);
334 let flags
= MemFlags
::empty();
336 // `indirect_dest` must have `*mut T` type. We extract `T` out of it.
337 let unsized_ty
= indirect_dest
341 .unwrap_or_else(|| bug
!("indirect_dest has non-pointer type: {:?}", indirect_dest
))
344 let (llptr
, llextra
) = if let OperandValue
::Ref(llptr
, Some(llextra
), _
) = self {
347 bug
!("store_unsized called with a sized value")
350 // FIXME: choose an appropriate alignment, or use dynamic align somehow
351 let max_align
= Align
::from_bits(128).unwrap();
352 let min_align
= Align
::from_bits(8).unwrap();
354 // Allocate an appropriate region on the stack, and copy the value into it
355 let (llsize
, _
) = glue
::size_and_align_of_dst(bx
, unsized_ty
, Some(llextra
));
356 let lldst
= bx
.array_alloca(bx
.cx().type_i8(), llsize
, max_align
);
357 bx
.memcpy(lldst
, max_align
, llptr
, min_align
, llsize
, flags
);
359 // Store the allocated region and the extra to the indirect place.
360 let indirect_operand
= OperandValue
::Pair(lldst
, llextra
);
361 indirect_operand
.store(bx
, indirect_dest
);
// Operand-consumption helpers on the per-function codegen context.
// NOTE(review): the tokens below are an unmodified line-mangled extraction.
365 impl<'a
, 'tcx
, Bx
: BuilderMethods
<'a
, 'tcx
>> FunctionCx
<'a
, 'tcx
, Bx
> {
366 fn maybe_codegen_consume_direct(
369 place_ref
: mir
::PlaceRef
<'tcx
>,
370 ) -> Option
<OperandRef
<'tcx
, Bx
::Value
>> {
371 debug
!("maybe_codegen_consume_direct(place_ref={:?})", place_ref
);
373 match self.locals
[place_ref
.local
] {
374 LocalRef
::Operand(Some(mut o
)) => {
375 // Moves out of scalar and scalar pair fields are trivial.
376 for elem
in place_ref
.projection
.iter() {
378 mir
::ProjectionElem
::Field(ref f
, _
) => {
379 o
= o
.extract_field(bx
, f
.index());
381 mir
::ProjectionElem
::Index(_
)
382 | mir
::ProjectionElem
::ConstantIndex { .. }
=> {
383 // ZSTs don't require any actual memory access.
384 // FIXME(eddyb) deduplicate this with the identical
385 // checks in `codegen_consume` and `extract_field`.
386 let elem
= o
.layout
.field(bx
.cx(), 0);
388 o
= OperandRef
::new_zst(bx
, elem
);
399 LocalRef
::Operand(None
) => {
400 bug
!("use of {:?} before def", place_ref
);
402 LocalRef
::Place(..) | LocalRef
::UnsizedPlace(..) => {
403 // watch out for locals that do not have an
404 // alloca; they are handled somewhat differently
410 pub fn codegen_consume(
413 place_ref
: mir
::PlaceRef
<'tcx
>,
414 ) -> OperandRef
<'tcx
, Bx
::Value
> {
415 debug
!("codegen_consume(place_ref={:?})", place_ref
);
417 let ty
= self.monomorphized_place_ty(place_ref
);
418 let layout
= bx
.cx().layout_of(ty
);
420 // ZSTs don't require any actual memory access.
422 return OperandRef
::new_zst(bx
, layout
);
425 if let Some(o
) = self.maybe_codegen_consume_direct(bx
, place_ref
) {
429 // for most places, to consume them we just load them
430 // out from their home
431 let place
= self.codegen_place(bx
, place_ref
);
432 bx
.load_operand(place
)
435 pub fn codegen_operand(
438 operand
: &mir
::Operand
<'tcx
>,
439 ) -> OperandRef
<'tcx
, Bx
::Value
> {
440 debug
!("codegen_operand(operand={:?})", operand
);
443 mir
::Operand
::Copy(ref place
) | mir
::Operand
::Move(ref place
) => {
444 self.codegen_consume(bx
, place
.as_ref())
447 mir
::Operand
::Constant(ref constant
) => {
448 self.eval_mir_constant_to_operand(bx
, constant
).unwrap_or_else(|err
| {
450 // errored or at least linted
451 ErrorHandled
::Reported(ErrorReported
) | ErrorHandled
::Linted
=> {}
452 ErrorHandled
::TooGeneric
=> {
453 bug
!("codegen encountered polymorphic constant")
456 // Allow RalfJ to sleep soundly knowing that even refactorings that remove
457 // the above error (or silence it under some conditions) will not cause UB.
459 // We still have to return an operand but it doesn't matter,
460 // this code is unreachable.
461 let ty
= self.monomorphize(&constant
.literal
.ty
);
462 let layout
= bx
.cx().layout_of(ty
);
463 bx
.load_operand(PlaceRef
::new_sized(
464 bx
.cx().const_undef(bx
.cx().type_ptr_to(bx
.cx().backend_type(layout
))),