// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, ValueRef};
use rustc::middle::const_val::{ConstEvalErr, ConstVal, ErrKind};
use rustc_const_math::ConstInt::*;
use rustc_const_math::{ConstInt, ConstMathErr, MAX_F32_PLUS_HALF_ULP};
use rustc::hir::def_id::DefId;
use rustc::infer::TransNormalize;
use rustc::traits;
use rustc::mir;
use rustc::mir::tcx::PlaceTy;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::layout::{self, LayoutOf, Size};
use rustc::ty::cast::{CastTy, IntTy};
use rustc::ty::subst::{Kind, Substs};
use rustc_apfloat::{ieee, Float, Status};
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use base;
use abi::{self, Abi};
use callee;
use builder::Builder;
use common::{self, CodegenCx, const_get_elt, val_ty};
use common::{C_array, C_bool, C_bytes, C_int, C_uint, C_uint_big, C_u32, C_u64};
use common::{C_null, C_struct, C_str_slice, C_undef, C_usize, C_vector, C_fat_ptr};
use common::const_to_opt_u128;
use consts;
use type_of::LayoutLlvmExt;
use type_::Type;
use value::Value;

use syntax_pos::Span;
use syntax::ast;

use std::fmt;
use std::ptr;

use super::operand::{OperandRef, OperandValue};
use super::FunctionCx;

/// A sized constant rvalue.
/// The LLVM type might not be the same for a single Rust type,
/// e.g. each enum variant would have its own LLVM struct type.
#[derive(Copy, Clone)]
pub struct Const<'tcx> {
    pub llval: ValueRef,
    pub ty: Ty<'tcx>
}

impl<'a, 'tcx> Const<'tcx> {
    pub fn new(llval: ValueRef, ty: Ty<'tcx>) -> Const<'tcx> {
        Const {
            llval,
            ty,
        }
    }

    pub fn from_constint(cx: &CodegenCx<'a, 'tcx>, ci: &ConstInt) -> Const<'tcx> {
        let tcx = cx.tcx;
        let (llval, ty) = match *ci {
            I8(v) => (C_int(Type::i8(cx), v as i64), tcx.types.i8),
            I16(v) => (C_int(Type::i16(cx), v as i64), tcx.types.i16),
            I32(v) => (C_int(Type::i32(cx), v as i64), tcx.types.i32),
            I64(v) => (C_int(Type::i64(cx), v as i64), tcx.types.i64),
            I128(v) => (C_uint_big(Type::i128(cx), v as u128), tcx.types.i128),
            Isize(v) => (C_int(Type::isize(cx), v.as_i64()), tcx.types.isize),
            U8(v) => (C_uint(Type::i8(cx), v as u64), tcx.types.u8),
            U16(v) => (C_uint(Type::i16(cx), v as u64), tcx.types.u16),
            U32(v) => (C_uint(Type::i32(cx), v as u64), tcx.types.u32),
            U64(v) => (C_uint(Type::i64(cx), v), tcx.types.u64),
            U128(v) => (C_uint_big(Type::i128(cx), v), tcx.types.u128),
            Usize(v) => (C_uint(Type::isize(cx), v.as_u64()), tcx.types.usize),
        };
        Const { llval: llval, ty: ty }
    }

    /// Translate ConstVal into an LLVM constant value.
    pub fn from_constval(cx: &CodegenCx<'a, 'tcx>,
                         cv: &ConstVal,
                         ty: Ty<'tcx>)
                         -> Const<'tcx> {
        let llty = cx.layout_of(ty).llvm_type(cx);
        let val = match *cv {
            ConstVal::Float(v) => {
                let bits = match v.ty {
                    ast::FloatTy::F32 => C_u32(cx, v.bits as u32),
                    ast::FloatTy::F64 => C_u64(cx, v.bits as u64)
                };
                consts::bitcast(bits, llty)
            }
            ConstVal::Bool(v) => C_bool(cx, v),
            ConstVal::Integral(ref i) => return Const::from_constint(cx, i),
            ConstVal::Str(ref v) => C_str_slice(cx, v.clone()),
            ConstVal::ByteStr(v) => {
                consts::addr_of(cx, C_bytes(cx, v.data), cx.align_of(ty), "byte_str")
            }
            ConstVal::Char(c) => C_uint(Type::char(cx), c as u64),
            ConstVal::Function(..) => C_undef(llty),
            ConstVal::Variant(_) |
            ConstVal::Aggregate(..) |
            ConstVal::Unevaluated(..) => {
                bug!("MIR must not use `{:?}` (aggregates are expanded to MIR rvalues)", cv)
            }
        };

        assert!(!ty.has_erasable_regions());

        Const::new(val, ty)
    }

    fn get_field(&self, cx: &CodegenCx<'a, 'tcx>, i: usize) -> ValueRef {
        let layout = cx.layout_of(self.ty);
        let field = layout.field(cx, i);
        if field.is_zst() {
            return C_undef(field.immediate_llvm_type(cx));
        }
        let offset = layout.fields.offset(i);
        match layout.abi {
            layout::Abi::Scalar(_) |
            layout::Abi::ScalarPair(..) |
            layout::Abi::Vector { .. }
                if offset.bytes() == 0 && field.size == layout.size => self.llval,

            layout::Abi::ScalarPair(ref a, ref b) => {
                if offset.bytes() == 0 {
                    assert_eq!(field.size, a.value.size(cx));
                    const_get_elt(self.llval, 0)
                } else {
                    assert_eq!(offset, a.value.size(cx)
                        .abi_align(b.value.align(cx)));
                    assert_eq!(field.size, b.value.size(cx));
                    const_get_elt(self.llval, 1)
                }
            }
            _ => {
                match layout.fields {
                    layout::FieldPlacement::Union(_) => self.llval,
                    _ => const_get_elt(self.llval, layout.llvm_field_index(i)),
                }
            }
        }
    }

    fn get_pair(&self, cx: &CodegenCx<'a, 'tcx>) -> (ValueRef, ValueRef) {
        (self.get_field(cx, 0), self.get_field(cx, 1))
    }

    fn get_fat_ptr(&self, cx: &CodegenCx<'a, 'tcx>) -> (ValueRef, ValueRef) {
        assert_eq!(abi::FAT_PTR_ADDR, 0);
        assert_eq!(abi::FAT_PTR_EXTRA, 1);
        self.get_pair(cx)
    }

    fn as_place(&self) -> ConstPlace<'tcx> {
        ConstPlace {
            base: Base::Value(self.llval),
            llextra: ptr::null_mut(),
            ty: self.ty
        }
    }

    pub fn to_operand(&self, cx: &CodegenCx<'a, 'tcx>) -> OperandRef<'tcx> {
        let layout = cx.layout_of(self.ty);
        let llty = layout.immediate_llvm_type(cx);
        let llvalty = val_ty(self.llval);

        let val = if llty == llvalty && layout.is_llvm_scalar_pair() {
            OperandValue::Pair(
                const_get_elt(self.llval, 0),
                const_get_elt(self.llval, 1))
        } else if llty == llvalty && layout.is_llvm_immediate() {
            // If the types match, we can use the value directly.
            OperandValue::Immediate(self.llval)
        } else {
            // Otherwise, or if the value is not immediate, we create
            // a constant LLVM global and cast its address if necessary.
            let align = cx.align_of(self.ty);
            let ptr = consts::addr_of(cx, self.llval, align, "const");
            OperandValue::Ref(consts::ptrcast(ptr, layout.llvm_type(cx).ptr_to()),
                              layout.align)
        };

        OperandRef {
            val,
            layout
        }
    }
}

impl<'tcx> fmt::Debug for Const<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Const({:?}: {:?})", Value(self.llval), self.ty)
    }
}

#[derive(Copy, Clone)]
enum Base {
    /// A constant value without a unique address.
    Value(ValueRef),

    /// String literal base pointer (cast from array).
    Str(ValueRef),

    /// The address of a static.
    Static(ValueRef)
}

/// A place as seen from a constant.
#[derive(Copy, Clone)]
struct ConstPlace<'tcx> {
    base: Base,
    llextra: ValueRef,
    ty: Ty<'tcx>
}

impl<'tcx> ConstPlace<'tcx> {
    fn to_const(&self, span: Span) -> Const<'tcx> {
        match self.base {
            Base::Value(val) => Const::new(val, self.ty),
            Base::Str(ptr) => {
                span_bug!(span, "loading from `str` ({:?}) in constant",
                          Value(ptr))
            }
            Base::Static(val) => {
                span_bug!(span, "loading from `static` ({:?}) in constant",
                          Value(val))
            }
        }
    }

    pub fn len<'a>(&self, cx: &CodegenCx<'a, 'tcx>) -> ValueRef {
        match self.ty.sty {
            ty::TyArray(_, n) => {
                C_usize(cx, n.val.to_const_int().unwrap().to_u64().unwrap())
            }
            ty::TySlice(_) | ty::TyStr => {
                assert!(self.llextra != ptr::null_mut());
                self.llextra
            }
            _ => bug!("unexpected type `{}` in ConstPlace::len", self.ty)
        }
    }
}

/// Machinery for translating a constant's MIR to LLVM values.
/// FIXME(eddyb) use miri and lower its allocations to LLVM.
struct MirConstContext<'a, 'tcx: 'a> {
    cx: &'a CodegenCx<'a, 'tcx>,
    mir: &'a mir::Mir<'tcx>,

    /// Type parameters for const fn and associated constants.
    substs: &'tcx Substs<'tcx>,

    /// Values of locals in a constant or const fn.
    locals: IndexVec<mir::Local, Option<Result<Const<'tcx>, ConstEvalErr<'tcx>>>>
}

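// Helper used below to remember only the first error encountered while
// evaluation keeps going, so that as many diagnostics as possible are
// reported for a single constant.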
fn add_err<'tcx, U, V>(failure: &mut Result<U, ConstEvalErr<'tcx>>,
                       value: &Result<V, ConstEvalErr<'tcx>>)
{
    if let &Err(ref err) = value {
        if failure.is_ok() {
            *failure = Err(err.clone());
        }
    }
}

impl<'a, 'tcx> MirConstContext<'a, 'tcx> {
    fn new(cx: &'a CodegenCx<'a, 'tcx>,
           mir: &'a mir::Mir<'tcx>,
           substs: &'tcx Substs<'tcx>,
           args: IndexVec<mir::Local, Result<Const<'tcx>, ConstEvalErr<'tcx>>>)
           -> MirConstContext<'a, 'tcx> {
        let mut context = MirConstContext {
            cx,
            mir,
            substs,
            locals: (0..mir.local_decls.len()).map(|_| None).collect(),
        };
        for (i, arg) in args.into_iter().enumerate() {
            // Locals after local 0 are the function arguments
            let index = mir::Local::new(i + 1);
            context.locals[index] = Some(arg);
        }
        context
    }

    fn trans_def(cx: &'a CodegenCx<'a, 'tcx>,
                 def_id: DefId,
                 substs: &'tcx Substs<'tcx>,
                 args: IndexVec<mir::Local, Result<Const<'tcx>, ConstEvalErr<'tcx>>>)
                 -> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
        let instance = ty::Instance::resolve(cx.tcx,
                                             ty::ParamEnv::empty(traits::Reveal::All),
                                             def_id,
                                             substs).unwrap();
        let mir = cx.tcx.instance_mir(instance.def);
        MirConstContext::new(cx, &mir, instance.substs, args).trans()
    }

    fn monomorphize<T>(&self, value: &T) -> T
        where T: TransNormalize<'tcx>
    {
        self.cx.tcx.trans_apply_param_substs(self.substs, value)
    }

    fn trans(&mut self) -> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
        let tcx = self.cx.tcx;
        let mut bb = mir::START_BLOCK;

        // Make sure to evaluate all statements to
        // report as many errors as we possibly can.
        let mut failure = Ok(());

        loop {
            let data = &self.mir[bb];
            for statement in &data.statements {
                let span = statement.source_info.span;
                match statement.kind {
                    mir::StatementKind::Assign(ref dest, ref rvalue) => {
                        let ty = dest.ty(self.mir, tcx);
                        let ty = self.monomorphize(&ty).to_ty(tcx);
                        let value = self.const_rvalue(rvalue, ty, span);
                        add_err(&mut failure, &value);
                        self.store(dest, value, span);
                    }
                    mir::StatementKind::StorageLive(_) |
                    mir::StatementKind::StorageDead(_) |
                    mir::StatementKind::Validate(..) |
                    mir::StatementKind::EndRegion(_) |
                    mir::StatementKind::Nop => {}
                    mir::StatementKind::InlineAsm { .. } |
                    mir::StatementKind::SetDiscriminant{ .. } => {
                        span_bug!(span, "{:?} should not appear in constants?", statement.kind);
                    }
                }
            }

            let terminator = data.terminator();
            let span = terminator.source_info.span;
            bb = match terminator.kind {
                mir::TerminatorKind::Drop { target, .. } | // No dropping.
                mir::TerminatorKind::Goto { target } => target,
                mir::TerminatorKind::Return => {
                    failure?;
                    return self.locals[mir::RETURN_PLACE].clone().unwrap_or_else(|| {
                        span_bug!(span, "no returned value in constant");
                    });
                }

                mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, .. } => {
                    let cond = self.const_operand(cond, span)?;
                    let cond_bool = common::const_to_uint(cond.llval) != 0;
                    if cond_bool != expected {
                        let err = match *msg {
                            mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                                let len = self.const_operand(len, span)?;
                                let index = self.const_operand(index, span)?;
                                ErrKind::IndexOutOfBounds {
                                    len: common::const_to_uint(len.llval),
                                    index: common::const_to_uint(index.llval)
                                }
                            }
                            mir::AssertMessage::Math(ref err) => {
                                ErrKind::Math(err.clone())
                            }
                            mir::AssertMessage::GeneratorResumedAfterReturn |
                            mir::AssertMessage::GeneratorResumedAfterPanic =>
                                span_bug!(span, "{:?} should not appear in constants?", msg),
                        };

                        let err = ConstEvalErr { span: span, kind: err };
                        err.report(tcx, span, "expression");
                        failure = Err(err);
                    }
                    target
                }

                mir::TerminatorKind::Call { ref func, ref args, ref destination, .. } => {
                    let fn_ty = func.ty(self.mir, tcx);
                    let fn_ty = self.monomorphize(&fn_ty);
                    let (def_id, substs) = match fn_ty.sty {
                        ty::TyFnDef(def_id, substs) => (def_id, substs),
                        _ => span_bug!(span, "calling {:?} (of type {}) in constant",
                                       func, fn_ty)
                    };

                    let mut arg_vals = IndexVec::with_capacity(args.len());
                    for arg in args {
                        let arg_val = self.const_operand(arg, span);
                        add_err(&mut failure, &arg_val);
                        arg_vals.push(arg_val);
                    }
                    if let Some((ref dest, target)) = *destination {
                        let result = if fn_ty.fn_sig(tcx).abi() == Abi::RustIntrinsic {
                            match &tcx.item_name(def_id)[..] {
                                "size_of" => {
                                    let llval = C_usize(self.cx,
                                        self.cx.size_of(substs.type_at(0)).bytes());
                                    Ok(Const::new(llval, tcx.types.usize))
                                }
                                "min_align_of" => {
                                    let llval = C_usize(self.cx,
                                        self.cx.align_of(substs.type_at(0)).abi());
                                    Ok(Const::new(llval, tcx.types.usize))
                                }
                                "type_id" => {
                                    let llval = C_u64(self.cx,
                                        self.cx.tcx.type_id_hash(substs.type_at(0)));
                                    Ok(Const::new(llval, tcx.types.u64))
                                }
                                _ => span_bug!(span, "{:?} in constant", terminator.kind)
                            }
                        } else if let Some((op, is_checked)) = self.is_binop_lang_item(def_id) {
                            (||{
                                assert_eq!(arg_vals.len(), 2);
                                let rhs = arg_vals.pop().unwrap()?;
                                let lhs = arg_vals.pop().unwrap()?;
                                if !is_checked {
                                    let binop_ty = op.ty(tcx, lhs.ty, rhs.ty);
                                    let (lhs, rhs) = (lhs.llval, rhs.llval);
                                    Ok(Const::new(const_scalar_binop(op, lhs, rhs, binop_ty),
                                                  binop_ty))
                                } else {
                                    let ty = lhs.ty;
                                    let val_ty = op.ty(tcx, lhs.ty, rhs.ty);
                                    let binop_ty = tcx.intern_tup(&[val_ty, tcx.types.bool], false);
                                    let (lhs, rhs) = (lhs.llval, rhs.llval);
                                    assert!(!ty.is_fp());

                                    match const_scalar_checked_binop(tcx, op, lhs, rhs, ty) {
                                        Some((llval, of)) => {
                                            Ok(trans_const_adt(
                                                self.cx,
                                                binop_ty,
                                                &mir::AggregateKind::Tuple,
                                                &[
                                                    Const::new(llval, val_ty),
                                                    Const::new(C_bool(self.cx, of), tcx.types.bool)
                                                ]))
                                        }
                                        None => {
                                            span_bug!(span,
                                                "{:?} got non-integer operands: {:?} and {:?}",
                                                op, Value(lhs), Value(rhs));
                                        }
                                    }
                                }
                            })()
                        } else {
                            MirConstContext::trans_def(self.cx, def_id, substs, arg_vals)
                        };
                        add_err(&mut failure, &result);
                        self.store(dest, result, span);
                        target
                    } else {
                        span_bug!(span, "diverging {:?} in constant", terminator.kind);
                    }
                }
                _ => span_bug!(span, "{:?} in constant", terminator.kind)
            };
        }
    }

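    // Maps calls to the compiler-internal 128-bit integer arithmetic lang items
    // back to the MIR binary operator they implement; the `bool` is true for
    // the overflow-checked (`*o`) variants.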
    fn is_binop_lang_item(&mut self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
        let tcx = self.cx.tcx;
        let items = tcx.lang_items();
        let def_id = Some(def_id);
        if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
        else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
        else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
        else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
        else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
        else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
        else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
        else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
        else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
        else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
        else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
        else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
        else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
        else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
        else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
        else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
        else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
        else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
        else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
        else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
        else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
        else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
        else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
        else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
        else { None }
    }

    fn store(&mut self,
             dest: &mir::Place<'tcx>,
             value: Result<Const<'tcx>, ConstEvalErr<'tcx>>,
             span: Span) {
        if let mir::Place::Local(index) = *dest {
            self.locals[index] = Some(value);
        } else {
            span_bug!(span, "assignment to {:?} in constant", dest);
        }
    }

    fn const_place(&self, place: &mir::Place<'tcx>, span: Span)
                   -> Result<ConstPlace<'tcx>, ConstEvalErr<'tcx>> {
        let tcx = self.cx.tcx;

        if let mir::Place::Local(index) = *place {
            return self.locals[index].clone().unwrap_or_else(|| {
                span_bug!(span, "{:?} not initialized", place)
            }).map(|v| v.as_place());
        }

        let place = match *place {
            mir::Place::Local(_) => bug!(), // handled above
            mir::Place::Static(box mir::Static { def_id, ty }) => {
                ConstPlace {
                    base: Base::Static(consts::get_static(self.cx, def_id)),
                    llextra: ptr::null_mut(),
                    ty: self.monomorphize(&ty),
                }
            }
            mir::Place::Projection(ref projection) => {
                let tr_base = self.const_place(&projection.base, span)?;
                let projected_ty = PlaceTy::Ty { ty: tr_base.ty }
                    .projection_ty(tcx, &projection.elem);
                let base = tr_base.to_const(span);
                let projected_ty = self.monomorphize(&projected_ty).to_ty(tcx);
                let has_metadata = self.cx.type_has_metadata(projected_ty);

                let (projected, llextra) = match projection.elem {
                    mir::ProjectionElem::Deref => {
                        let (base, extra) = if !has_metadata {
                            (base.llval, ptr::null_mut())
                        } else {
                            base.get_fat_ptr(self.cx)
                        };
                        if self.cx.statics.borrow().contains_key(&base) {
                            (Base::Static(base), extra)
                        } else if let ty::TyStr = projected_ty.sty {
                            (Base::Str(base), extra)
                        } else {
                            let v = base;
                            let v = self.cx.const_unsized.borrow().get(&v).map_or(v, |&v| v);
                            let mut val = unsafe { llvm::LLVMGetInitializer(v) };
                            if val.is_null() {
                                span_bug!(span, "dereference of non-constant pointer `{:?}`",
                                          Value(base));
                            }
                            let layout = self.cx.layout_of(projected_ty);
                            if let layout::Abi::Scalar(ref scalar) = layout.abi {
                                let i1_type = Type::i1(self.cx);
                                if scalar.is_bool() && val_ty(val) != i1_type {
                                    unsafe {
                                        val = llvm::LLVMConstTrunc(val, i1_type.to_ref());
                                    }
                                }
                            }
                            (Base::Value(val), extra)
                        }
                    }
                    mir::ProjectionElem::Field(ref field, _) => {
                        let llprojected = base.get_field(self.cx, field.index());
                        let llextra = if !has_metadata {
                            ptr::null_mut()
                        } else {
                            tr_base.llextra
                        };
                        (Base::Value(llprojected), llextra)
                    }
                    mir::ProjectionElem::Index(index) => {
                        let index = &mir::Operand::Copy(mir::Place::Local(index));
                        let llindex = self.const_operand(index, span)?.llval;

                        let iv = if let Some(iv) = common::const_to_opt_u128(llindex, false) {
                            iv
                        } else {
                            span_bug!(span, "index is not an integer-constant expression")
                        };

                        // Produce an undef instead of an LLVM assertion on OOB.
                        let len = common::const_to_uint(tr_base.len(self.cx));
                        let llelem = if iv < len as u128 {
                            const_get_elt(base.llval, iv as u64)
                        } else {
                            C_undef(self.cx.layout_of(projected_ty).llvm_type(self.cx))
                        };

                        (Base::Value(llelem), ptr::null_mut())
                    }
                    _ => span_bug!(span, "{:?} in constant", projection.elem)
                };
                ConstPlace {
                    base: projected,
                    llextra,
                    ty: projected_ty
                }
            }
        };
        Ok(place)
    }

    fn const_operand(&self, operand: &mir::Operand<'tcx>, span: Span)
                     -> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
        debug!("const_operand({:?} @ {:?})", operand, span);
        let result = match *operand {
            mir::Operand::Copy(ref place) |
            mir::Operand::Move(ref place) => {
                Ok(self.const_place(place, span)?.to_const(span))
            }

            mir::Operand::Constant(ref constant) => {
                let ty = self.monomorphize(&constant.ty);
                match constant.literal.clone() {
                    mir::Literal::Promoted { index } => {
                        let mir = &self.mir.promoted[index];
                        MirConstContext::new(self.cx, mir, self.substs, IndexVec::new()).trans()
                    }
                    mir::Literal::Value { value } => {
                        if let ConstVal::Unevaluated(def_id, substs) = value.val {
                            let substs = self.monomorphize(&substs);
                            MirConstContext::trans_def(self.cx, def_id, substs, IndexVec::new())
                        } else {
                            Ok(Const::from_constval(self.cx, &value.val, ty))
                        }
                    }
                }
            }
        };
        debug!("const_operand({:?} @ {:?}) = {:?}", operand, span,
               result.as_ref().ok());
        result
    }

    fn const_array(&self, array_ty: Ty<'tcx>, fields: &[ValueRef])
                   -> Const<'tcx>
    {
        let elem_ty = array_ty.builtin_index().unwrap_or_else(|| {
            bug!("bad array type {:?}", array_ty)
        });
        let llunitty = self.cx.layout_of(elem_ty).llvm_type(self.cx);
        // If the array contains enums, an LLVM array won't work.
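        // (Each variant may lower to a different LLVM struct type, so the
        // elements need not share a single element type; fall back to an
        // anonymous struct constant in that case.)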
        let val = if fields.iter().all(|&f| val_ty(f) == llunitty) {
            C_array(llunitty, fields)
        } else {
            C_struct(self.cx, fields, false)
        };
        Const::new(val, array_ty)
    }

    fn const_rvalue(&self, rvalue: &mir::Rvalue<'tcx>,
                    dest_ty: Ty<'tcx>, span: Span)
                    -> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
        let tcx = self.cx.tcx;
        debug!("const_rvalue({:?}: {:?} @ {:?})", rvalue, dest_ty, span);
        let val = match *rvalue {
            mir::Rvalue::Use(ref operand) => self.const_operand(operand, span)?,

            mir::Rvalue::Repeat(ref elem, count) => {
                let elem = self.const_operand(elem, span)?;
                let size = count.as_u64();
                assert_eq!(size as usize as u64, size);
                let fields = vec![elem.llval; size as usize];
                self.const_array(dest_ty, &fields)
            }

            mir::Rvalue::Aggregate(box mir::AggregateKind::Array(_), ref operands) => {
                // Make sure to evaluate all operands to
                // report as many errors as we possibly can.
                let mut fields = Vec::with_capacity(operands.len());
                let mut failure = Ok(());
                for operand in operands {
                    match self.const_operand(operand, span) {
                        Ok(val) => fields.push(val.llval),
                        Err(err) => if failure.is_ok() { failure = Err(err); }
                    }
                }
                failure?;

                self.const_array(dest_ty, &fields)
            }

            mir::Rvalue::Aggregate(ref kind, ref operands) => {
                // Make sure to evaluate all operands to
                // report as many errors as we possibly can.
                let mut fields = Vec::with_capacity(operands.len());
                let mut failure = Ok(());
                for operand in operands {
                    match self.const_operand(operand, span) {
                        Ok(val) => fields.push(val),
                        Err(err) => if failure.is_ok() { failure = Err(err); }
                    }
                }
                failure?;

                trans_const_adt(self.cx, dest_ty, kind, &fields)
            }

            mir::Rvalue::Cast(ref kind, ref source, cast_ty) => {
                let operand = self.const_operand(source, span)?;
                let cast_ty = self.monomorphize(&cast_ty);

                let val = match *kind {
                    mir::CastKind::ReifyFnPointer => {
                        match operand.ty.sty {
                            ty::TyFnDef(def_id, substs) => {
                                if tcx.has_attr(def_id, "rustc_args_required_const") {
                                    bug!("reifying a fn ptr that requires \
                                          const arguments");
                                }
                                callee::resolve_and_get_fn(self.cx, def_id, substs)
                            }
                            _ => {
                                span_bug!(span, "{} cannot be reified to a fn ptr",
                                          operand.ty)
                            }
                        }
                    }
                    mir::CastKind::ClosureFnPointer => {
                        match operand.ty.sty {
                            ty::TyClosure(def_id, substs) => {
                                // Get the def_id for FnOnce::call_once
                                let fn_once = tcx.lang_items().fn_once_trait().unwrap();
                                let call_once = tcx
                                    .global_tcx().associated_items(fn_once)
                                    .find(|it| it.kind == ty::AssociatedKind::Method)
                                    .unwrap().def_id;
                                // Now create its substs [Closure, Tuple]
                                let input = substs.closure_sig(def_id, tcx).input(0);
                                let input = tcx.erase_late_bound_regions_and_normalize(&input);
                                let substs = tcx.mk_substs([operand.ty, input]
                                    .iter().cloned().map(Kind::from));
                                callee::resolve_and_get_fn(self.cx, call_once, substs)
                            }
                            _ => {
                                bug!("{} cannot be cast to a fn ptr", operand.ty)
                            }
                        }
                    }
                    mir::CastKind::UnsafeFnPointer => {
                        // this is a no-op at the LLVM level
                        operand.llval
                    }
                    mir::CastKind::Unsize => {
                        let pointee_ty = operand.ty.builtin_deref(true)
                            .expect("consts: unsizing got non-pointer type").ty;
                        let (base, old_info) = if !self.cx.type_is_sized(pointee_ty) {
                            // Normally, the source is a thin pointer and we are
                            // adding extra info to make a fat pointer. The exception
                            // is when we are upcasting an existing object fat pointer
                            // to use a different vtable. In that case, we want to
                            // load out the original data pointer so we can repackage
                            // it.
                            let (base, extra) = operand.get_fat_ptr(self.cx);
                            (base, Some(extra))
                        } else {
                            (operand.llval, None)
                        };

                        let unsized_ty = cast_ty.builtin_deref(true)
                            .expect("consts: unsizing got non-pointer target type").ty;
                        let ptr_ty = self.cx.layout_of(unsized_ty).llvm_type(self.cx).ptr_to();
                        let base = consts::ptrcast(base, ptr_ty);
                        let info = base::unsized_info(self.cx, pointee_ty,
                                                      unsized_ty, old_info);

                        if old_info.is_none() {
                            let prev_const = self.cx.const_unsized.borrow_mut()
                                                     .insert(base, operand.llval);
                            assert!(prev_const.is_none() || prev_const == Some(operand.llval));
                        }
                        C_fat_ptr(self.cx, base, info)
                    }
                    mir::CastKind::Misc if self.cx.layout_of(operand.ty).is_llvm_immediate() => {
                        let r_t_in = CastTy::from_ty(operand.ty).expect("bad input type for cast");
                        let r_t_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
                        let cast_layout = self.cx.layout_of(cast_ty);
                        assert!(cast_layout.is_llvm_immediate());
                        let ll_t_out = cast_layout.immediate_llvm_type(self.cx);
                        let llval = operand.llval;

                        let mut signed = false;
                        let l = self.cx.layout_of(operand.ty);
                        if let layout::Abi::Scalar(ref scalar) = l.abi {
                            if let layout::Int(_, true) = scalar.value {
                                signed = true;
                            }
                        }

                        unsafe {
                            match (r_t_in, r_t_out) {
                                (CastTy::Int(_), CastTy::Int(_)) => {
                                    let s = signed as llvm::Bool;
                                    llvm::LLVMConstIntCast(llval, ll_t_out.to_ref(), s)
                                }
                                (CastTy::Int(_), CastTy::Float) => {
                                    cast_const_int_to_float(self.cx, llval, signed, ll_t_out)
                                }
                                (CastTy::Float, CastTy::Float) => {
                                    llvm::LLVMConstFPCast(llval, ll_t_out.to_ref())
                                }
                                (CastTy::Float, CastTy::Int(IntTy::I)) => {
                                    cast_const_float_to_int(self.cx, &operand,
                                                            true, ll_t_out, span)
                                }
                                (CastTy::Float, CastTy::Int(_)) => {
                                    cast_const_float_to_int(self.cx, &operand,
                                                            false, ll_t_out, span)
                                }
                                (CastTy::Ptr(_), CastTy::Ptr(_)) |
                                (CastTy::FnPtr, CastTy::Ptr(_)) |
                                (CastTy::RPtr(_), CastTy::Ptr(_)) => {
                                    consts::ptrcast(llval, ll_t_out)
                                }
                                (CastTy::Int(_), CastTy::Ptr(_)) => {
                                    let s = signed as llvm::Bool;
                                    let usize_llval = llvm::LLVMConstIntCast(llval,
                                        self.cx.isize_ty.to_ref(), s);
                                    llvm::LLVMConstIntToPtr(usize_llval, ll_t_out.to_ref())
                                }
                                (CastTy::Ptr(_), CastTy::Int(_)) |
                                (CastTy::FnPtr, CastTy::Int(_)) => {
                                    llvm::LLVMConstPtrToInt(llval, ll_t_out.to_ref())
                                }
                                _ => bug!("unsupported cast: {:?} to {:?}", operand.ty, cast_ty)
                            }
                        }
                    }
                    mir::CastKind::Misc => { // Casts from a fat-ptr.
                        let l = self.cx.layout_of(operand.ty);
                        let cast = self.cx.layout_of(cast_ty);
                        if l.is_llvm_scalar_pair() {
                            let (data_ptr, meta) = operand.get_fat_ptr(self.cx);
                            if cast.is_llvm_scalar_pair() {
                                let data_cast = consts::ptrcast(data_ptr,
                                    cast.scalar_pair_element_llvm_type(self.cx, 0));
                                C_fat_ptr(self.cx, data_cast, meta)
                            } else { // cast to thin-ptr
                                // Cast of fat-ptr to thin-ptr is an extraction of data-ptr and
                                // pointer-cast of that pointer to desired pointer type.
                                let llcast_ty = cast.immediate_llvm_type(self.cx);
                                consts::ptrcast(data_ptr, llcast_ty)
                            }
                        } else {
                            bug!("Unexpected non-fat-pointer operand")
                        }
                    }
                };
                Const::new(val, cast_ty)
            }

            mir::Rvalue::Ref(_, bk, ref place) => {
                let tr_place = self.const_place(place, span)?;

                let ty = tr_place.ty;
                let ref_ty = tcx.mk_ref(tcx.types.re_erased,
                    ty::TypeAndMut { ty: ty, mutbl: bk.to_mutbl_lossy() });

                let base = match tr_place.base {
                    Base::Value(llval) => {
                        // FIXME: may be wrong for &*(&simd_vec as &fmt::Debug)
                        let align = if self.cx.type_is_sized(ty) {
                            self.cx.align_of(ty)
                        } else {
                            self.cx.tcx.data_layout.pointer_align
                        };
                        if let mir::BorrowKind::Mut { .. } = bk {
                            consts::addr_of_mut(self.cx, llval, align, "ref_mut")
                        } else {
                            consts::addr_of(self.cx, llval, align, "ref")
                        }
                    }
                    Base::Str(llval) |
                    Base::Static(llval) => llval
                };

                let ptr = if self.cx.type_is_sized(ty) {
                    base
                } else {
                    C_fat_ptr(self.cx, base, tr_place.llextra)
                };
                Const::new(ptr, ref_ty)
            }

            mir::Rvalue::Len(ref place) => {
                let tr_place = self.const_place(place, span)?;
                Const::new(tr_place.len(self.cx), tcx.types.usize)
            }

            mir::Rvalue::BinaryOp(op, ref lhs, ref rhs) => {
                let lhs = self.const_operand(lhs, span)?;
                let rhs = self.const_operand(rhs, span)?;
                let ty = lhs.ty;
                let binop_ty = op.ty(tcx, lhs.ty, rhs.ty);
                let (lhs, rhs) = (lhs.llval, rhs.llval);
                Const::new(const_scalar_binop(op, lhs, rhs, ty), binop_ty)
            }

            mir::Rvalue::CheckedBinaryOp(op, ref lhs, ref rhs) => {
                let lhs = self.const_operand(lhs, span)?;
                let rhs = self.const_operand(rhs, span)?;
                let ty = lhs.ty;
                let val_ty = op.ty(tcx, lhs.ty, rhs.ty);
                let binop_ty = tcx.intern_tup(&[val_ty, tcx.types.bool], false);
                let (lhs, rhs) = (lhs.llval, rhs.llval);
                assert!(!ty.is_fp());

                match const_scalar_checked_binop(tcx, op, lhs, rhs, ty) {
                    Some((llval, of)) => {
                        trans_const_adt(self.cx, binop_ty, &mir::AggregateKind::Tuple, &[
                            Const::new(llval, val_ty),
                            Const::new(C_bool(self.cx, of), tcx.types.bool)
                        ])
                    }
                    None => {
                        span_bug!(span, "{:?} got non-integer operands: {:?} and {:?}",
                                  rvalue, Value(lhs), Value(rhs));
                    }
                }
            }

            mir::Rvalue::UnaryOp(op, ref operand) => {
                let operand = self.const_operand(operand, span)?;
                let lloperand = operand.llval;
                let llval = match op {
                    mir::UnOp::Not => {
                        unsafe {
                            llvm::LLVMConstNot(lloperand)
                        }
                    }
                    mir::UnOp::Neg => {
                        let is_float = operand.ty.is_fp();
                        unsafe {
                            if is_float {
                                llvm::LLVMConstFNeg(lloperand)
                            } else {
                                llvm::LLVMConstNeg(lloperand)
                            }
                        }
                    }
                };
                Const::new(llval, operand.ty)
            }

            mir::Rvalue::NullaryOp(mir::NullOp::SizeOf, ty) => {
                assert!(self.cx.type_is_sized(ty));
                let llval = C_usize(self.cx, self.cx.size_of(ty).bytes());
                Const::new(llval, tcx.types.usize)
            }

            _ => span_bug!(span, "{:?} in constant", rvalue)
        };

        debug!("const_rvalue({:?}: {:?} @ {:?}) = {:?}", rvalue, dest_ty, span, val);

        Ok(val)
    }

}

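// Reconstructs a `ConstInt` of Rust type `t` from an LLVM integer constant, so
// that `const_scalar_checked_binop` below can detect overflow with Rust
// semantics; returns `None` for non-integer types.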
fn to_const_int(value: ValueRef, t: Ty, tcx: TyCtxt) -> Option<ConstInt> {
    match t.sty {
        ty::TyInt(int_type) => const_to_opt_u128(value, true)
            .and_then(|input| ConstInt::new_signed(input as i128, int_type,
                                                   tcx.sess.target.isize_ty)),
        ty::TyUint(uint_type) => const_to_opt_u128(value, false)
            .and_then(|input| ConstInt::new_unsigned(input, uint_type,
                                                     tcx.sess.target.usize_ty)),
        _ => None
    }
}

pub fn const_scalar_binop(op: mir::BinOp,
                          lhs: ValueRef,
                          rhs: ValueRef,
                          input_ty: Ty) -> ValueRef {
    assert!(!input_ty.is_simd());
    let is_float = input_ty.is_fp();
    let signed = input_ty.is_signed();

    unsafe {
        match op {
            mir::BinOp::Add if is_float => llvm::LLVMConstFAdd(lhs, rhs),
            mir::BinOp::Add => llvm::LLVMConstAdd(lhs, rhs),

            mir::BinOp::Sub if is_float => llvm::LLVMConstFSub(lhs, rhs),
            mir::BinOp::Sub => llvm::LLVMConstSub(lhs, rhs),

            mir::BinOp::Mul if is_float => llvm::LLVMConstFMul(lhs, rhs),
            mir::BinOp::Mul => llvm::LLVMConstMul(lhs, rhs),

            mir::BinOp::Div if is_float => llvm::LLVMConstFDiv(lhs, rhs),
            mir::BinOp::Div if signed => llvm::LLVMConstSDiv(lhs, rhs),
            mir::BinOp::Div => llvm::LLVMConstUDiv(lhs, rhs),

            mir::BinOp::Rem if is_float => llvm::LLVMConstFRem(lhs, rhs),
            mir::BinOp::Rem if signed => llvm::LLVMConstSRem(lhs, rhs),
            mir::BinOp::Rem => llvm::LLVMConstURem(lhs, rhs),

            mir::BinOp::BitXor => llvm::LLVMConstXor(lhs, rhs),
            mir::BinOp::BitAnd => llvm::LLVMConstAnd(lhs, rhs),
            mir::BinOp::BitOr => llvm::LLVMConstOr(lhs, rhs),
            mir::BinOp::Shl => {
                let rhs = base::cast_shift_const_rhs(op.to_hir_binop(), lhs, rhs);
                llvm::LLVMConstShl(lhs, rhs)
            }
            mir::BinOp::Shr => {
                let rhs = base::cast_shift_const_rhs(op.to_hir_binop(), lhs, rhs);
                if signed { llvm::LLVMConstAShr(lhs, rhs) }
                else { llvm::LLVMConstLShr(lhs, rhs) }
            }
            mir::BinOp::Eq | mir::BinOp::Ne |
            mir::BinOp::Lt | mir::BinOp::Le |
            mir::BinOp::Gt | mir::BinOp::Ge => {
                if is_float {
                    let cmp = base::bin_op_to_fcmp_predicate(op.to_hir_binop());
                    llvm::LLVMConstFCmp(cmp, lhs, rhs)
                } else {
                    let cmp = base::bin_op_to_icmp_predicate(op.to_hir_binop(),
                                                             signed);
                    llvm::LLVMConstICmp(cmp, lhs, rhs)
                }
            }
            mir::BinOp::Offset => unreachable!("BinOp::Offset in const-eval!")
        }
    }
}

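// Evaluates an overflow-checked binary operation on integer constants; the
// returned flag is true when the operation overflowed (or shifted by a
// negative amount), mirroring the `(value, overflowed)` pair produced by MIR's
// `CheckedBinaryOp`. Returns `None` when the operands are not simple integer
// constants.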
pub fn const_scalar_checked_binop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                            op: mir::BinOp,
                                            lllhs: ValueRef,
                                            llrhs: ValueRef,
                                            input_ty: Ty<'tcx>)
                                            -> Option<(ValueRef, bool)> {
    if let (Some(lhs), Some(rhs)) = (to_const_int(lllhs, input_ty, tcx),
                                     to_const_int(llrhs, input_ty, tcx)) {
        let result = match op {
            mir::BinOp::Add => lhs + rhs,
            mir::BinOp::Sub => lhs - rhs,
            mir::BinOp::Mul => lhs * rhs,
            mir::BinOp::Shl => lhs << rhs,
            mir::BinOp::Shr => lhs >> rhs,
            _ => {
                bug!("Operator `{:?}` is not a checkable operator", op)
            }
        };

        let of = match result {
            Ok(_) => false,
            Err(ConstMathErr::Overflow(_)) |
            Err(ConstMathErr::ShiftNegative) => true,
            Err(err) => {
                bug!("Operator `{:?}` on `{:?}` and `{:?}` errored: {}",
                     op, lhs, rhs, err.description());
            }
        };

        Some((const_scalar_binop(op, lllhs, llrhs, input_ty), of))
    } else {
        None
    }
}

unsafe fn cast_const_float_to_int(cx: &CodegenCx,
                                  operand: &Const,
                                  signed: bool,
                                  int_ty: Type,
                                  span: Span) -> ValueRef {
    let llval = operand.llval;
    let float_bits = match operand.ty.sty {
        ty::TyFloat(fty) => fty.bit_width(),
        _ => bug!("cast_const_float_to_int: operand not a float"),
    };
    // Note: this breaks if llval is a complex constant expression rather than a simple constant.
    // One way that might happen would be if addresses could be turned into integers in constant
    // expressions, but that doesn't appear to be possible?
    // In any case, an ICE is better than producing undef.
    let llval_bits = consts::bitcast(llval, Type::ix(cx, float_bits as u64));
    let bits = const_to_opt_u128(llval_bits, false).unwrap_or_else(|| {
        panic!("could not get bits of constant float {:?}",
               Value(llval));
    });
    let int_width = int_ty.int_width() as usize;
    // Try to convert, but report an error for overflow and NaN. This matches HIR const eval.
    let cast_result = match float_bits {
        32 if signed => ieee::Single::from_bits(bits).to_i128(int_width).map(|v| v as u128),
        64 if signed => ieee::Double::from_bits(bits).to_i128(int_width).map(|v| v as u128),
        32 => ieee::Single::from_bits(bits).to_u128(int_width),
        64 => ieee::Double::from_bits(bits).to_u128(int_width),
        n => bug!("unsupported float width {}", n),
    };
    if cast_result.status.contains(Status::INVALID_OP) {
        let err = ConstEvalErr { span: span, kind: ErrKind::CannotCast };
        err.report(cx.tcx, span, "expression");
    }
    C_uint_big(int_ty, cast_result.value)
}

unsafe fn cast_const_int_to_float(cx: &CodegenCx,
                                  llval: ValueRef,
                                  signed: bool,
                                  float_ty: Type) -> ValueRef {
    // Note: this breaks if llval is a complex constant expression rather than a simple constant.
    // One way that might happen would be if addresses could be turned into integers in constant
    // expressions, but that doesn't appear to be possible?
    // In any case, an ICE is better than producing undef.
    let value = const_to_opt_u128(llval, signed).unwrap_or_else(|| {
        panic!("could not get u128 value of constant integer {:?}",
               Value(llval));
    });
    if signed {
        llvm::LLVMConstSIToFP(llval, float_ty.to_ref())
    } else if float_ty.float_width() == 32 && value >= MAX_F32_PLUS_HALF_ULP {
        // We're casting to f32 and the value is > f32::MAX + 0.5 ULP -> round up to infinity.
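        // (For example, a constant `u128::MAX as f32` takes this path and
        // becomes `f32::INFINITY`.)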
        let infinity_bits = C_u32(cx, ieee::Single::INFINITY.to_bits() as u32);
        consts::bitcast(infinity_bits, float_ty)
    } else {
        llvm::LLVMConstUIToFP(llval, float_ty.to_ref())
    }
}

impl<'a, 'tcx> FunctionCx<'a, 'tcx> {
    pub fn trans_constant(&mut self,
                          bx: &Builder<'a, 'tcx>,
                          constant: &mir::Constant<'tcx>)
                          -> Const<'tcx>
    {
        debug!("trans_constant({:?})", constant);
        let ty = self.monomorphize(&constant.ty);
        let result = match constant.literal.clone() {
            mir::Literal::Promoted { index } => {
                let mir = &self.mir.promoted[index];
                MirConstContext::new(bx.cx, mir, self.param_substs, IndexVec::new()).trans()
            }
            mir::Literal::Value { value } => {
                if let ConstVal::Unevaluated(def_id, substs) = value.val {
                    let substs = self.monomorphize(&substs);
                    MirConstContext::trans_def(bx.cx, def_id, substs, IndexVec::new())
                } else {
                    Ok(Const::from_constval(bx.cx, &value.val, ty))
                }
            }
        };

        let result = result.unwrap_or_else(|_| {
            // We've errored, so we don't have to produce working code.
            let llty = bx.cx.layout_of(ty).llvm_type(bx.cx);
            Const::new(C_undef(llty), ty)
        });

        debug!("trans_constant({:?}) = {:?}", constant, result);
        result
    }
}


pub fn trans_static_initializer<'a, 'tcx>(
    cx: &CodegenCx<'a, 'tcx>,
    def_id: DefId)
    -> Result<ValueRef, ConstEvalErr<'tcx>>
{
    MirConstContext::trans_def(cx, def_id, Substs::empty(), IndexVec::new())
        .map(|c| c.llval)
}

/// Construct a constant value, suitable for initializing a
/// GlobalVariable, given a case and constant values for its fields.
/// Note that this may have a different LLVM type (and different
/// alignment!) from the representation's `type_of`, so it needs a
/// pointer cast before use.
///
/// The LLVM type system does not directly support unions, and only
/// pointers can be bitcast, so a constant (and, by extension, the
/// GlobalVariable initialized by it) will have a type that can vary
/// depending on which case of an enum it is.
///
/// To understand the alignment situation, consider `enum E { V64(u64),
/// V32(u32, u32) }` on Windows. The type has 8-byte alignment to
/// accommodate the u64, but `V32(x, y)` would have LLVM type `{i32,
/// i32, i32}`, which is 4-byte aligned.
///
/// Currently the returned value has the same size as the type, but
/// this could be changed in the future to avoid allocating unnecessary
/// space after values of shorter-than-maximum cases.
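// For the `enum E` example above, a constant of the `V32` variant is thus
// emitted as a 4-byte-aligned `{i32, i32, i32}` global even though `E` itself
// is 8-byte aligned, which is why callers must go through a pointer cast
// rather than rely on the enum's declared LLVM type.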
fn trans_const_adt<'a, 'tcx>(
    cx: &CodegenCx<'a, 'tcx>,
    t: Ty<'tcx>,
    kind: &mir::AggregateKind,
    vals: &[Const<'tcx>]
) -> Const<'tcx> {
    let l = cx.layout_of(t);
    let variant_index = match *kind {
        mir::AggregateKind::Adt(_, index, _, _) => index,
        _ => 0,
    };

    if let layout::Abi::Uninhabited = l.abi {
        return Const::new(C_undef(l.llvm_type(cx)), t);
    }

    match l.variants {
        layout::Variants::Single { index } => {
            assert_eq!(variant_index, index);
            if let layout::FieldPlacement::Union(_) = l.fields {
                assert_eq!(variant_index, 0);
                assert_eq!(vals.len(), 1);
                let (field_size, field_align) = cx.size_and_align_of(vals[0].ty);
                let contents = [
                    vals[0].llval,
                    padding(cx, l.size - field_size)
                ];

                let packed = l.align.abi() < field_align.abi();
                Const::new(C_struct(cx, &contents, packed), t)
            } else {
                if let layout::Abi::Vector { .. } = l.abi {
                    if let layout::FieldPlacement::Array { .. } = l.fields {
                        return Const::new(C_vector(&vals.iter().map(|x| x.llval)
                            .collect::<Vec<_>>()), t);
                    }
                }
                build_const_struct(cx, l, vals, None)
            }
        }
        layout::Variants::Tagged { .. } => {
            let discr = match *kind {
                mir::AggregateKind::Adt(adt_def, _, _, _) => {
                    adt_def.discriminant_for_variant(cx.tcx, variant_index)
                           .to_u128_unchecked() as u64
                },
                _ => 0,
            };
            let discr_field = l.field(cx, 0);
            let discr = C_int(discr_field.llvm_type(cx), discr as i64);
            if let layout::Abi::Scalar(_) = l.abi {
                Const::new(discr, t)
            } else {
                let discr = Const::new(discr, discr_field.ty);
                build_const_struct(cx, l.for_variant(cx, variant_index), vals, Some(discr))
            }
        }
        layout::Variants::NicheFilling {
            dataful_variant,
            ref niche_variants,
            niche_start,
            ..
        } => {
            if variant_index == dataful_variant {
                build_const_struct(cx, l.for_variant(cx, dataful_variant), vals, None)
            } else {
                let niche = l.field(cx, 0);
                let niche_llty = niche.llvm_type(cx);
                let niche_value = ((variant_index - niche_variants.start) as u128)
                    .wrapping_add(niche_start);
                // FIXME(eddyb) Check the actual primitive type here.
                let niche_llval = if niche_value == 0 {
                    // HACK(eddyb) Using `C_null` as it works on all types.
                    C_null(niche_llty)
                } else {
                    C_uint_big(niche_llty, niche_value)
                };
                build_const_struct(cx, l, &[Const::new(niche_llval, niche.ty)], None)
            }
        }
    }
}

/// Building structs is a little complicated, because we might need to
/// insert padding if a field's value is less aligned than its type.
///
/// Continuing the example from `trans_const_adt`, a value of type `(u32,
/// E)` should have the `E` at offset 8, but if that field's
/// initializer is 4-byte aligned then simply translating the tuple as
/// a two-element struct will locate it at offset 4, and accesses to it
/// will read the wrong memory.
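// To avoid that, explicit runs of `i8` padding (see `padding` below) are
// emitted between fields, and the resulting struct is marked packed whenever a
// field's own alignment exceeds the alignment of the overall layout.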
fn build_const_struct<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
                                layout: layout::TyLayout<'tcx>,
                                vals: &[Const<'tcx>],
                                discr: Option<Const<'tcx>>)
                                -> Const<'tcx> {
    assert_eq!(vals.len(), layout.fields.count());

    match layout.abi {
        layout::Abi::Scalar(_) |
        layout::Abi::ScalarPair(..) |
        layout::Abi::Vector { .. } if discr.is_none() => {
            let mut non_zst_fields = vals.iter().enumerate().map(|(i, f)| {
                (f, layout.fields.offset(i))
            }).filter(|&(f, _)| !cx.layout_of(f.ty).is_zst());
            match (non_zst_fields.next(), non_zst_fields.next()) {
                (Some((x, offset)), None) if offset.bytes() == 0 => {
                    return Const::new(x.llval, layout.ty);
                }
                (Some((a, a_offset)), Some((b, _))) if a_offset.bytes() == 0 => {
                    return Const::new(C_struct(cx, &[a.llval, b.llval], false), layout.ty);
                }
                (Some((a, _)), Some((b, b_offset))) if b_offset.bytes() == 0 => {
                    return Const::new(C_struct(cx, &[b.llval, a.llval], false), layout.ty);
                }
                _ => {}
            }
        }
        _ => {}
    }

    // offset of current value
    let mut packed = false;
    let mut offset = Size::from_bytes(0);
    let mut cfields = Vec::new();
    cfields.reserve(discr.is_some() as usize + 1 + layout.fields.count() * 2);

    if let Some(discr) = discr {
        let (field_size, field_align) = cx.size_and_align_of(discr.ty);
        packed |= layout.align.abi() < field_align.abi();
        cfields.push(discr.llval);
        offset = field_size;
    }

    let parts = layout.fields.index_by_increasing_offset().map(|i| {
        (vals[i], layout.fields.offset(i))
    });
    for (val, target_offset) in parts {
        let (field_size, field_align) = cx.size_and_align_of(val.ty);
        packed |= layout.align.abi() < field_align.abi();
        cfields.push(padding(cx, target_offset - offset));
        cfields.push(val.llval);
        offset = target_offset + field_size;
    }

    // Pad to the size of the whole type, not e.g. the variant.
    cfields.push(padding(cx, cx.size_of(layout.ty) - offset));

    Const::new(C_struct(cx, &cfields, packed), layout.ty)
}

fn padding(cx: &CodegenCx, size: Size) -> ValueRef {
    C_undef(Type::array(&Type::i8(cx), size.bytes()))
}