1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! # Translation of Expressions
13 //! The expr module handles translation of expressions. The most general
14 //! translation routine is `trans()`, which will translate an expression
15 //! into a datum. `trans_into()` is also available, which will translate
16 //! an expression and write the result directly into memory, sometimes
17 //! avoiding the need for a temporary stack slot. Finally,
18 //! `trans_to_lvalue()` is available if you'd like to ensure that the
19 //! result has cleanup scheduled.
21 //! Internally, each of these functions dispatches to various other
22 //! expression functions depending on the kind of expression. We divide
23 //! up expressions into:
25 //! - **Datum expressions:** Those that most naturally yield values.
26 //! Examples would be `22`, `box x`, or `a + b` (when not overloaded).
27 //! - **DPS expressions:** Those that most naturally write into a location
28 //! in memory. Examples would be `foo()` or `Point { x: 3, y: 4 }`.
29 //! - **Statement expressions:** That that do not generate a meaningful
30 //! result. Examples would be `while { ... }` or `return 44`.
32 //! Public entry points:
34 //! - `trans_into(bcx, expr, dest) -> bcx`: evaluates an expression,
35 //! storing the result into `dest`. This is the preferred form, if you
38 //! - `trans(bcx, expr) -> DatumBlock`: evaluates an expression, yielding
39 //! `Datum` with the result. You can then store the datum, inspect
40 //! the value, etc. This may introduce temporaries if the datum is a
43 //! - `trans_to_lvalue(bcx, expr, "...") -> DatumBlock`: evaluates an
44 //! expression and ensures that the result has a cleanup associated with it,
45 //! creating a temporary stack slot if necessary.
47 //! - `trans_local_var -> Datum`: looks up a local variable or upvar.
49 #![allow(non_camel_case_types)]
51 pub use self::cast_kind
::*;
52 pub use self::Dest
::*;
53 use self::lazy_binop_ty
::*;
56 use llvm
::{self, ValueRef}
;
57 use middle
::check_const
;
59 use middle
::mem_categorization
::Typer
;
60 use middle
::subst
::{self, Substs}
;
61 use trans
::{_match, adt, asm, base, callee, closure, consts, controlflow}
;
64 use trans
::cleanup
::{self, CleanupMethods}
;
67 use trans
::debuginfo
::{self, DebugLoc, ToDebugLoc}
;
71 use trans
::monomorphize
;
74 use middle
::ty
::{struct_fields, tup_fields}
;
75 use middle
::ty
::{AdjustDerefRef, AdjustReifyFnPointer, AdjustUnsafeFnPointer}
;
76 use middle
::ty
::{self, Ty}
;
77 use middle
::ty
::MethodCall
;
78 use util
::common
::indenter
;
79 use util
::ppaux
::Repr
;
80 use trans
::machine
::{llsize_of, llsize_of_alloc}
;
81 use trans
::type_
::Type
;
83 use syntax
::{ast, ast_util, codemap}
;
84 use syntax
::parse
::token
::InternedString
;
86 use syntax
::parse
::token
;
87 use std
::iter
::repeat
;
93 // These are passed around by the code generating functions to track the
94 // destination of a computation's value.
96 #[derive(Copy, Clone, PartialEq)]
103 pub fn to_string(&self, ccx
: &CrateContext
) -> String
{
105 SaveIn(v
) => format
!("SaveIn({})", ccx
.tn().val_to_string(v
)),
106 Ignore
=> "Ignore".to_string()
111 /// This function is equivalent to `trans(bcx, expr).store_to_dest(dest)` but it may generate
112 /// better optimized LLVM code.
113 pub fn trans_into
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
116 -> Block
<'blk
, 'tcx
> {
119 debuginfo
::set_source_location(bcx
.fcx
, expr
.id
, expr
.span
);
121 if bcx
.tcx().adjustments
.borrow().contains_key(&expr
.id
) {
122 // use trans, which may be less efficient but
123 // which will perform the adjustments:
124 let datum
= unpack_datum
!(bcx
, trans(bcx
, expr
));
125 return datum
.store_to_dest(bcx
, dest
, expr
.id
);
128 let qualif
= *bcx
.tcx().const_qualif_map
.borrow().get(&expr
.id
).unwrap();
129 if !qualif
.intersects(check_const
::NOT_CONST
| check_const
::NEEDS_DROP
) {
130 if !qualif
.intersects(check_const
::PREFER_IN_PLACE
) {
131 if let SaveIn(lldest
) = dest
{
132 let global
= consts
::get_const_expr_as_global(bcx
.ccx(), expr
, qualif
,
133 bcx
.fcx
.param_substs
);
134 // Cast pointer to destination, because constants
135 // have different types.
136 let lldest
= PointerCast(bcx
, lldest
, val_ty(global
));
137 memcpy_ty(bcx
, lldest
, global
, expr_ty_adjusted(bcx
, expr
));
139 // Don't do anything in the Ignore case, consts don't need drop.
142 // The only way we're going to see a `const` at this point is if
143 // it prefers in-place instantiation, likely because it contains
144 // `[x; N]` somewhere within.
146 ast
::ExprPath(..) => {
147 match bcx
.def(expr
.id
) {
148 def
::DefConst(did
) => {
149 let const_expr
= consts
::get_const_expr(bcx
.ccx(), did
, expr
);
150 // Temporarily get cleanup scopes out of the way,
151 // as they require sub-expressions to be contained
152 // inside the current AST scope.
153 // These should record no cleanups anyways, `const`
154 // can't have destructors.
155 let scopes
= mem
::replace(&mut *bcx
.fcx
.scopes
.borrow_mut(),
157 // Lock emitted debug locations to the location of
158 // the constant reference expression.
159 debuginfo
::with_source_location_override(bcx
.fcx
,
162 bcx
= trans_into(bcx
, const_expr
, dest
)
164 let scopes
= mem
::replace(&mut *bcx
.fcx
.scopes
.borrow_mut(),
166 assert
!(scopes
.is_empty());
177 debug
!("trans_into() expr={}", expr
.repr(bcx
.tcx()));
179 let cleanup_debug_loc
= debuginfo
::get_cleanup_debug_loc_for_ast_node(bcx
.ccx(),
183 bcx
.fcx
.push_ast_cleanup_scope(cleanup_debug_loc
);
185 let kind
= ty
::expr_kind(bcx
.tcx(), expr
);
187 ty
::LvalueExpr
| ty
::RvalueDatumExpr
=> {
188 trans_unadjusted(bcx
, expr
).store_to_dest(dest
, expr
.id
)
190 ty
::RvalueDpsExpr
=> {
191 trans_rvalue_dps_unadjusted(bcx
, expr
, dest
)
193 ty
::RvalueStmtExpr
=> {
194 trans_rvalue_stmt_unadjusted(bcx
, expr
)
198 bcx
.fcx
.pop_and_trans_ast_cleanup_scope(bcx
, expr
.id
)
201 /// Translates an expression, returning a datum (and new block) encapsulating the result. When
202 /// possible, it is preferred to use `trans_into`, as that may avoid creating a temporary on the
204 pub fn trans
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
206 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
207 debug
!("trans(expr={})", bcx
.expr_to_string(expr
));
211 let qualif
= *bcx
.tcx().const_qualif_map
.borrow().get(&expr
.id
).unwrap();
212 let adjusted_global
= !qualif
.intersects(check_const
::NON_STATIC_BORROWS
);
213 let global
= if !qualif
.intersects(check_const
::NOT_CONST
| check_const
::NEEDS_DROP
) {
214 let global
= consts
::get_const_expr_as_global(bcx
.ccx(), expr
, qualif
,
215 bcx
.fcx
.param_substs
);
217 if qualif
.intersects(check_const
::HAS_STATIC_BORROWS
) {
218 // Is borrowed as 'static, must return lvalue.
220 // Cast pointer to global, because constants have different types.
221 let const_ty
= expr_ty_adjusted(bcx
, expr
);
222 let llty
= type_of
::type_of(bcx
.ccx(), const_ty
);
223 let global
= PointerCast(bcx
, global
, llty
.ptr_to());
224 let datum
= Datum
::new(global
, const_ty
, Lvalue
);
225 return DatumBlock
::new(bcx
, datum
.to_expr_datum());
228 // Otherwise, keep around and perform adjustments, if needed.
229 let const_ty
= if adjusted_global
{
230 expr_ty_adjusted(bcx
, expr
)
235 // This could use a better heuristic.
236 Some(if type_is_immediate(bcx
.ccx(), const_ty
) {
237 // Cast pointer to global, because constants have different types.
238 let llty
= type_of
::type_of(bcx
.ccx(), const_ty
);
239 let global
= PointerCast(bcx
, global
, llty
.ptr_to());
240 // Maybe just get the value directly, instead of loading it?
241 immediate_rvalue(load_ty(bcx
, global
, const_ty
), const_ty
)
243 let llty
= type_of
::type_of(bcx
.ccx(), const_ty
);
244 // HACK(eddyb) get around issues with lifetime intrinsics.
245 let scratch
= alloca_no_lifetime(bcx
, llty
, "const");
246 let lldest
= if !ty
::type_is_structural(const_ty
) {
247 // Cast pointer to slot, because constants have different types.
248 PointerCast(bcx
, scratch
, val_ty(global
))
250 // In this case, memcpy_ty calls llvm.memcpy after casting both
251 // source and destination to i8*, so we don't need any casts.
254 memcpy_ty(bcx
, lldest
, global
, const_ty
);
255 Datum
::new(scratch
, const_ty
, Rvalue
::new(ByRef
))
261 let cleanup_debug_loc
= debuginfo
::get_cleanup_debug_loc_for_ast_node(bcx
.ccx(),
265 fcx
.push_ast_cleanup_scope(cleanup_debug_loc
);
266 let datum
= match global
{
267 Some(rvalue
) => rvalue
.to_expr_datum(),
268 None
=> unpack_datum
!(bcx
, trans_unadjusted(bcx
, expr
))
270 let datum
= if adjusted_global
{
271 datum
// trans::consts already performed adjustments.
273 unpack_datum
!(bcx
, apply_adjustments(bcx
, expr
, datum
))
275 bcx
= fcx
.pop_and_trans_ast_cleanup_scope(bcx
, expr
.id
);
276 return DatumBlock
::new(bcx
, datum
);
279 pub fn get_len(bcx
: Block
, fat_ptr
: ValueRef
) -> ValueRef
{
280 GEPi(bcx
, fat_ptr
, &[0, abi
::FAT_PTR_EXTRA
])
283 pub fn get_dataptr(bcx
: Block
, fat_ptr
: ValueRef
) -> ValueRef
{
284 GEPi(bcx
, fat_ptr
, &[0, abi
::FAT_PTR_ADDR
])
287 pub fn copy_fat_ptr(bcx
: Block
, src_ptr
: ValueRef
, dst_ptr
: ValueRef
) {
288 Store(bcx
, Load(bcx
, get_dataptr(bcx
, src_ptr
)), get_dataptr(bcx
, dst_ptr
));
289 Store(bcx
, Load(bcx
, get_len(bcx
, src_ptr
)), get_len(bcx
, dst_ptr
));
292 /// Retrieve the information we are losing (making dynamic) in an unsizing
295 /// The `old_info` argument is a bit funny. It is intended for use
296 /// in an upcast, where the new vtable for an object will be drived
297 /// from the old one.
298 pub fn unsized_info
<'ccx
, 'tcx
>(ccx
: &CrateContext
<'ccx
, 'tcx
>,
301 old_info
: Option
<ValueRef
>,
302 param_substs
: &'tcx subst
::Substs
<'tcx
>)
304 let (source
, target
) = ty
::struct_lockstep_tails(ccx
.tcx(), source
, target
);
305 match (&source
.sty
, &target
.sty
) {
306 (&ty
::ty_vec(_
, Some(len
)), &ty
::ty_vec(_
, None
)) => C_uint(ccx
, len
),
307 (&ty
::ty_trait(_
), &ty
::ty_trait(_
)) => {
308 // For now, upcasts are limited to changes in marker
309 // traits, and hence never actually require an actual
310 // change to the vtable.
311 old_info
.expect("unsized_info: missing old info for trait upcast")
313 (_
, &ty
::ty_trait(box ty
::TyTrait { ref principal, .. }
)) => {
314 // Note that we preserve binding levels here:
315 let substs
= principal
.0.substs
.with_self_ty(source
).erase_regions();
316 let substs
= ccx
.tcx().mk_substs(substs
);
317 let trait_ref
= ty
::Binder(Rc
::new(ty
::TraitRef
{ def_id
: principal
.def_id(),
319 consts
::ptrcast(meth
::get_vtable(ccx
, trait_ref
, param_substs
),
320 Type
::vtable_ptr(ccx
))
322 _
=> ccx
.sess().bug(&format
!("unsized_info: invalid unsizing {} -> {}",
323 source
.repr(ccx
.tcx()),
324 target
.repr(ccx
.tcx())))
328 /// Helper for trans that apply adjustments from `expr` to `datum`, which should be the unadjusted
329 /// translation of `expr`.
330 fn apply_adjustments
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
332 datum
: Datum
<'tcx
, Expr
>)
333 -> DatumBlock
<'blk
, 'tcx
, Expr
>
336 let mut datum
= datum
;
337 let adjustment
= match bcx
.tcx().adjustments
.borrow().get(&expr
.id
).cloned() {
339 return DatumBlock
::new(bcx
, datum
);
343 debug
!("unadjusted datum for expr {}: {} adjustment={:?}",
344 expr
.repr(bcx
.tcx()),
345 datum
.to_string(bcx
.ccx()),
348 AdjustReifyFnPointer
=> {
349 // FIXME(#19925) once fn item types are
350 // zero-sized, we'll need to do something here
352 AdjustUnsafeFnPointer
=> {
353 // purely a type-level thing
355 AdjustDerefRef(ref adj
) => {
356 let skip_reborrows
= if adj
.autoderefs
== 1 && adj
.autoref
.is_some() {
357 // We are a bit paranoid about adjustments and thus might have a re-
358 // borrow here which merely derefs and then refs again (it might have
359 // a different region or mutability, but we don't care here).
361 // Don't skip a conversion from Box<T> to &T, etc.
363 let method_call
= MethodCall
::autoderef(expr
.id
, 0);
364 if bcx
.tcx().method_map
.borrow().contains_key(&method_call
) {
365 // Don't skip an overloaded deref.
377 if adj
.autoderefs
> skip_reborrows
{
379 let lval
= unpack_datum
!(bcx
, datum
.to_lvalue_datum(bcx
, "auto_deref", expr
.id
));
380 datum
= unpack_datum
!(bcx
, deref_multiple(bcx
, expr
,
381 lval
.to_expr_datum(),
382 adj
.autoderefs
- skip_reborrows
));
385 // (You might think there is a more elegant way to do this than a
386 // skip_reborrows bool, but then you remember that the borrow checker exists).
387 if skip_reborrows
== 0 && adj
.autoref
.is_some() {
388 datum
= unpack_datum
!(bcx
, apply_autoref(bcx
, expr
, datum
));
391 if let Some(target
) = adj
.unsize
{
392 datum
= unpack_datum
!(bcx
, unsize_pointer(bcx
, datum
,
393 bcx
.monomorphize(&target
)));
397 debug
!("after adjustments, datum={}", datum
.to_string(bcx
.ccx()));
398 return DatumBlock
::new(bcx
, datum
);
400 fn apply_autoref
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
402 datum
: Datum
<'tcx
, Expr
>)
403 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
406 if !type_is_sized(bcx
.tcx(), datum
.ty
) {
408 let lval
= unpack_datum
!(bcx
,
409 datum
.to_lvalue_datum(bcx
, "ref_fat_ptr", expr
.id
));
410 ref_fat_ptr(bcx
, lval
)
412 auto_ref(bcx
, datum
, expr
)
416 fn unsize_pointer
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
417 datum
: Datum
<'tcx
, Expr
>,
419 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
421 let unsized_ty
= ty
::deref(target
, true)
422 .expect("expr::unsize got non-pointer target type").ty
;
423 debug
!("unsize_lvalue(unsized_ty={})", unsized_ty
.repr(bcx
.tcx()));
425 // We do not arrange cleanup ourselves; if we already are an
426 // L-value, then cleanup will have already been scheduled (and
427 // the `datum.to_rvalue_datum` call below will emit code to zero
428 // the drop flag when moving out of the L-value). If we are an
429 // R-value, then we do not need to schedule cleanup.
430 let datum
= unpack_datum
!(bcx
, datum
.to_rvalue_datum(bcx
, "__unsize_ref"));
432 let pointee_ty
= ty
::deref(datum
.ty
, true)
433 .expect("expr::unsize got non-pointer datum type").ty
;
434 let (base
, old_info
) = if !type_is_sized(bcx
.tcx(), pointee_ty
) {
435 // Normally, the source is a thin pointer and we are
436 // adding extra info to make a fat pointer. The exception
437 // is when we are upcasting an existing object fat pointer
438 // to use a different vtable. In that case, we want to
439 // load out the original data pointer so we can repackage
441 (Load(bcx
, get_dataptr(bcx
, datum
.val
)),
442 Some(Load(bcx
, get_len(bcx
, datum
.val
))))
447 let info
= unsized_info(bcx
.ccx(), pointee_ty
, unsized_ty
,
448 old_info
, bcx
.fcx
.param_substs
);
450 // Compute the base pointer. This doesn't change the pointer value,
451 // but merely its type.
452 let ptr_ty
= type_of
::in_memory_type_of(bcx
.ccx(), unsized_ty
).ptr_to();
453 let base
= PointerCast(bcx
, base
, ptr_ty
);
455 let llty
= type_of
::type_of(bcx
.ccx(), target
);
456 // HACK(eddyb) get around issues with lifetime intrinsics.
457 let scratch
= alloca_no_lifetime(bcx
, llty
, "__fat_ptr");
458 Store(bcx
, base
, get_dataptr(bcx
, scratch
));
459 Store(bcx
, info
, get_len(bcx
, scratch
));
461 DatumBlock
::new(bcx
, Datum
::new(scratch
, target
, RvalueExpr(Rvalue
::new(ByRef
))))
465 /// Translates an expression in "lvalue" mode -- meaning that it returns a reference to the memory
466 /// that the expr represents.
468 /// If this expression is an rvalue, this implies introducing a temporary. In other words,
469 /// something like `x().f` is translated into roughly the equivalent of
471 /// { tmp = x(); tmp.f }
472 pub fn trans_to_lvalue
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
475 -> DatumBlock
<'blk
, 'tcx
, Lvalue
> {
477 let datum
= unpack_datum
!(bcx
, trans(bcx
, expr
));
478 return datum
.to_lvalue_datum(bcx
, name
, expr
.id
);
481 /// A version of `trans` that ignores adjustments. You almost certainly do not want to call this
483 fn trans_unadjusted
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
485 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
488 debug
!("trans_unadjusted(expr={})", bcx
.expr_to_string(expr
));
489 let _indenter
= indenter();
491 debuginfo
::set_source_location(bcx
.fcx
, expr
.id
, expr
.span
);
493 return match ty
::expr_kind(bcx
.tcx(), expr
) {
494 ty
::LvalueExpr
| ty
::RvalueDatumExpr
=> {
495 let datum
= unpack_datum
!(bcx
, {
496 trans_datum_unadjusted(bcx
, expr
)
499 DatumBlock {bcx: bcx, datum: datum}
502 ty
::RvalueStmtExpr
=> {
503 bcx
= trans_rvalue_stmt_unadjusted(bcx
, expr
);
504 nil(bcx
, expr_ty(bcx
, expr
))
507 ty
::RvalueDpsExpr
=> {
508 let ty
= expr_ty(bcx
, expr
);
509 if type_is_zero_size(bcx
.ccx(), ty
) {
510 bcx
= trans_rvalue_dps_unadjusted(bcx
, expr
, Ignore
);
513 let scratch
= rvalue_scratch_datum(bcx
, ty
, "");
514 bcx
= trans_rvalue_dps_unadjusted(
515 bcx
, expr
, SaveIn(scratch
.val
));
517 // Note: this is not obviously a good idea. It causes
518 // immediate values to be loaded immediately after a
519 // return from a call or other similar expression,
520 // which in turn leads to alloca's having shorter
521 // lifetimes and hence larger stack frames. However,
522 // in turn it can lead to more register pressure.
523 // Still, in practice it seems to increase
524 // performance, since we have fewer problems with
526 let scratch
= unpack_datum
!(
527 bcx
, scratch
.to_appropriate_datum(bcx
));
529 DatumBlock
::new(bcx
, scratch
.to_expr_datum())
534 fn nil
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>, ty
: Ty
<'tcx
>)
535 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
536 let llval
= C_undef(type_of
::type_of(bcx
.ccx(), ty
));
537 let datum
= immediate_rvalue(llval
, ty
);
538 DatumBlock
::new(bcx
, datum
.to_expr_datum())
542 fn trans_datum_unadjusted
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
544 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
547 let _icx
= push_ctxt("trans_datum_unadjusted");
550 ast
::ExprParen(ref e
) => {
553 ast
::ExprPath(..) => {
554 trans_def(bcx
, expr
, bcx
.def(expr
.id
))
556 ast
::ExprField(ref base
, ident
) => {
557 trans_rec_field(bcx
, &**base
, ident
.node
.name
)
559 ast
::ExprTupField(ref base
, idx
) => {
560 trans_rec_tup_field(bcx
, &**base
, idx
.node
)
562 ast
::ExprIndex(ref base
, ref idx
) => {
563 trans_index(bcx
, expr
, &**base
, &**idx
, MethodCall
::expr(expr
.id
))
565 ast
::ExprBox(_
, ref contents
) => {
566 // Special case for `Box<T>`
567 let box_ty
= expr_ty(bcx
, expr
);
568 let contents_ty
= expr_ty(bcx
, &**contents
);
571 trans_uniq_expr(bcx
, expr
, box_ty
, &**contents
, contents_ty
)
573 _
=> bcx
.sess().span_bug(expr
.span
,
574 "expected unique box")
578 ast
::ExprLit(ref lit
) => trans_immediate_lit(bcx
, expr
, &**lit
),
579 ast
::ExprBinary(op
, ref lhs
, ref rhs
) => {
580 trans_binary(bcx
, expr
, op
, &**lhs
, &**rhs
)
582 ast
::ExprUnary(op
, ref x
) => {
583 trans_unary(bcx
, expr
, op
, &**x
)
585 ast
::ExprAddrOf(_
, ref x
) => {
587 ast
::ExprRepeat(..) | ast
::ExprVec(..) => {
588 // Special case for slices.
589 let cleanup_debug_loc
=
590 debuginfo
::get_cleanup_debug_loc_for_ast_node(bcx
.ccx(),
594 fcx
.push_ast_cleanup_scope(cleanup_debug_loc
);
595 let datum
= unpack_datum
!(
596 bcx
, tvec
::trans_slice_vec(bcx
, expr
, &**x
));
597 bcx
= fcx
.pop_and_trans_ast_cleanup_scope(bcx
, x
.id
);
598 DatumBlock
::new(bcx
, datum
)
601 trans_addr_of(bcx
, expr
, &**x
)
605 ast
::ExprCast(ref val
, _
) => {
606 // Datum output mode means this is a scalar cast:
607 trans_imm_cast(bcx
, &**val
, expr
.id
)
610 bcx
.tcx().sess
.span_bug(
612 &format
!("trans_rvalue_datum_unadjusted reached \
613 fall-through case: {:?}",
619 fn trans_field
<'blk
, 'tcx
, F
>(bcx
: Block
<'blk
, 'tcx
>,
622 -> DatumBlock
<'blk
, 'tcx
, Expr
> where
623 F
: FnOnce(&'blk ty
::ctxt
<'tcx
>, &[ty
::field
<'tcx
>]) -> usize,
626 let _icx
= push_ctxt("trans_rec_field");
628 let base_datum
= unpack_datum
!(bcx
, trans_to_lvalue(bcx
, base
, "field"));
629 let bare_ty
= base_datum
.ty
;
630 let repr
= adt
::represent_type(bcx
.ccx(), bare_ty
);
631 with_field_tys(bcx
.tcx(), bare_ty
, None
, move |discr
, field_tys
| {
632 let ix
= get_idx(bcx
.tcx(), field_tys
);
633 let d
= base_datum
.get_element(
636 |srcval
| adt
::trans_field_ptr(bcx
, &*repr
, srcval
, discr
, ix
));
638 if type_is_sized(bcx
.tcx(), d
.ty
) {
639 DatumBlock { datum: d.to_expr_datum(), bcx: bcx }
641 let scratch
= rvalue_scratch_datum(bcx
, d
.ty
, "");
642 Store(bcx
, d
.val
, get_dataptr(bcx
, scratch
.val
));
643 let info
= Load(bcx
, get_len(bcx
, base_datum
.val
));
644 Store(bcx
, info
, get_len(bcx
, scratch
.val
));
646 DatumBlock
::new(bcx
, scratch
.to_expr_datum())
653 /// Translates `base.field`.
654 fn trans_rec_field
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
657 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
658 trans_field(bcx
, base
, |tcx
, field_tys
| ty
::field_idx_strict(tcx
, field
, field_tys
))
661 /// Translates `base.<idx>`.
662 fn trans_rec_tup_field
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
665 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
666 trans_field(bcx
, base
, |_
, _
| idx
)
669 fn trans_index
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
670 index_expr
: &ast
::Expr
,
673 method_call
: MethodCall
)
674 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
675 //! Translates `base[idx]`.
677 let _icx
= push_ctxt("trans_index");
681 let index_expr_debug_loc
= index_expr
.debug_loc();
683 // Check for overloaded index.
684 let method_ty
= ccx
.tcx()
688 .map(|method
| method
.ty
);
689 let elt_datum
= match method_ty
{
691 let method_ty
= monomorphize_type(bcx
, method_ty
);
693 let base_datum
= unpack_datum
!(bcx
, trans(bcx
, base
));
695 // Translate index expression.
696 let ix_datum
= unpack_datum
!(bcx
, trans(bcx
, idx
));
698 let ref_ty
= // invoked methods have LB regions instantiated:
699 ty
::no_late_bound_regions(
700 bcx
.tcx(), &ty
::ty_fn_ret(method_ty
)).unwrap().unwrap();
701 let elt_ty
= match ty
::deref(ref_ty
, true) {
703 bcx
.tcx().sess
.span_bug(index_expr
.span
,
704 "index method didn't return a \
705 dereferenceable type?!")
707 Some(elt_tm
) => elt_tm
.ty
,
710 // Overloaded. Evaluate `trans_overloaded_op`, which will
711 // invoke the user's index() method, which basically yields
712 // a `&T` pointer. We can then proceed down the normal
713 // path (below) to dereference that `&T`.
714 let scratch
= rvalue_scratch_datum(bcx
, ref_ty
, "overloaded_index_elt");
716 trans_overloaded_op(bcx
,
720 vec
![(ix_datum
, idx
.id
)],
721 Some(SaveIn(scratch
.val
)),
723 let datum
= scratch
.to_expr_datum();
724 if type_is_sized(bcx
.tcx(), elt_ty
) {
725 Datum
::new(datum
.to_llscalarish(bcx
), elt_ty
, LvalueExpr
)
727 Datum
::new(datum
.val
, elt_ty
, LvalueExpr
)
731 let base_datum
= unpack_datum
!(bcx
, trans_to_lvalue(bcx
,
735 // Translate index expression and cast to a suitable LLVM integer.
736 // Rust is less strict than LLVM in this regard.
737 let ix_datum
= unpack_datum
!(bcx
, trans(bcx
, idx
));
738 let ix_val
= ix_datum
.to_llscalarish(bcx
);
739 let ix_size
= machine
::llbitsize_of_real(bcx
.ccx(),
741 let int_size
= machine
::llbitsize_of_real(bcx
.ccx(),
744 if ix_size
< int_size
{
745 if ty
::type_is_signed(expr_ty(bcx
, idx
)) {
746 SExt(bcx
, ix_val
, ccx
.int_type())
747 } else { ZExt(bcx, ix_val, ccx.int_type()) }
748 } else if ix_size
> int_size
{
749 Trunc(bcx
, ix_val
, ccx
.int_type())
755 let unit_ty
= ty
::sequence_element_type(bcx
.tcx(), base_datum
.ty
);
757 let (base
, len
) = base_datum
.get_vec_base_and_len(bcx
);
759 debug
!("trans_index: base {}", bcx
.val_to_string(base
));
760 debug
!("trans_index: len {}", bcx
.val_to_string(len
));
762 let bounds_check
= ICmp(bcx
,
766 index_expr_debug_loc
);
767 let expect
= ccx
.get_intrinsic(&("llvm.expect.i1"));
768 let expected
= Call(bcx
,
770 &[bounds_check
, C_bool(ccx
, false)],
772 index_expr_debug_loc
);
773 bcx
= with_cond(bcx
, expected
, |bcx
| {
774 controlflow
::trans_fail_bounds_check(bcx
,
775 expr_info(index_expr
),
779 let elt
= InBoundsGEP(bcx
, base
, &[ix_val
]);
780 let elt
= PointerCast(bcx
, elt
, type_of
::type_of(ccx
, unit_ty
).ptr_to());
781 Datum
::new(elt
, unit_ty
, LvalueExpr
)
785 DatumBlock
::new(bcx
, elt_datum
)
788 fn trans_def
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
789 ref_expr
: &ast
::Expr
,
791 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
792 //! Translates a reference to a path.
794 let _icx
= push_ctxt("trans_def_lvalue");
796 def
::DefFn(..) | def
::DefMethod(..) |
797 def
::DefStruct(_
) | def
::DefVariant(..) => {
798 let datum
= trans_def_fn_unadjusted(bcx
.ccx(), ref_expr
, def
,
799 bcx
.fcx
.param_substs
);
800 DatumBlock
::new(bcx
, datum
.to_expr_datum())
802 def
::DefStatic(did
, _
) => {
803 // There are two things that may happen here:
804 // 1) If the static item is defined in this crate, it will be
805 // translated using `get_item_val`, and we return a pointer to
807 // 2) If the static item is defined in another crate then we add
808 // (or reuse) a declaration of an external global, and return a
810 let const_ty
= expr_ty(bcx
, ref_expr
);
812 // For external constants, we don't inline.
813 let val
= if did
.krate
== ast
::LOCAL_CRATE
{
816 // The LLVM global has the type of its initializer,
817 // which may not be equal to the enum's type for
819 let val
= base
::get_item_val(bcx
.ccx(), did
.node
);
820 let pty
= type_of
::type_of(bcx
.ccx(), const_ty
).ptr_to();
821 PointerCast(bcx
, val
, pty
)
824 base
::get_extern_const(bcx
.ccx(), did
, const_ty
)
826 DatumBlock
::new(bcx
, Datum
::new(val
, const_ty
, LvalueExpr
))
828 def
::DefConst(_
) => {
829 bcx
.sess().span_bug(ref_expr
.span
,
830 "constant expression should not reach expr::trans_def")
833 DatumBlock
::new(bcx
, trans_local_var(bcx
, def
).to_expr_datum())
838 fn trans_rvalue_stmt_unadjusted
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
840 -> Block
<'blk
, 'tcx
> {
842 let _icx
= push_ctxt("trans_rvalue_stmt");
844 if bcx
.unreachable
.get() {
848 debuginfo
::set_source_location(bcx
.fcx
, expr
.id
, expr
.span
);
851 ast
::ExprParen(ref e
) => {
852 trans_into(bcx
, &**e
, Ignore
)
854 ast
::ExprBreak(label_opt
) => {
855 controlflow
::trans_break(bcx
, expr
, label_opt
)
857 ast
::ExprAgain(label_opt
) => {
858 controlflow
::trans_cont(bcx
, expr
, label_opt
)
860 ast
::ExprRet(ref ex
) => {
861 // Check to see if the return expression itself is reachable.
862 // This can occur when the inner expression contains a return
863 let reachable
= if let Some(ref cfg
) = bcx
.fcx
.cfg
{
864 cfg
.node_is_reachable(expr
.id
)
870 controlflow
::trans_ret(bcx
, expr
, ex
.as_ref().map(|e
| &**e
))
872 // If it's not reachable, just translate the inner expression
873 // directly. This avoids having to manage a return slot when
874 // it won't actually be used anyway.
875 if let &Some(ref x
) = ex
{
876 bcx
= trans_into(bcx
, &**x
, Ignore
);
878 // Mark the end of the block as unreachable. Once we get to
879 // a return expression, there's no more we should be doing
885 ast
::ExprWhile(ref cond
, ref body
, _
) => {
886 controlflow
::trans_while(bcx
, expr
, &**cond
, &**body
)
888 ast
::ExprLoop(ref body
, _
) => {
889 controlflow
::trans_loop(bcx
, expr
, &**body
)
891 ast
::ExprAssign(ref dst
, ref src
) => {
892 let src_datum
= unpack_datum
!(bcx
, trans(bcx
, &**src
));
893 let dst_datum
= unpack_datum
!(bcx
, trans_to_lvalue(bcx
, &**dst
, "assign"));
895 if bcx
.fcx
.type_needs_drop(dst_datum
.ty
) {
896 // If there are destructors involved, make sure we
897 // are copying from an rvalue, since that cannot possible
898 // alias an lvalue. We are concerned about code like:
906 // where e.g. a : Option<Foo> and a.b :
907 // Option<Foo>. In that case, freeing `a` before the
908 // assignment may also free `a.b`!
910 // We could avoid this intermediary with some analysis
911 // to determine whether `dst` may possibly own `src`.
912 debuginfo
::set_source_location(bcx
.fcx
, expr
.id
, expr
.span
);
913 let src_datum
= unpack_datum
!(
914 bcx
, src_datum
.to_rvalue_datum(bcx
, "ExprAssign"));
915 bcx
= glue
::drop_ty(bcx
,
919 src_datum
.store_to(bcx
, dst_datum
.val
)
921 src_datum
.store_to(bcx
, dst_datum
.val
)
924 ast
::ExprAssignOp(op
, ref dst
, ref src
) => {
925 trans_assign_op(bcx
, expr
, op
, &**dst
, &**src
)
927 ast
::ExprInlineAsm(ref a
) => {
928 asm
::trans_inline_asm(bcx
, a
)
931 bcx
.tcx().sess
.span_bug(
933 &format
!("trans_rvalue_stmt_unadjusted reached \
934 fall-through case: {:?}",
940 fn trans_rvalue_dps_unadjusted
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
943 -> Block
<'blk
, 'tcx
> {
944 let _icx
= push_ctxt("trans_rvalue_dps_unadjusted");
948 debuginfo
::set_source_location(bcx
.fcx
, expr
.id
, expr
.span
);
951 ast
::ExprParen(ref e
) => {
952 trans_into(bcx
, &**e
, dest
)
954 ast
::ExprPath(..) => {
955 trans_def_dps_unadjusted(bcx
, expr
, bcx
.def(expr
.id
), dest
)
957 ast
::ExprIf(ref cond
, ref thn
, ref els
) => {
958 controlflow
::trans_if(bcx
, expr
.id
, &**cond
, &**thn
, els
.as_ref().map(|e
| &**e
), dest
)
960 ast
::ExprMatch(ref discr
, ref arms
, _
) => {
961 _match
::trans_match(bcx
, expr
, &**discr
, &arms
[..], dest
)
963 ast
::ExprBlock(ref blk
) => {
964 controlflow
::trans_block(bcx
, &**blk
, dest
)
966 ast
::ExprStruct(_
, ref fields
, ref base
) => {
969 base
.as_ref().map(|e
| &**e
),
972 node_id_type(bcx
, expr
.id
),
975 ast
::ExprRange(ref start
, ref end
) => {
976 // FIXME it is just not right that we are synthesising ast nodes in
978 fn make_field(field_name
: &str, expr
: P
<ast
::Expr
>) -> ast
::Field
{
980 ident
: codemap
::dummy_spanned(token
::str_to_ident(field_name
)),
982 span
: codemap
::DUMMY_SP
,
986 // A range just desugars into a struct.
987 // Note that the type of the start and end may not be the same, but
988 // they should only differ in their lifetime, which should not matter
990 let (did
, fields
, ty_params
) = match (start
, end
) {
991 (&Some(ref start
), &Some(ref end
)) => {
993 let fields
= vec
![make_field("start", start
.clone()),
994 make_field("end", end
.clone())];
995 (tcx
.lang_items
.range_struct(), fields
, vec
![node_id_type(bcx
, start
.id
)])
997 (&Some(ref start
), &None
) => {
998 // Desugar to RangeFrom
999 let fields
= vec
![make_field("start", start
.clone())];
1000 (tcx
.lang_items
.range_from_struct(), fields
, vec
![node_id_type(bcx
, start
.id
)])
1002 (&None
, &Some(ref end
)) => {
1003 // Desugar to RangeTo
1004 let fields
= vec
![make_field("end", end
.clone())];
1005 (tcx
.lang_items
.range_to_struct(), fields
, vec
![node_id_type(bcx
, end
.id
)])
1008 // Desugar to RangeFull
1009 (tcx
.lang_items
.range_full_struct(), vec
![], vec
![])
1013 if let Some(did
) = did
{
1014 let substs
= Substs
::new_type(ty_params
, vec
![]);
1020 ty
::mk_struct(tcx
, did
, tcx
.mk_substs(substs
)),
1023 tcx
.sess
.span_bug(expr
.span
,
1024 "No lang item for ranges (how did we get this far?)")
1027 ast
::ExprTup(ref args
) => {
1028 let numbered_fields
: Vec
<(usize, &ast
::Expr
)> =
1029 args
.iter().enumerate().map(|(i
, arg
)| (i
, &**arg
)).collect();
1033 &numbered_fields
[..],
1038 ast
::ExprLit(ref lit
) => {
1040 ast
::LitStr(ref s
, _
) => {
1041 tvec
::trans_lit_str(bcx
, expr
, (*s
).clone(), dest
)
1046 .span_bug(expr
.span
,
1047 "trans_rvalue_dps_unadjusted shouldn't be \
1048 translating this type of literal")
1052 ast
::ExprVec(..) | ast
::ExprRepeat(..) => {
1053 tvec
::trans_fixed_vstore(bcx
, expr
, dest
)
1055 ast
::ExprClosure(_
, ref decl
, ref body
) => {
1056 let dest
= match dest
{
1057 SaveIn(lldest
) => closure
::Dest
::SaveIn(bcx
, lldest
),
1058 Ignore
=> closure
::Dest
::Ignore(bcx
.ccx())
1060 closure
::trans_closure_expr(dest
, &**decl
, &**body
, expr
.id
, bcx
.fcx
.param_substs
)
1063 ast
::ExprCall(ref f
, ref args
) => {
1064 if bcx
.tcx().is_method_call(expr
.id
) {
1065 trans_overloaded_call(bcx
,
1071 callee
::trans_call(bcx
,
1074 callee
::ArgExprs(&args
[..]),
1078 ast
::ExprMethodCall(_
, _
, ref args
) => {
1079 callee
::trans_method_call(bcx
,
1082 callee
::ArgExprs(&args
[..]),
1085 ast
::ExprBinary(op
, ref lhs
, ref rhs
) => {
1086 // if not overloaded, would be RvalueDatumExpr
1087 let lhs
= unpack_datum
!(bcx
, trans(bcx
, &**lhs
));
1088 let rhs_datum
= unpack_datum
!(bcx
, trans(bcx
, &**rhs
));
1089 trans_overloaded_op(bcx
, expr
, MethodCall
::expr(expr
.id
), lhs
,
1090 vec
![(rhs_datum
, rhs
.id
)], Some(dest
),
1091 !ast_util
::is_by_value_binop(op
.node
)).bcx
1093 ast
::ExprUnary(op
, ref subexpr
) => {
1094 // if not overloaded, would be RvalueDatumExpr
1095 let arg
= unpack_datum
!(bcx
, trans(bcx
, &**subexpr
));
1096 trans_overloaded_op(bcx
, expr
, MethodCall
::expr(expr
.id
),
1097 arg
, Vec
::new(), Some(dest
), !ast_util
::is_by_value_unop(op
)).bcx
1099 ast
::ExprIndex(ref base
, ref idx
) => {
1100 // if not overloaded, would be RvalueDatumExpr
1101 let base
= unpack_datum
!(bcx
, trans(bcx
, &**base
));
1102 let idx_datum
= unpack_datum
!(bcx
, trans(bcx
, &**idx
));
1103 trans_overloaded_op(bcx
, expr
, MethodCall
::expr(expr
.id
), base
,
1104 vec
![(idx_datum
, idx
.id
)], Some(dest
), true).bcx
1106 ast
::ExprCast(..) => {
1107 // Trait casts used to come this way, now they should be coercions.
1108 bcx
.tcx().sess
.span_bug(expr
.span
, "DPS expr_cast (residual trait cast?)")
1110 ast
::ExprAssignOp(op
, ref dst
, ref src
) => {
1111 trans_assign_op(bcx
, expr
, op
, &**dst
, &**src
)
1114 bcx
.tcx().sess
.span_bug(
1116 &format
!("trans_rvalue_dps_unadjusted reached fall-through \
1123 fn trans_def_dps_unadjusted
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1124 ref_expr
: &ast
::Expr
,
1127 -> Block
<'blk
, 'tcx
> {
1128 let _icx
= push_ctxt("trans_def_dps_unadjusted");
1130 let lldest
= match dest
{
1131 SaveIn(lldest
) => lldest
,
1132 Ignore
=> { return bcx; }
1136 def
::DefVariant(tid
, vid
, _
) => {
1137 let variant_info
= ty
::enum_variant_with_id(bcx
.tcx(), tid
, vid
);
1138 if !variant_info
.args
.is_empty() {
1140 let llfn
= callee
::trans_fn_ref(bcx
.ccx(), vid
,
1141 ExprId(ref_expr
.id
),
1142 bcx
.fcx
.param_substs
).val
;
1143 Store(bcx
, llfn
, lldest
);
1147 let ty
= expr_ty(bcx
, ref_expr
);
1148 let repr
= adt
::represent_type(bcx
.ccx(), ty
);
1149 adt
::trans_set_discr(bcx
, &*repr
, lldest
,
1150 variant_info
.disr_val
);
1154 def
::DefStruct(_
) => {
1155 let ty
= expr_ty(bcx
, ref_expr
);
1157 ty
::ty_struct(did
, _
) if ty
::has_dtor(bcx
.tcx(), did
) => {
1158 let repr
= adt
::represent_type(bcx
.ccx(), ty
);
1159 adt
::trans_set_discr(bcx
, &*repr
, lldest
, 0);
1166 bcx
.tcx().sess
.span_bug(ref_expr
.span
, &format
!(
1167 "Non-DPS def {:?} referened by {}",
1168 def
, bcx
.node_id_to_string(ref_expr
.id
)));
1173 pub fn trans_def_fn_unadjusted
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>,
1174 ref_expr
: &ast
::Expr
,
1176 param_substs
: &'tcx subst
::Substs
<'tcx
>)
1177 -> Datum
<'tcx
, Rvalue
> {
1178 let _icx
= push_ctxt("trans_def_datum_unadjusted");
1181 def
::DefFn(did
, _
) |
1182 def
::DefStruct(did
) | def
::DefVariant(_
, did
, _
) |
1183 def
::DefMethod(did
, def
::FromImpl(_
)) => {
1184 callee
::trans_fn_ref(ccx
, did
, ExprId(ref_expr
.id
), param_substs
)
1186 def
::DefMethod(impl_did
, def
::FromTrait(trait_did
)) => {
1187 meth
::trans_static_method_callee(ccx
, impl_did
,
1188 trait_did
, ref_expr
.id
,
1192 ccx
.tcx().sess
.span_bug(ref_expr
.span
, &format
!(
1193 "trans_def_fn_unadjusted invoked on: {:?} for {}",
1195 ref_expr
.repr(ccx
.tcx())));
1200 /// Translates a reference to a local variable or argument. This always results in an lvalue datum.
1201 pub fn trans_local_var
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1203 -> Datum
<'tcx
, Lvalue
> {
1204 let _icx
= push_ctxt("trans_local_var");
1207 def
::DefUpvar(nid
, _
) => {
1208 // Can't move upvars, so this is never a ZeroMemLastUse.
1209 let local_ty
= node_id_type(bcx
, nid
);
1210 match bcx
.fcx
.llupvars
.borrow().get(&nid
) {
1211 Some(&val
) => Datum
::new(val
, local_ty
, Lvalue
),
1213 bcx
.sess().bug(&format
!(
1214 "trans_local_var: no llval for upvar {} found",
1219 def
::DefLocal(nid
) => {
1220 let datum
= match bcx
.fcx
.lllocals
.borrow().get(&nid
) {
1223 bcx
.sess().bug(&format
!(
1224 "trans_local_var: no datum for local/arg {} found",
1228 debug
!("take_local(nid={}, v={}, ty={})",
1229 nid
, bcx
.val_to_string(datum
.val
), bcx
.ty_to_string(datum
.ty
));
1233 bcx
.sess().unimpl(&format
!(
1234 "unsupported def type in trans_local_var: {:?}",
1240 /// Helper for enumerating the field types of structs, enums, or records. The optional node ID here
1241 /// is the node ID of the path identifying the enum variant in use. If none, this cannot possibly
1242 /// an enum variant (so, if it is and `node_id_opt` is none, this function panics).
1243 pub fn with_field_tys
<'tcx
, R
, F
>(tcx
: &ty
::ctxt
<'tcx
>,
1245 node_id_opt
: Option
<ast
::NodeId
>,
1248 F
: FnOnce(ty
::Disr
, &[ty
::field
<'tcx
>]) -> R
,
1251 ty
::ty_struct(did
, substs
) => {
1252 let fields
= struct_fields(tcx
, did
, substs
);
1253 let fields
= monomorphize
::normalize_associated_type(tcx
, &fields
);
1257 ty
::ty_tup(ref v
) => {
1258 op(0, &tup_fields(&v
[..]))
1261 ty
::ty_enum(_
, substs
) => {
1262 // We want the *variant* ID here, not the enum ID.
1265 tcx
.sess
.bug(&format
!(
1266 "cannot get field types from the enum type {} \
1271 let def
= tcx
.def_map
.borrow().get(&node_id
).unwrap().full_def();
1273 def
::DefVariant(enum_id
, variant_id
, _
) => {
1274 let variant_info
= ty
::enum_variant_with_id(tcx
, enum_id
, variant_id
);
1275 let fields
= struct_fields(tcx
, variant_id
, substs
);
1276 let fields
= monomorphize
::normalize_associated_type(tcx
, &fields
);
1277 op(variant_info
.disr_val
, &fields
[..])
1280 tcx
.sess
.bug("resolve didn't map this expr to a \
1289 tcx
.sess
.bug(&format
!(
1290 "cannot get field types from the type {}",
1296 fn trans_struct
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1297 fields
: &[ast
::Field
],
1298 base
: Option
<&ast
::Expr
>,
1299 expr_span
: codemap
::Span
,
1300 expr_id
: ast
::NodeId
,
1302 dest
: Dest
) -> Block
<'blk
, 'tcx
> {
1303 let _icx
= push_ctxt("trans_rec");
1305 let tcx
= bcx
.tcx();
1306 with_field_tys(tcx
, ty
, Some(expr_id
), |discr
, field_tys
| {
1307 let mut need_base
: Vec
<bool
> = repeat(true).take(field_tys
.len()).collect();
1309 let numbered_fields
= fields
.iter().map(|field
| {
1311 field_tys
.iter().position(|field_ty
|
1312 field_ty
.name
== field
.ident
.node
.name
);
1313 let result
= match opt_pos
{
1315 need_base
[i
] = false;
1319 tcx
.sess
.span_bug(field
.span
,
1320 "Couldn't find field in struct type")
1324 }).collect
::<Vec
<_
>>();
1325 let optbase
= match base
{
1326 Some(base_expr
) => {
1327 let mut leftovers
= Vec
::new();
1328 for (i
, b
) in need_base
.iter().enumerate() {
1330 leftovers
.push((i
, field_tys
[i
].mt
.ty
));
1333 Some(StructBaseInfo
{expr
: base_expr
,
1334 fields
: leftovers
})
1337 if need_base
.iter().any(|b
| *b
) {
1338 tcx
.sess
.span_bug(expr_span
, "missing fields and no base expr")
1350 DebugLoc
::At(expr_id
, expr_span
))
1354 /// Information that `trans_adt` needs in order to fill in the fields
1355 /// of a struct copied from a base struct (e.g., from an expression
1356 /// like `Foo { a: b, ..base }`.
1358 /// Note that `fields` may be empty; the base expression must always be
1359 /// evaluated for side-effects.
1360 pub struct StructBaseInfo
<'a
, 'tcx
> {
1361 /// The base expression; will be evaluated after all explicit fields.
1362 expr
: &'a ast
::Expr
,
1363 /// The indices of fields to copy paired with their types.
1364 fields
: Vec
<(usize, Ty
<'tcx
>)>
1367 /// Constructs an ADT instance:
1369 /// - `fields` should be a list of field indices paired with the
1370 /// expression to store into that field. The initializers will be
1371 /// evaluated in the order specified by `fields`.
1373 /// - `optbase` contains information on the base struct (if any) from
1374 /// which remaining fields are copied; see comments on `StructBaseInfo`.
1375 pub fn trans_adt
<'a
, 'blk
, 'tcx
>(mut bcx
: Block
<'blk
, 'tcx
>,
1378 fields
: &[(usize, &ast
::Expr
)],
1379 optbase
: Option
<StructBaseInfo
<'a
, 'tcx
>>,
1381 debug_location
: DebugLoc
)
1382 -> Block
<'blk
, 'tcx
> {
1383 let _icx
= push_ctxt("trans_adt");
1385 let repr
= adt
::represent_type(bcx
.ccx(), ty
);
1387 debug_location
.apply(bcx
.fcx
);
1389 // If we don't care about the result, just make a
1390 // temporary stack slot
1391 let addr
= match dest
{
1393 Ignore
=> alloc_ty(bcx
, ty
, "temp"),
1396 // This scope holds intermediates that must be cleaned should
1397 // panic occur before the ADT as a whole is ready.
1398 let custom_cleanup_scope
= fcx
.push_custom_cleanup_scope();
1400 if ty
::type_is_simd(bcx
.tcx(), ty
) {
1401 // Issue 23112: The original logic appeared vulnerable to same
1402 // order-of-eval bug. But, SIMD values are tuple-structs;
1403 // i.e. functional record update (FRU) syntax is unavailable.
1405 // To be safe, double-check that we did not get here via FRU.
1406 assert
!(optbase
.is_none());
1408 // This is the constructor of a SIMD type, such types are
1409 // always primitive machine types and so do not have a
1410 // destructor or require any clean-up.
1411 let llty
= type_of
::type_of(bcx
.ccx(), ty
);
1413 // keep a vector as a register, and running through the field
1414 // `insertelement`ing them directly into that register
1415 // (i.e. avoid GEPi and `store`s to an alloca) .
1416 let mut vec_val
= C_undef(llty
);
1418 for &(i
, ref e
) in fields
{
1419 let block_datum
= trans(bcx
, &**e
);
1420 bcx
= block_datum
.bcx
;
1421 let position
= C_uint(bcx
.ccx(), i
);
1422 let value
= block_datum
.datum
.to_llscalarish(bcx
);
1423 vec_val
= InsertElement(bcx
, vec_val
, value
, position
);
1425 Store(bcx
, vec_val
, addr
);
1426 } else if let Some(base
) = optbase
{
1427 // Issue 23112: If there is a base, then order-of-eval
1428 // requires field expressions eval'ed before base expression.
1430 // First, trans field expressions to temporary scratch values.
1431 let scratch_vals
: Vec
<_
> = fields
.iter().map(|&(i
, ref e
)| {
1432 let datum
= unpack_datum
!(bcx
, trans(bcx
, &**e
));
1436 debug_location
.apply(bcx
.fcx
);
1438 // Second, trans the base to the dest.
1439 assert_eq
!(discr
, 0);
1441 match ty
::expr_kind(bcx
.tcx(), &*base
.expr
) {
1442 ty
::RvalueDpsExpr
| ty
::RvalueDatumExpr
if !bcx
.fcx
.type_needs_drop(ty
) => {
1443 bcx
= trans_into(bcx
, &*base
.expr
, SaveIn(addr
));
1445 ty
::RvalueStmtExpr
=> bcx
.tcx().sess
.bug("unexpected expr kind for struct base expr"),
1447 let base_datum
= unpack_datum
!(bcx
, trans_to_lvalue(bcx
, &*base
.expr
, "base"));
1448 for &(i
, t
) in &base
.fields
{
1449 let datum
= base_datum
.get_element(
1450 bcx
, t
, |srcval
| adt
::trans_field_ptr(bcx
, &*repr
, srcval
, discr
, i
));
1451 assert
!(type_is_sized(bcx
.tcx(), datum
.ty
));
1452 let dest
= adt
::trans_field_ptr(bcx
, &*repr
, addr
, discr
, i
);
1453 bcx
= datum
.store_to(bcx
, dest
);
1458 // Finally, move scratch field values into actual field locations
1459 for (i
, datum
) in scratch_vals
.into_iter() {
1460 let dest
= adt
::trans_field_ptr(bcx
, &*repr
, addr
, discr
, i
);
1461 bcx
= datum
.store_to(bcx
, dest
);
1464 // No base means we can write all fields directly in place.
1465 for &(i
, ref e
) in fields
{
1466 let dest
= adt
::trans_field_ptr(bcx
, &*repr
, addr
, discr
, i
);
1467 let e_ty
= expr_ty_adjusted(bcx
, &**e
);
1468 bcx
= trans_into(bcx
, &**e
, SaveIn(dest
));
1469 let scope
= cleanup
::CustomScope(custom_cleanup_scope
);
1470 fcx
.schedule_lifetime_end(scope
, dest
);
1471 fcx
.schedule_drop_mem(scope
, dest
, e_ty
);
1475 adt
::trans_set_discr(bcx
, &*repr
, addr
, discr
);
1477 fcx
.pop_custom_cleanup_scope(custom_cleanup_scope
);
1479 // If we don't care about the result drop the temporary we made
1483 bcx
= glue
::drop_ty(bcx
, addr
, ty
, debug_location
);
1484 base
::call_lifetime_end(bcx
, addr
);
1491 fn trans_immediate_lit
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1494 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1495 // must not be a string constant, that is a RvalueDpsExpr
1496 let _icx
= push_ctxt("trans_immediate_lit");
1497 let ty
= expr_ty(bcx
, expr
);
1498 let v
= consts
::const_lit(bcx
.ccx(), expr
, lit
);
1499 immediate_rvalue_bcx(bcx
, v
, ty
).to_expr_datumblock()
1502 fn trans_unary
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1505 sub_expr
: &ast
::Expr
)
1506 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1507 let ccx
= bcx
.ccx();
1509 let _icx
= push_ctxt("trans_unary_datum");
1511 let method_call
= MethodCall
::expr(expr
.id
);
1513 // The only overloaded operator that is translated to a datum
1514 // is an overloaded deref, since it is always yields a `&T`.
1515 // Otherwise, we should be in the RvalueDpsExpr path.
1517 op
== ast
::UnDeref
||
1518 !ccx
.tcx().method_map
.borrow().contains_key(&method_call
));
1520 let un_ty
= expr_ty(bcx
, expr
);
1522 let debug_loc
= expr
.debug_loc();
1526 let datum
= unpack_datum
!(bcx
, trans(bcx
, sub_expr
));
1527 let llresult
= Not(bcx
, datum
.to_llscalarish(bcx
), debug_loc
);
1528 immediate_rvalue_bcx(bcx
, llresult
, un_ty
).to_expr_datumblock()
1531 let datum
= unpack_datum
!(bcx
, trans(bcx
, sub_expr
));
1532 let val
= datum
.to_llscalarish(bcx
);
1533 let (bcx
, llneg
) = {
1534 if ty
::type_is_fp(un_ty
) {
1535 let result
= FNeg(bcx
, val
, debug_loc
);
1538 let is_signed
= ty
::type_is_signed(un_ty
);
1539 let result
= Neg(bcx
, val
, debug_loc
);
1540 let bcx
= if bcx
.ccx().check_overflow() && is_signed
{
1541 let (llty
, min
) = base
::llty_and_min_for_signed_ty(bcx
, un_ty
);
1542 let is_min
= ICmp(bcx
, llvm
::IntEQ
, val
,
1543 C_integral(llty
, min
, true), debug_loc
);
1544 with_cond(bcx
, is_min
, |bcx
| {
1545 let msg
= InternedString
::new(
1546 "attempted to negate with overflow");
1547 controlflow
::trans_fail(bcx
, expr_info(expr
), msg
)
1555 immediate_rvalue_bcx(bcx
, llneg
, un_ty
).to_expr_datumblock()
1558 trans_uniq_expr(bcx
, expr
, un_ty
, sub_expr
, expr_ty(bcx
, sub_expr
))
1561 let datum
= unpack_datum
!(bcx
, trans(bcx
, sub_expr
));
1562 deref_once(bcx
, expr
, datum
, method_call
)
1567 fn trans_uniq_expr
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1568 box_expr
: &ast
::Expr
,
1570 contents
: &ast
::Expr
,
1571 contents_ty
: Ty
<'tcx
>)
1572 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1573 let _icx
= push_ctxt("trans_uniq_expr");
1575 assert
!(type_is_sized(bcx
.tcx(), contents_ty
));
1576 let llty
= type_of
::type_of(bcx
.ccx(), contents_ty
);
1577 let size
= llsize_of(bcx
.ccx(), llty
);
1578 let align
= C_uint(bcx
.ccx(), type_of
::align_of(bcx
.ccx(), contents_ty
));
1579 let llty_ptr
= llty
.ptr_to();
1580 let Result { bcx, val }
= malloc_raw_dyn(bcx
,
1585 box_expr
.debug_loc());
1586 // Unique boxes do not allocate for zero-size types. The standard library
1587 // may assume that `free` is never called on the pointer returned for
1588 // `Box<ZeroSizeType>`.
1589 let bcx
= if llsize_of_alloc(bcx
.ccx(), llty
) == 0 {
1590 trans_into(bcx
, contents
, SaveIn(val
))
1592 let custom_cleanup_scope
= fcx
.push_custom_cleanup_scope();
1593 fcx
.schedule_free_value(cleanup
::CustomScope(custom_cleanup_scope
),
1594 val
, cleanup
::HeapExchange
, contents_ty
);
1595 let bcx
= trans_into(bcx
, contents
, SaveIn(val
));
1596 fcx
.pop_custom_cleanup_scope(custom_cleanup_scope
);
1599 immediate_rvalue_bcx(bcx
, val
, box_ty
).to_expr_datumblock()
1602 fn ref_fat_ptr
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1603 lval
: Datum
<'tcx
, Lvalue
>)
1604 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1605 let dest_ty
= ty
::mk_imm_rptr(bcx
.tcx(), bcx
.tcx().mk_region(ty
::ReStatic
), lval
.ty
);
1606 let scratch
= rvalue_scratch_datum(bcx
, dest_ty
, "__fat_ptr");
1607 memcpy_ty(bcx
, scratch
.val
, lval
.val
, scratch
.ty
);
1609 DatumBlock
::new(bcx
, scratch
.to_expr_datum())
1612 fn trans_addr_of
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1614 subexpr
: &ast
::Expr
)
1615 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1616 let _icx
= push_ctxt("trans_addr_of");
1618 let sub_datum
= unpack_datum
!(bcx
, trans_to_lvalue(bcx
, subexpr
, "addr_of"));
1619 if !type_is_sized(bcx
.tcx(), sub_datum
.ty
) {
1620 // DST lvalue, close to a fat pointer
1621 ref_fat_ptr(bcx
, sub_datum
)
1623 // Sized value, ref to a thin pointer
1624 let ty
= expr_ty(bcx
, expr
);
1625 immediate_rvalue_bcx(bcx
, sub_datum
.val
, ty
).to_expr_datumblock()
1629 // Important to get types for both lhs and rhs, because one might be _|_
1630 // and the other not.
1631 fn trans_eager_binop
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1632 binop_expr
: &ast
::Expr
,
1639 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1640 let _icx
= push_ctxt("trans_eager_binop");
1642 let tcx
= bcx
.tcx();
1643 let is_simd
= ty
::type_is_simd(tcx
, lhs_t
);
1644 let intype
= if is_simd
{
1645 ty
::simd_type(tcx
, lhs_t
)
1649 let is_float
= ty
::type_is_fp(intype
);
1650 let is_signed
= ty
::type_is_signed(intype
);
1651 let info
= expr_info(binop_expr
);
1653 let binop_debug_loc
= binop_expr
.debug_loc();
1656 let val
= match op
.node
{
1659 FAdd(bcx
, lhs
, rhs
, binop_debug_loc
)
1661 Add(bcx
, lhs
, rhs
, binop_debug_loc
)
1663 let (newbcx
, res
) = with_overflow_check(
1664 bcx
, OverflowOp
::Add
, info
, lhs_t
, lhs
, rhs
, binop_debug_loc
);
1671 FSub(bcx
, lhs
, rhs
, binop_debug_loc
)
1673 Sub(bcx
, lhs
, rhs
, binop_debug_loc
)
1675 let (newbcx
, res
) = with_overflow_check(
1676 bcx
, OverflowOp
::Sub
, info
, lhs_t
, lhs
, rhs
, binop_debug_loc
);
1683 FMul(bcx
, lhs
, rhs
, binop_debug_loc
)
1685 Mul(bcx
, lhs
, rhs
, binop_debug_loc
)
1687 let (newbcx
, res
) = with_overflow_check(
1688 bcx
, OverflowOp
::Mul
, info
, lhs_t
, lhs
, rhs
, binop_debug_loc
);
1695 FDiv(bcx
, lhs
, rhs
, binop_debug_loc
)
1697 // Only zero-check integers; fp /0 is NaN
1698 bcx
= base
::fail_if_zero_or_overflows(bcx
,
1699 expr_info(binop_expr
),
1705 SDiv(bcx
, lhs
, rhs
, binop_debug_loc
)
1707 UDiv(bcx
, lhs
, rhs
, binop_debug_loc
)
1713 FRem(bcx
, lhs
, rhs
, binop_debug_loc
)
1715 // Only zero-check integers; fp %0 is NaN
1716 bcx
= base
::fail_if_zero_or_overflows(bcx
,
1717 expr_info(binop_expr
),
1718 op
, lhs
, rhs
, rhs_t
);
1720 SRem(bcx
, lhs
, rhs
, binop_debug_loc
)
1722 URem(bcx
, lhs
, rhs
, binop_debug_loc
)
1726 ast
::BiBitOr
=> Or(bcx
, lhs
, rhs
, binop_debug_loc
),
1727 ast
::BiBitAnd
=> And(bcx
, lhs
, rhs
, binop_debug_loc
),
1728 ast
::BiBitXor
=> Xor(bcx
, lhs
, rhs
, binop_debug_loc
),
1730 let (newbcx
, res
) = with_overflow_check(
1731 bcx
, OverflowOp
::Shl
, info
, lhs_t
, lhs
, rhs
, binop_debug_loc
);
1736 let (newbcx
, res
) = with_overflow_check(
1737 bcx
, OverflowOp
::Shr
, info
, lhs_t
, lhs
, rhs
, binop_debug_loc
);
1741 ast
::BiEq
| ast
::BiNe
| ast
::BiLt
| ast
::BiGe
| ast
::BiLe
| ast
::BiGt
=> {
1743 base
::compare_simd_types(bcx
, lhs
, rhs
, intype
, op
.node
, binop_debug_loc
)
1745 base
::compare_scalar_types(bcx
, lhs
, rhs
, intype
, op
.node
, binop_debug_loc
)
1749 bcx
.tcx().sess
.span_bug(binop_expr
.span
, "unexpected binop");
1753 immediate_rvalue_bcx(bcx
, val
, binop_ty
).to_expr_datumblock()
1756 // refinement types would obviate the need for this
1757 enum lazy_binop_ty
{
1762 fn trans_lazy_binop
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1763 binop_expr
: &ast
::Expr
,
1767 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1768 let _icx
= push_ctxt("trans_lazy_binop");
1769 let binop_ty
= expr_ty(bcx
, binop_expr
);
1772 let DatumBlock {bcx: past_lhs, datum: lhs}
= trans(bcx
, a
);
1773 let lhs
= lhs
.to_llscalarish(past_lhs
);
1775 if past_lhs
.unreachable
.get() {
1776 return immediate_rvalue_bcx(past_lhs
, lhs
, binop_ty
).to_expr_datumblock();
1779 let join
= fcx
.new_id_block("join", binop_expr
.id
);
1780 let before_rhs
= fcx
.new_id_block("before_rhs", b
.id
);
1783 lazy_and
=> CondBr(past_lhs
, lhs
, before_rhs
.llbb
, join
.llbb
, DebugLoc
::None
),
1784 lazy_or
=> CondBr(past_lhs
, lhs
, join
.llbb
, before_rhs
.llbb
, DebugLoc
::None
)
1787 let DatumBlock {bcx: past_rhs, datum: rhs}
= trans(before_rhs
, b
);
1788 let rhs
= rhs
.to_llscalarish(past_rhs
);
1790 if past_rhs
.unreachable
.get() {
1791 return immediate_rvalue_bcx(join
, lhs
, binop_ty
).to_expr_datumblock();
1794 Br(past_rhs
, join
.llbb
, DebugLoc
::None
);
1795 let phi
= Phi(join
, Type
::i1(bcx
.ccx()), &[lhs
, rhs
],
1796 &[past_lhs
.llbb
, past_rhs
.llbb
]);
1798 return immediate_rvalue_bcx(join
, phi
, binop_ty
).to_expr_datumblock();
1801 fn trans_binary
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1806 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1807 let _icx
= push_ctxt("trans_binary");
1808 let ccx
= bcx
.ccx();
1810 // if overloaded, would be RvalueDpsExpr
1811 assert
!(!ccx
.tcx().method_map
.borrow().contains_key(&MethodCall
::expr(expr
.id
)));
1815 trans_lazy_binop(bcx
, expr
, lazy_and
, lhs
, rhs
)
1818 trans_lazy_binop(bcx
, expr
, lazy_or
, lhs
, rhs
)
1822 let lhs_datum
= unpack_datum
!(bcx
, trans(bcx
, lhs
));
1823 let rhs_datum
= unpack_datum
!(bcx
, trans(bcx
, rhs
));
1824 let binop_ty
= expr_ty(bcx
, expr
);
1826 debug
!("trans_binary (expr {}): lhs_datum={}",
1828 lhs_datum
.to_string(ccx
));
1829 let lhs_ty
= lhs_datum
.ty
;
1830 let lhs
= lhs_datum
.to_llscalarish(bcx
);
1832 debug
!("trans_binary (expr {}): rhs_datum={}",
1834 rhs_datum
.to_string(ccx
));
1835 let rhs_ty
= rhs_datum
.ty
;
1836 let rhs
= rhs_datum
.to_llscalarish(bcx
);
1837 trans_eager_binop(bcx
, expr
, binop_ty
, op
,
1838 lhs_ty
, lhs
, rhs_ty
, rhs
)
1843 fn trans_overloaded_op
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1845 method_call
: MethodCall
,
1846 lhs
: Datum
<'tcx
, Expr
>,
1847 rhs
: Vec
<(Datum
<'tcx
, Expr
>, ast
::NodeId
)>,
1850 -> Result
<'blk
, 'tcx
> {
1851 let method_ty
= bcx
.tcx().method_map
.borrow().get(&method_call
).unwrap().ty
;
1852 callee
::trans_call_inner(bcx
,
1854 monomorphize_type(bcx
, method_ty
),
1855 |bcx
, arg_cleanup_scope
| {
1856 meth
::trans_method_callee(bcx
,
1861 callee
::ArgOverloadedOp(lhs
, rhs
, autoref
),
1865 fn trans_overloaded_call
<'a
, 'blk
, 'tcx
>(mut bcx
: Block
<'blk
, 'tcx
>,
1867 callee
: &'a ast
::Expr
,
1868 args
: &'a
[P
<ast
::Expr
>],
1870 -> Block
<'blk
, 'tcx
> {
1871 let method_call
= MethodCall
::expr(expr
.id
);
1872 let method_type
= bcx
.tcx()
1878 let mut all_args
= vec
!(callee
);
1879 all_args
.extend(args
.iter().map(|e
| &**e
));
1881 callee
::trans_call_inner(bcx
,
1883 monomorphize_type(bcx
,
1885 |bcx
, arg_cleanup_scope
| {
1886 meth
::trans_method_callee(
1892 callee
::ArgOverloadedCall(all_args
),
1897 fn int_cast(bcx
: Block
,
1903 let _icx
= push_ctxt("int_cast");
1904 let srcsz
= llsrctype
.int_width();
1905 let dstsz
= lldsttype
.int_width();
1906 return if dstsz
== srcsz
{
1907 BitCast(bcx
, llsrc
, lldsttype
)
1908 } else if srcsz
> dstsz
{
1909 TruncOrBitCast(bcx
, llsrc
, lldsttype
)
1911 SExtOrBitCast(bcx
, llsrc
, lldsttype
)
1913 ZExtOrBitCast(bcx
, llsrc
, lldsttype
)
1917 fn float_cast(bcx
: Block
,
1922 let _icx
= push_ctxt("float_cast");
1923 let srcsz
= llsrctype
.float_width();
1924 let dstsz
= lldsttype
.float_width();
1925 return if dstsz
> srcsz
{
1926 FPExt(bcx
, llsrc
, lldsttype
)
1927 } else if srcsz
> dstsz
{
1928 FPTrunc(bcx
, llsrc
, lldsttype
)
1932 #[derive(Copy, Clone, PartialEq, Debug)]
1933 pub enum cast_kind
{
1941 pub fn cast_type_kind
<'tcx
>(tcx
: &ty
::ctxt
<'tcx
>, t
: Ty
<'tcx
>) -> cast_kind
{
1943 ty
::ty_char
=> cast_integral
,
1944 ty
::ty_float(..) => cast_float
,
1945 ty
::ty_rptr(_
, mt
) | ty
::ty_ptr(mt
) => {
1946 if type_is_sized(tcx
, mt
.ty
) {
1952 ty
::ty_bare_fn(..) => cast_pointer
,
1953 ty
::ty_int(..) => cast_integral
,
1954 ty
::ty_uint(..) => cast_integral
,
1955 ty
::ty_bool
=> cast_integral
,
1956 ty
::ty_enum(..) => cast_enum
,
1961 pub fn cast_is_noop
<'tcx
>(t_in
: Ty
<'tcx
>, t_out
: Ty
<'tcx
>) -> bool
{
1962 match (ty
::deref(t_in
, true), ty
::deref(t_out
, true)) {
1963 (Some(ty
::mt{ ty: t_in, .. }
), Some(ty
::mt{ ty: t_out, .. }
)) => {
1970 fn trans_imm_cast
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
1973 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
1974 let _icx
= push_ctxt("trans_cast");
1976 let ccx
= bcx
.ccx();
1978 let t_in
= expr_ty_adjusted(bcx
, expr
);
1979 let t_out
= node_id_type(bcx
, id
);
1980 let k_in
= cast_type_kind(bcx
.tcx(), t_in
);
1981 let k_out
= cast_type_kind(bcx
.tcx(), t_out
);
1982 let s_in
= k_in
== cast_integral
&& ty
::type_is_signed(t_in
);
1983 let ll_t_in
= type_of
::arg_type_of(ccx
, t_in
);
1984 let ll_t_out
= type_of
::arg_type_of(ccx
, t_out
);
1986 // Convert the value to be cast into a ValueRef, either by-ref or
1987 // by-value as appropriate given its type:
1988 let mut datum
= unpack_datum
!(bcx
, trans(bcx
, expr
));
1990 let datum_ty
= monomorphize_type(bcx
, datum
.ty
);
1991 if cast_is_noop(datum_ty
, t_out
) {
1993 return DatumBlock
::new(bcx
, datum
);
1996 let newval
= match (k_in
, k_out
) {
1997 (cast_integral
, cast_integral
) => {
1998 let llexpr
= datum
.to_llscalarish(bcx
);
1999 int_cast(bcx
, ll_t_out
, ll_t_in
, llexpr
, s_in
)
2001 (cast_float
, cast_float
) => {
2002 let llexpr
= datum
.to_llscalarish(bcx
);
2003 float_cast(bcx
, ll_t_out
, ll_t_in
, llexpr
)
2005 (cast_integral
, cast_float
) => {
2006 let llexpr
= datum
.to_llscalarish(bcx
);
2008 SIToFP(bcx
, llexpr
, ll_t_out
)
2009 } else { UIToFP(bcx, llexpr, ll_t_out) }
2011 (cast_float
, cast_integral
) => {
2012 let llexpr
= datum
.to_llscalarish(bcx
);
2013 if ty
::type_is_signed(t_out
) {
2014 FPToSI(bcx
, llexpr
, ll_t_out
)
2015 } else { FPToUI(bcx, llexpr, ll_t_out) }
2017 (cast_integral
, cast_pointer
) => {
2018 let llexpr
= datum
.to_llscalarish(bcx
);
2019 IntToPtr(bcx
, llexpr
, ll_t_out
)
2021 (cast_pointer
, cast_integral
) => {
2022 let llexpr
= datum
.to_llscalarish(bcx
);
2023 PtrToInt(bcx
, llexpr
, ll_t_out
)
2025 (cast_pointer
, cast_pointer
) => {
2026 let llexpr
= datum
.to_llscalarish(bcx
);
2027 PointerCast(bcx
, llexpr
, ll_t_out
)
2029 (cast_enum
, cast_integral
) |
2030 (cast_enum
, cast_float
) => {
2032 let repr
= adt
::represent_type(ccx
, t_in
);
2033 let datum
= unpack_datum
!(
2034 bcx
, datum
.to_lvalue_datum(bcx
, "trans_imm_cast", expr
.id
));
2035 let llexpr_ptr
= datum
.to_llref();
2037 adt
::trans_get_discr(bcx
, &*repr
, llexpr_ptr
, Some(Type
::i64(ccx
)));
2039 cast_integral
=> int_cast(bcx
, ll_t_out
,
2040 val_ty(lldiscrim_a
),
2042 cast_float
=> SIToFP(bcx
, lldiscrim_a
, ll_t_out
),
2044 ccx
.sess().bug(&format
!("translating unsupported cast: \
2045 {} ({:?}) -> {} ({:?})",
2046 t_in
.repr(bcx
.tcx()),
2048 t_out
.repr(bcx
.tcx()),
2053 _
=> ccx
.sess().bug(&format
!("translating unsupported cast: \
2054 {} ({:?}) -> {} ({:?})",
2055 t_in
.repr(bcx
.tcx()),
2057 t_out
.repr(bcx
.tcx()),
2060 return immediate_rvalue_bcx(bcx
, newval
, t_out
).to_expr_datumblock();
2063 fn trans_assign_op
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
2068 -> Block
<'blk
, 'tcx
> {
2069 let _icx
= push_ctxt("trans_assign_op");
2072 debug
!("trans_assign_op(expr={})", bcx
.expr_to_string(expr
));
2074 // User-defined operator methods cannot be used with `+=` etc right now
2075 assert
!(!bcx
.tcx().method_map
.borrow().contains_key(&MethodCall
::expr(expr
.id
)));
2077 // Evaluate LHS (destination), which should be an lvalue
2078 let dst_datum
= unpack_datum
!(bcx
, trans_to_lvalue(bcx
, dst
, "assign_op"));
2079 assert
!(!bcx
.fcx
.type_needs_drop(dst_datum
.ty
));
2080 let dst_ty
= dst_datum
.ty
;
2081 let dst
= load_ty(bcx
, dst_datum
.val
, dst_datum
.ty
);
2084 let rhs_datum
= unpack_datum
!(bcx
, trans(bcx
, &*src
));
2085 let rhs_ty
= rhs_datum
.ty
;
2086 let rhs
= rhs_datum
.to_llscalarish(bcx
);
2088 // Perform computation and store the result
2089 let result_datum
= unpack_datum
!(
2090 bcx
, trans_eager_binop(bcx
, expr
, dst_datum
.ty
, op
,
2091 dst_ty
, dst
, rhs_ty
, rhs
));
2092 return result_datum
.store_to(bcx
, dst_datum
.val
);
2095 fn auto_ref
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
2096 datum
: Datum
<'tcx
, Expr
>,
2098 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
2101 // Ensure cleanup of `datum` if not already scheduled and obtain
2102 // a "by ref" pointer.
2103 let lv_datum
= unpack_datum
!(bcx
, datum
.to_lvalue_datum(bcx
, "autoref", expr
.id
));
2105 // Compute final type. Note that we are loose with the region and
2106 // mutability, since those things don't matter in trans.
2107 let referent_ty
= lv_datum
.ty
;
2108 let ptr_ty
= ty
::mk_imm_rptr(bcx
.tcx(), bcx
.tcx().mk_region(ty
::ReStatic
), referent_ty
);
2111 let llref
= lv_datum
.to_llref();
2113 // Construct the resulting datum, using what was the "by ref"
2114 // ValueRef of type `referent_ty` to be the "by value" ValueRef
2115 // of type `&referent_ty`.
2116 DatumBlock
::new(bcx
, Datum
::new(llref
, ptr_ty
, RvalueExpr(Rvalue
::new(ByValue
))))
2119 fn deref_multiple
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
2121 datum
: Datum
<'tcx
, Expr
>,
2123 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
2125 let mut datum
= datum
;
2127 let method_call
= MethodCall
::autoderef(expr
.id
, i
as u32);
2128 datum
= unpack_datum
!(bcx
, deref_once(bcx
, expr
, datum
, method_call
));
2130 DatumBlock { bcx: bcx, datum: datum }
2133 fn deref_once
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
2135 datum
: Datum
<'tcx
, Expr
>,
2136 method_call
: MethodCall
)
2137 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
2138 let ccx
= bcx
.ccx();
2140 debug
!("deref_once(expr={}, datum={}, method_call={:?})",
2141 expr
.repr(bcx
.tcx()),
2142 datum
.to_string(ccx
),
2147 // Check for overloaded deref.
2148 let method_ty
= ccx
.tcx().method_map
.borrow()
2149 .get(&method_call
).map(|method
| method
.ty
);
2150 let datum
= match method_ty
{
2151 Some(method_ty
) => {
2152 let method_ty
= monomorphize_type(bcx
, method_ty
);
2154 // Overloaded. Evaluate `trans_overloaded_op`, which will
2155 // invoke the user's deref() method, which basically
2156 // converts from the `Smaht<T>` pointer that we have into
2157 // a `&T` pointer. We can then proceed down the normal
2158 // path (below) to dereference that `&T`.
2159 let datum
= if method_call
.autoderef
== 0 {
2162 // Always perform an AutoPtr when applying an overloaded auto-deref
2163 unpack_datum
!(bcx
, auto_ref(bcx
, datum
, expr
))
2166 let ref_ty
= // invoked methods have their LB regions instantiated
2167 ty
::no_late_bound_regions(
2168 ccx
.tcx(), &ty
::ty_fn_ret(method_ty
)).unwrap().unwrap();
2169 let scratch
= rvalue_scratch_datum(bcx
, ref_ty
, "overloaded_deref");
2171 unpack_result
!(bcx
, trans_overloaded_op(bcx
, expr
, method_call
,
2172 datum
, Vec
::new(), Some(SaveIn(scratch
.val
)),
2174 scratch
.to_expr_datum()
2177 // Not overloaded. We already have a pointer we know how to deref.
2182 let r
= match datum
.ty
.sty
{
2183 ty
::ty_uniq(content_ty
) => {
2184 if type_is_sized(bcx
.tcx(), content_ty
) {
2185 deref_owned_pointer(bcx
, expr
, datum
, content_ty
)
2187 // A fat pointer and a DST lvalue have the same representation
2188 // just different types. Since there is no temporary for `*e`
2189 // here (because it is unsized), we cannot emulate the sized
2190 // object code path for running drop glue and free. Instead,
2191 // we schedule cleanup for `e`, turning it into an lvalue.
2192 let datum
= unpack_datum
!(
2193 bcx
, datum
.to_lvalue_datum(bcx
, "deref", expr
.id
));
2195 let datum
= Datum
::new(datum
.val
, content_ty
, LvalueExpr
);
2196 DatumBlock
::new(bcx
, datum
)
2200 ty
::ty_ptr(ty
::mt { ty: content_ty, .. }
) |
2201 ty
::ty_rptr(_
, ty
::mt { ty: content_ty, .. }
) => {
2202 if type_is_sized(bcx
.tcx(), content_ty
) {
2203 let ptr
= datum
.to_llscalarish(bcx
);
2205 // Always generate an lvalue datum, even if datum.mode is
2206 // an rvalue. This is because datum.mode is only an
2207 // rvalue for non-owning pointers like &T or *T, in which
2208 // case cleanup *is* scheduled elsewhere, by the true
2209 // owner (or, in the case of *T, by the user).
2210 DatumBlock
::new(bcx
, Datum
::new(ptr
, content_ty
, LvalueExpr
))
2212 // A fat pointer and a DST lvalue have the same representation
2213 // just different types.
2214 DatumBlock
::new(bcx
, Datum
::new(datum
.val
, content_ty
, LvalueExpr
))
2219 bcx
.tcx().sess
.span_bug(
2221 &format
!("deref invoked on expr of illegal type {}",
2222 datum
.ty
.repr(bcx
.tcx())));
2226 debug
!("deref_once(expr={}, method_call={:?}, result={})",
2227 expr
.id
, method_call
, r
.datum
.to_string(ccx
));
2231 /// We microoptimize derefs of owned pointers a bit here. Basically, the idea is to make the
2232 /// deref of an rvalue result in an rvalue. This helps to avoid intermediate stack slots in the
2233 /// resulting LLVM. The idea here is that, if the `Box<T>` pointer is an rvalue, then we can
2234 /// schedule a *shallow* free of the `Box<T>` pointer, and then return a ByRef rvalue into the
2235 /// pointer. Because the free is shallow, it is legit to return an rvalue, because we know that
2236 /// the contents are not yet scheduled to be freed. The language rules ensure that the contents
2237 /// will be used (or moved) before the free occurs.
2238 fn deref_owned_pointer
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
2240 datum
: Datum
<'tcx
, Expr
>,
2241 content_ty
: Ty
<'tcx
>)
2242 -> DatumBlock
<'blk
, 'tcx
, Expr
> {
2244 RvalueExpr(Rvalue { mode: ByRef }
) => {
2245 let scope
= cleanup
::temporary_scope(bcx
.tcx(), expr
.id
);
2246 let ptr
= Load(bcx
, datum
.val
);
2247 if !type_is_zero_size(bcx
.ccx(), content_ty
) {
2248 bcx
.fcx
.schedule_free_value(scope
, ptr
, cleanup
::HeapExchange
, content_ty
);
2251 RvalueExpr(Rvalue { mode: ByValue }
) => {
2252 let scope
= cleanup
::temporary_scope(bcx
.tcx(), expr
.id
);
2253 if !type_is_zero_size(bcx
.ccx(), content_ty
) {
2254 bcx
.fcx
.schedule_free_value(scope
, datum
.val
, cleanup
::HeapExchange
,
2261 // If we had an rvalue in, we produce an rvalue out.
2262 let (llptr
, kind
) = match datum
.kind
{
2264 (Load(bcx
, datum
.val
), LvalueExpr
)
2266 RvalueExpr(Rvalue { mode: ByRef }
) => {
2267 (Load(bcx
, datum
.val
), RvalueExpr(Rvalue
::new(ByRef
)))
2269 RvalueExpr(Rvalue { mode: ByValue }
) => {
2270 (datum
.val
, RvalueExpr(Rvalue
::new(ByRef
)))
2274 let datum
= Datum { ty: content_ty, val: llptr, kind: kind }
;
2275 DatumBlock { bcx: bcx, datum: datum }
2289 fn codegen_strategy(&self) -> OverflowCodegen
{
2290 use self::OverflowCodegen
::{ViaIntrinsic, ViaInputCheck}
;
2292 OverflowOp
::Add
=> ViaIntrinsic(OverflowOpViaIntrinsic
::Add
),
2293 OverflowOp
::Sub
=> ViaIntrinsic(OverflowOpViaIntrinsic
::Sub
),
2294 OverflowOp
::Mul
=> ViaIntrinsic(OverflowOpViaIntrinsic
::Mul
),
2296 OverflowOp
::Shl
=> ViaInputCheck(OverflowOpViaInputCheck
::Shl
),
2297 OverflowOp
::Shr
=> ViaInputCheck(OverflowOpViaInputCheck
::Shr
),
2302 enum OverflowCodegen
{
2303 ViaIntrinsic(OverflowOpViaIntrinsic
),
2304 ViaInputCheck(OverflowOpViaInputCheck
),
2307 enum OverflowOpViaInputCheck { Shl, Shr, }
2310 enum OverflowOpViaIntrinsic { Add, Sub, Mul, }
impl OverflowOpViaIntrinsic {
    /// Looks up (by name) the declaration of the LLVM overflow intrinsic
    /// implementing this operation for operands of type `lhs_ty`.
    fn to_intrinsic<'blk, 'tcx>(&self, bcx: Block<'blk, 'tcx>, lhs_ty: Ty) -> ValueRef {
        let name = self.to_intrinsic_name(bcx.tcx(), lhs_ty);
        bcx.ccx().get_intrinsic(&name)
    }

    /// Maps (operation, operand type) to the name of the matching LLVM
    /// checked-arithmetic intrinsic, e.g. `llvm.sadd.with.overflow.i32`.
    ///
    /// Panics if `ty` is not an integral type, or if the target's pointer
    /// width is neither 32 nor 64 bits.
    fn to_intrinsic_name(&self, tcx: &ty::ctxt, ty: Ty) -> &'static str {
        use syntax::ast::IntTy::*;
        use syntax::ast::UintTy::*;
        use middle::ty::{ty_int, ty_uint};

        // Normalize isize/usize to the target's concrete pointer-width
        // integer type so they can share the fixed-width tables below.
        let new_sty = match ty.sty {
            ty_int(TyIs) => match &tcx.sess.target.target.target_pointer_width[..] {
                "32" => ty_int(TyI32),
                "64" => ty_int(TyI64),
                _ => panic!("unsupported target word size")
            },
            ty_uint(TyUs) => match &tcx.sess.target.target.target_pointer_width[..] {
                "32" => ty_uint(TyU32),
                "64" => ty_uint(TyU64),
                _ => panic!("unsupported target word size")
            },
            ref t @ ty_uint(_) | ref t @ ty_int(_) => t.clone(),
            _ => panic!("tried to get overflow intrinsic for {:?} applied to non-int type",
                        *self)
        };

        match *self {
            OverflowOpViaIntrinsic::Add => match new_sty {
                ty_int(TyI8) => "llvm.sadd.with.overflow.i8",
                ty_int(TyI16) => "llvm.sadd.with.overflow.i16",
                ty_int(TyI32) => "llvm.sadd.with.overflow.i32",
                ty_int(TyI64) => "llvm.sadd.with.overflow.i64",

                ty_uint(TyU8) => "llvm.uadd.with.overflow.i8",
                ty_uint(TyU16) => "llvm.uadd.with.overflow.i16",
                ty_uint(TyU32) => "llvm.uadd.with.overflow.i32",
                ty_uint(TyU64) => "llvm.uadd.with.overflow.i64",

                // `new_sty` was normalized to a fixed-width int above.
                _ => unreachable!(),
            },
            OverflowOpViaIntrinsic::Sub => match new_sty {
                ty_int(TyI8) => "llvm.ssub.with.overflow.i8",
                ty_int(TyI16) => "llvm.ssub.with.overflow.i16",
                ty_int(TyI32) => "llvm.ssub.with.overflow.i32",
                ty_int(TyI64) => "llvm.ssub.with.overflow.i64",

                ty_uint(TyU8) => "llvm.usub.with.overflow.i8",
                ty_uint(TyU16) => "llvm.usub.with.overflow.i16",
                ty_uint(TyU32) => "llvm.usub.with.overflow.i32",
                ty_uint(TyU64) => "llvm.usub.with.overflow.i64",

                _ => unreachable!(),
            },
            OverflowOpViaIntrinsic::Mul => match new_sty {
                ty_int(TyI8) => "llvm.smul.with.overflow.i8",
                ty_int(TyI16) => "llvm.smul.with.overflow.i16",
                ty_int(TyI32) => "llvm.smul.with.overflow.i32",
                ty_int(TyI64) => "llvm.smul.with.overflow.i64",

                ty_uint(TyU8) => "llvm.umul.with.overflow.i8",
                ty_uint(TyU16) => "llvm.umul.with.overflow.i16",
                ty_uint(TyU32) => "llvm.umul.with.overflow.i32",
                ty_uint(TyU64) => "llvm.umul.with.overflow.i64",

                _ => unreachable!(),
            },
        }
    }

    /// Emits a call to the checked-arithmetic intrinsic and a conditional
    /// branch to a panic ("arithmetic operation overflowed") taken when the
    /// intrinsic's overflow flag is set. Returns the continuation block and
    /// the iN operation result.
    fn build_intrinsic_call<'blk, 'tcx>(&self, bcx: Block<'blk, 'tcx>,
                                        info: NodeIdAndSpan,
                                        lhs_t: Ty<'tcx>, lhs: ValueRef,
                                        rhs: ValueRef,
                                        binop_debug_loc: DebugLoc)
                                        -> (Block<'blk, 'tcx>, ValueRef) {
        let llfn = self.to_intrinsic(bcx, lhs_t);

        // The intrinsic returns an aggregate {iN, i1}: the (wrapped)
        // result and an overflow flag.
        let val = Call(bcx, llfn, &[lhs, rhs], None, binop_debug_loc);
        let result = ExtractValue(bcx, val, 0); // iN operation result
        let overflow = ExtractValue(bcx, val, 1); // i1 "did it overflow?"

        // `cond` is true exactly when overflow occurred.
        let cond = ICmp(bcx, llvm::IntEQ, overflow, C_integral(Type::i1(bcx.ccx()), 1, false),
                        binop_debug_loc);

        // Hint the optimizer that the overflow branch is unlikely.
        let expect = bcx.ccx().get_intrinsic(&"llvm.expect.i1");
        Call(bcx, expect, &[cond, C_integral(Type::i1(bcx.ccx()), 0, false)],
             None, binop_debug_loc);

        let bcx =
            base::with_cond(bcx, cond, |bcx|
                controlflow::trans_fail(bcx, info,
                    InternedString::new("arithmetic operation overflowed")));

        (bcx, result)
    }
}
impl OverflowOpViaInputCheck {
    /// Emits a shift whose overflow check validates the shift amount:
    /// if any RHS bits outside the always-masked range are set, control
    /// branches to a panic ("shift operation overflowed"). Returns the
    /// continuation block and the shift result.
    fn build_with_input_check<'blk, 'tcx>(&self,
                                          bcx: Block<'blk, 'tcx>,
                                          info: NodeIdAndSpan,
                                          lhs_t: Ty<'tcx>,
                                          lhs: ValueRef,
                                          rhs: ValueRef,
                                          binop_debug_loc: DebugLoc)
                                          -> (Block<'blk, 'tcx>, ValueRef)
    {
        let lhs_llty = val_ty(lhs);
        let rhs_llty = val_ty(rhs);

        // Panic if any bits are set outside of bits that we always
        // mask in.
        //
        // Note that the mask's value is derived from the LHS type
        // (since that is where the 32/64 distinction is relevant) but
        // the mask's type must match the RHS type (since they will
        // both be fed into an and-binop)
        let invert_mask = !shift_mask_val(lhs_llty);
        let invert_mask = C_integral(rhs_llty, invert_mask, true);

        // Non-zero `outer_bits` means the shift amount is out of range.
        let outer_bits = And(bcx, rhs, invert_mask, binop_debug_loc);
        let cond = ICmp(bcx, llvm::IntNE, outer_bits,
                        C_integral(rhs_llty, 0, false), binop_debug_loc);
        let result = match *self {
            OverflowOpViaInputCheck::Shl =>
                build_unchecked_lshift(bcx, lhs, rhs, binop_debug_loc),
            OverflowOpViaInputCheck::Shr =>
                build_unchecked_rshift(bcx, lhs_t, lhs, rhs, binop_debug_loc),
        };
        let bcx =
            base::with_cond(bcx, cond, |bcx|
                controlflow::trans_fail(bcx, info,
                    InternedString::new("shift operation overflowed")));

        (bcx, result)
    }
}
2450 fn shift_mask_val(llty
: Type
) -> u64 {
2451 // i8/u8 can shift by at most 7, i16/u16 by at most 15, etc.
2452 llty
.int_width() - 1
2455 // To avoid UB from LLVM, these two functions mask RHS with an
2456 // appropriate mask unconditionally (i.e. the fallback behavior for
2457 // all shifts). For 32- and 64-bit types, this matches the semantics
2458 // of Java. (See related discussion on #1877 and #10183.)
/// Emits a left shift with the RHS unconditionally masked to the legal
/// range (see the comment above about matching Java semantics), so LLVM
/// never sees an out-of-range shift amount (which would be UB).
fn build_unchecked_lshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                      lhs: ValueRef,
                                      rhs: ValueRef,
                                      binop_debug_loc: DebugLoc) -> ValueRef {
    // Coerce the RHS to the LHS's width, as LLVM's shl requires.
    let rhs = base::cast_shift_expr_rhs(bcx, ast::BinOp_::BiShl, lhs, rhs);
    // #1877, #10183: Ensure that input is always valid
    let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
    Shl(bcx, lhs, rhs, binop_debug_loc)
}
/// Emits a right shift with the RHS unconditionally masked to the legal
/// range. Uses an arithmetic shift (sign-extending) for signed LHS types
/// and a logical shift (zero-filling) otherwise.
fn build_unchecked_rshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                      lhs_t: Ty<'tcx>,
                                      lhs: ValueRef,
                                      rhs: ValueRef,
                                      binop_debug_loc: DebugLoc) -> ValueRef {
    // Coerce the RHS to the LHS's width, as LLVM's shift ops require.
    let rhs = base::cast_shift_expr_rhs(bcx, ast::BinOp_::BiShr, lhs, rhs);
    // #1877, #10183: Ensure that input is always valid
    let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
    let is_signed = ty::type_is_signed(lhs_t);
    if is_signed {
        AShr(bcx, lhs, rhs, binop_debug_loc)
    } else {
        LShr(bcx, lhs, rhs, binop_debug_loc)
    }
}
/// Masks a shift amount down to the legal range for its own LLVM type
/// (width - 1), emitting the `and` instruction in `bcx`.
fn shift_mask_rhs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                              rhs: ValueRef,
                              debug_loc: DebugLoc) -> ValueRef {
    let rhs_llty = val_ty(rhs);
    let mask = shift_mask_val(rhs_llty);
    And(bcx, rhs, C_integral(rhs_llty, mask, false), debug_loc)
}
2494 fn with_overflow_check
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>, oop
: OverflowOp
, info
: NodeIdAndSpan
,
2495 lhs_t
: Ty
<'tcx
>, lhs
: ValueRef
,
2497 binop_debug_loc
: DebugLoc
)
2498 -> (Block
<'blk
, 'tcx
>, ValueRef
) {
2499 if bcx
.unreachable
.get() { return (bcx, _Undef(lhs)); }
2500 if bcx
.ccx().check_overflow() {
2502 match oop
.codegen_strategy() {
2503 OverflowCodegen
::ViaIntrinsic(oop
) =>
2504 oop
.build_intrinsic_call(bcx
, info
, lhs_t
, lhs
, rhs
, binop_debug_loc
),
2505 OverflowCodegen
::ViaInputCheck(oop
) =>
2506 oop
.build_with_input_check(bcx
, info
, lhs_t
, lhs
, rhs
, binop_debug_loc
),
2509 let res
= match oop
{
2510 OverflowOp
::Add
=> Add(bcx
, lhs
, rhs
, binop_debug_loc
),
2511 OverflowOp
::Sub
=> Sub(bcx
, lhs
, rhs
, binop_debug_loc
),
2512 OverflowOp
::Mul
=> Mul(bcx
, lhs
, rhs
, binop_debug_loc
),
2515 build_unchecked_lshift(bcx
, lhs
, rhs
, binop_debug_loc
),
2517 build_unchecked_rshift(bcx
, lhs_t
, lhs
, rhs
, binop_debug_loc
),