]> git.proxmox.com Git - rustc.git/blob - src/librustc_trans/trans/expr.rs
Imported Upstream version 1.0.0~beta.3
[rustc.git] / src / librustc_trans / trans / expr.rs
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! # Translation of Expressions
12 //!
13 //! The expr module handles translation of expressions. The most general
14 //! translation routine is `trans()`, which will translate an expression
15 //! into a datum. `trans_into()` is also available, which will translate
16 //! an expression and write the result directly into memory, sometimes
17 //! avoiding the need for a temporary stack slot. Finally,
18 //! `trans_to_lvalue()` is available if you'd like to ensure that the
19 //! result has cleanup scheduled.
20 //!
21 //! Internally, each of these functions dispatches to various other
22 //! expression functions depending on the kind of expression. We divide
23 //! up expressions into:
24 //!
25 //! - **Datum expressions:** Those that most naturally yield values.
26 //! Examples would be `22`, `box x`, or `a + b` (when not overloaded).
27 //! - **DPS expressions:** Those that most naturally write into a location
28 //! in memory. Examples would be `foo()` or `Point { x: 3, y: 4 }`.
29 //! - **Statement expressions:** Those that do not generate a meaningful
30 //! result. Examples would be `while { ... }` or `return 44`.
31 //!
32 //! Public entry points:
33 //!
34 //! - `trans_into(bcx, expr, dest) -> bcx`: evaluates an expression,
35 //! storing the result into `dest`. This is the preferred form, if you
36 //! can manage it.
37 //!
38 //! - `trans(bcx, expr) -> DatumBlock`: evaluates an expression, yielding
39 //! a `Datum` with the result. You can then store the datum, inspect
40 //! the value, etc. This may introduce temporaries if the datum is a
41 //! structural type.
42 //!
43 //! - `trans_to_lvalue(bcx, expr, "...") -> DatumBlock`: evaluates an
44 //! expression and ensures that the result has a cleanup associated with it,
45 //! creating a temporary stack slot if necessary.
46 //!
47 //! - `trans_local_var -> Datum`: looks up a local variable or upvar.
48
49 #![allow(non_camel_case_types)]
50
51 pub use self::cast_kind::*;
52 pub use self::Dest::*;
53 use self::lazy_binop_ty::*;
54
55 use back::abi;
56 use llvm::{self, ValueRef};
57 use middle::check_const;
58 use middle::def;
59 use middle::mem_categorization::Typer;
60 use middle::subst::{self, Substs};
61 use trans::{_match, adt, asm, base, callee, closure, consts, controlflow};
62 use trans::base::*;
63 use trans::build::*;
64 use trans::cleanup::{self, CleanupMethods};
65 use trans::common::*;
66 use trans::datum::*;
67 use trans::debuginfo::{self, DebugLoc, ToDebugLoc};
68 use trans::glue;
69 use trans::machine;
70 use trans::meth;
71 use trans::monomorphize;
72 use trans::tvec;
73 use trans::type_of;
74 use middle::ty::{struct_fields, tup_fields};
75 use middle::ty::{AdjustDerefRef, AdjustReifyFnPointer, AdjustUnsafeFnPointer};
76 use middle::ty::{self, Ty};
77 use middle::ty::MethodCall;
78 use util::common::indenter;
79 use util::ppaux::Repr;
80 use trans::machine::{llsize_of, llsize_of_alloc};
81 use trans::type_::Type;
82
83 use syntax::{ast, ast_util, codemap};
84 use syntax::parse::token::InternedString;
85 use syntax::ptr::P;
86 use syntax::parse::token;
87 use std::iter::repeat;
88 use std::mem;
89 use std::rc::Rc;
90
91 // Destinations
92
93 // These are passed around by the code generating functions to track the
94 // destination of a computation's value.
95
/// A destination for the value an expression computes: either a memory
/// location to write into, or nowhere at all.
#[derive(Copy, Clone, PartialEq)]
pub enum Dest {
    /// Store the result into the memory pointed to by this LLVM value.
    SaveIn(ValueRef),
    /// The caller does not need the result; emit no store.
    Ignore,
}
101
102 impl Dest {
103 pub fn to_string(&self, ccx: &CrateContext) -> String {
104 match *self {
105 SaveIn(v) => format!("SaveIn({})", ccx.tn().val_to_string(v)),
106 Ignore => "Ignore".to_string()
107 }
108 }
109 }
110
/// This function is equivalent to `trans(bcx, expr).store_to_dest(dest)` but it may generate
/// better optimized LLVM code.
///
/// Fast paths handled here: expressions that const-checking qualified as
/// constants are materialized as LLVM globals and memcpy'd (or, for
/// `PREFER_IN_PLACE` consts referenced by path, re-translated in place);
/// everything else is dispatched on its `ty::expr_kind` inside a fresh
/// AST cleanup scope.
pub fn trans_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                              expr: &ast::Expr,
                              dest: Dest)
                              -> Block<'blk, 'tcx> {
    let mut bcx = bcx;

    debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);

    if bcx.tcx().adjustments.borrow().contains_key(&expr.id) {
        // use trans, which may be less efficient but
        // which will perform the adjustments:
        let datum = unpack_datum!(bcx, trans(bcx, expr));
        return datum.store_to_dest(bcx, dest, expr.id);
    }

    // Qualification flags recorded by const-checking for this expression.
    let qualif = *bcx.tcx().const_qualif_map.borrow().get(&expr.id).unwrap();
    if !qualif.intersects(check_const::NOT_CONST | check_const::NEEDS_DROP) {
        if !qualif.intersects(check_const::PREFER_IN_PLACE) {
            if let SaveIn(lldest) = dest {
                let global = consts::get_const_expr_as_global(bcx.ccx(), expr, qualif,
                                                              bcx.fcx.param_substs);
                // Cast pointer to destination, because constants
                // have different types.
                let lldest = PointerCast(bcx, lldest, val_ty(global));
                memcpy_ty(bcx, lldest, global, expr_ty_adjusted(bcx, expr));
            }
            // Don't do anything in the Ignore case, consts don't need drop.
            return bcx;
        } else {
            // The only way we're going to see a `const` at this point is if
            // it prefers in-place instantiation, likely because it contains
            // `[x; N]` somewhere within.
            match expr.node {
                ast::ExprPath(..) => {
                    match bcx.def(expr.id) {
                        def::DefConst(did) => {
                            let const_expr = consts::get_const_expr(bcx.ccx(), did, expr);
                            // Temporarily get cleanup scopes out of the way,
                            // as they require sub-expressions to be contained
                            // inside the current AST scope.
                            // These should record no cleanups anyways, `const`
                            // can't have destructors.
                            let scopes = mem::replace(&mut *bcx.fcx.scopes.borrow_mut(),
                                                      vec![]);
                            // Lock emitted debug locations to the location of
                            // the constant reference expression.
                            debuginfo::with_source_location_override(bcx.fcx,
                                                                     expr.debug_loc(),
                                                                     || {
                                bcx = trans_into(bcx, const_expr, dest)
                            });
                            // Restore the scopes and double-check the const
                            // translation recorded no cleanups of its own.
                            let scopes = mem::replace(&mut *bcx.fcx.scopes.borrow_mut(),
                                                      scopes);
                            assert!(scopes.is_empty());
                            return bcx;
                        }
                        _ => {}
                    }
                }
                _ => {}
            }
        }
    }

    debug!("trans_into() expr={}", expr.repr(bcx.tcx()));

    let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
                                                                          expr.id,
                                                                          expr.span,
                                                                          false);
    bcx.fcx.push_ast_cleanup_scope(cleanup_debug_loc);

    // Dispatch on the expression kind the type checker computed.
    let kind = ty::expr_kind(bcx.tcx(), expr);
    bcx = match kind {
        ty::LvalueExpr | ty::RvalueDatumExpr => {
            trans_unadjusted(bcx, expr).store_to_dest(dest, expr.id)
        }
        ty::RvalueDpsExpr => {
            trans_rvalue_dps_unadjusted(bcx, expr, dest)
        }
        ty::RvalueStmtExpr => {
            trans_rvalue_stmt_unadjusted(bcx, expr)
        }
    };

    bcx.fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id)
}
200
/// Translates an expression, returning a datum (and new block) encapsulating the result. When
/// possible, it is preferred to use `trans_into`, as that may avoid creating a temporary on the
/// stack.
///
/// Like `trans_into`, constant-qualified expressions take a fast path
/// through an LLVM global; otherwise the expression is translated
/// unadjusted and any recorded adjustments are applied afterwards.
pub fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                         expr: &ast::Expr)
                         -> DatumBlock<'blk, 'tcx, Expr> {
    debug!("trans(expr={})", bcx.expr_to_string(expr));

    let mut bcx = bcx;
    let fcx = bcx.fcx;
    // Qualification flags recorded by const-checking for this expression.
    let qualif = *bcx.tcx().const_qualif_map.borrow().get(&expr.id).unwrap();
    // When trans::consts already applied adjustments, we must not apply
    // them a second time below.
    let adjusted_global = !qualif.intersects(check_const::NON_STATIC_BORROWS);
    let global = if !qualif.intersects(check_const::NOT_CONST | check_const::NEEDS_DROP) {
        let global = consts::get_const_expr_as_global(bcx.ccx(), expr, qualif,
                                                      bcx.fcx.param_substs);

        if qualif.intersects(check_const::HAS_STATIC_BORROWS) {
            // Is borrowed as 'static, must return lvalue.

            // Cast pointer to global, because constants have different types.
            let const_ty = expr_ty_adjusted(bcx, expr);
            let llty = type_of::type_of(bcx.ccx(), const_ty);
            let global = PointerCast(bcx, global, llty.ptr_to());
            let datum = Datum::new(global, const_ty, Lvalue);
            return DatumBlock::new(bcx, datum.to_expr_datum());
        }

        // Otherwise, keep around and perform adjustments, if needed.
        let const_ty = if adjusted_global {
            expr_ty_adjusted(bcx, expr)
        } else {
            expr_ty(bcx, expr)
        };

        // This could use a better heuristic.
        Some(if type_is_immediate(bcx.ccx(), const_ty) {
            // Cast pointer to global, because constants have different types.
            let llty = type_of::type_of(bcx.ccx(), const_ty);
            let global = PointerCast(bcx, global, llty.ptr_to());
            // Maybe just get the value directly, instead of loading it?
            immediate_rvalue(load_ty(bcx, global, const_ty), const_ty)
        } else {
            let llty = type_of::type_of(bcx.ccx(), const_ty);
            // HACK(eddyb) get around issues with lifetime intrinsics.
            let scratch = alloca_no_lifetime(bcx, llty, "const");
            let lldest = if !ty::type_is_structural(const_ty) {
                // Cast pointer to slot, because constants have different types.
                PointerCast(bcx, scratch, val_ty(global))
            } else {
                // In this case, memcpy_ty calls llvm.memcpy after casting both
                // source and destination to i8*, so we don't need any casts.
                scratch
            };
            memcpy_ty(bcx, lldest, global, const_ty);
            Datum::new(scratch, const_ty, Rvalue::new(ByRef))
        })
    } else {
        None
    };

    let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
                                                                          expr.id,
                                                                          expr.span,
                                                                          false);
    fcx.push_ast_cleanup_scope(cleanup_debug_loc);
    let datum = match global {
        Some(rvalue) => rvalue.to_expr_datum(),
        None => unpack_datum!(bcx, trans_unadjusted(bcx, expr))
    };
    let datum = if adjusted_global {
        datum // trans::consts already performed adjustments.
    } else {
        unpack_datum!(bcx, apply_adjustments(bcx, expr, datum))
    };
    bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id);
    return DatumBlock::new(bcx, datum);
}
278
279 pub fn get_len(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
280 GEPi(bcx, fat_ptr, &[0, abi::FAT_PTR_EXTRA])
281 }
282
283 pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
284 GEPi(bcx, fat_ptr, &[0, abi::FAT_PTR_ADDR])
285 }
286
287 pub fn copy_fat_ptr(bcx: Block, src_ptr: ValueRef, dst_ptr: ValueRef) {
288 Store(bcx, Load(bcx, get_dataptr(bcx, src_ptr)), get_dataptr(bcx, dst_ptr));
289 Store(bcx, Load(bcx, get_len(bcx, src_ptr)), get_len(bcx, dst_ptr));
290 }
291
/// Retrieve the information we are losing (making dynamic) in an unsizing
/// adjustment.
///
/// The `old_info` argument is a bit funny. It is intended for use
/// in an upcast, where the new vtable for an object will be derived
/// from the old one.
pub fn unsized_info<'ccx, 'tcx>(ccx: &CrateContext<'ccx, 'tcx>,
                                source: Ty<'tcx>,
                                target: Ty<'tcx>,
                                old_info: Option<ValueRef>,
                                param_substs: &'tcx subst::Substs<'tcx>)
                                -> ValueRef {
    // Peel source and target down in lockstep to the tails that differ.
    let (source, target) = ty::struct_lockstep_tails(ccx.tcx(), source, target);
    match (&source.sty, &target.sty) {
        // [T; len] -> [T]: the lost information is the constant length.
        (&ty::ty_vec(_, Some(len)), &ty::ty_vec(_, None)) => C_uint(ccx, len),
        (&ty::ty_trait(_), &ty::ty_trait(_)) => {
            // For now, upcasts are limited to changes in marker
            // traits, and hence never actually require an actual
            // change to the vtable.
            old_info.expect("unsized_info: missing old info for trait upcast")
        }
        // Concrete type -> trait object: the lost information is the vtable.
        (_, &ty::ty_trait(box ty::TyTrait { ref principal, .. })) => {
            // Note that we preserve binding levels here:
            let substs = principal.0.substs.with_self_ty(source).erase_regions();
            let substs = ccx.tcx().mk_substs(substs);
            let trait_ref = ty::Binder(Rc::new(ty::TraitRef { def_id: principal.def_id(),
                                                              substs: substs }));
            consts::ptrcast(meth::get_vtable(ccx, trait_ref, param_substs),
                            Type::vtable_ptr(ccx))
        }
        _ => ccx.sess().bug(&format!("unsized_info: invalid unsizing {} -> {}",
                                     source.repr(ccx.tcx()),
                                     target.repr(ccx.tcx())))
    }
}
327
/// Helper for trans that apply adjustments from `expr` to `datum`, which should be the unadjusted
/// translation of `expr`.
///
/// Handles the three recorded adjustment kinds: fn-item reification and
/// unsafe-fn coercion (both no-ops at the value level here), and
/// deref/ref adjustments (autoderefs, autoref, and unsizing).
fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 expr: &ast::Expr,
                                 datum: Datum<'tcx, Expr>)
                                 -> DatumBlock<'blk, 'tcx, Expr>
{
    let mut bcx = bcx;
    let mut datum = datum;
    // No recorded adjustment: the unadjusted datum is already the answer.
    let adjustment = match bcx.tcx().adjustments.borrow().get(&expr.id).cloned() {
        None => {
            return DatumBlock::new(bcx, datum);
        }
        Some(adj) => { adj }
    };
    debug!("unadjusted datum for expr {}: {} adjustment={:?}",
           expr.repr(bcx.tcx()),
           datum.to_string(bcx.ccx()),
           adjustment);
    match adjustment {
        AdjustReifyFnPointer => {
            // FIXME(#19925) once fn item types are
            // zero-sized, we'll need to do something here
        }
        AdjustUnsafeFnPointer => {
            // purely a type-level thing
        }
        AdjustDerefRef(ref adj) => {
            let skip_reborrows = if adj.autoderefs == 1 && adj.autoref.is_some() {
                // We are a bit paranoid about adjustments and thus might have a re-
                // borrow here which merely derefs and then refs again (it might have
                // a different region or mutability, but we don't care here).
                match datum.ty.sty {
                    // Don't skip a conversion from Box<T> to &T, etc.
                    ty::ty_rptr(..) => {
                        let method_call = MethodCall::autoderef(expr.id, 0);
                        if bcx.tcx().method_map.borrow().contains_key(&method_call) {
                            // Don't skip an overloaded deref.
                            0
                        } else {
                            1
                        }
                    }
                    _ => 0
                }
            } else {
                0
            };

            if adj.autoderefs > skip_reborrows {
                // Schedule cleanup.
                let lval = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "auto_deref", expr.id));
                datum = unpack_datum!(bcx, deref_multiple(bcx, expr,
                                                          lval.to_expr_datum(),
                                                          adj.autoderefs - skip_reborrows));
            }

            // (You might think there is a more elegant way to do this than a
            // skip_reborrows bool, but then you remember that the borrow checker exists).
            if skip_reborrows == 0 && adj.autoref.is_some() {
                datum = unpack_datum!(bcx, apply_autoref(bcx, expr, datum));
            }

            if let Some(target) = adj.unsize {
                datum = unpack_datum!(bcx, unsize_pointer(bcx, datum,
                                                          bcx.monomorphize(&target)));
            }
        }
    }
    debug!("after adjustments, datum={}", datum.to_string(bcx.ccx()));
    return DatumBlock::new(bcx, datum);

    /// Borrows `datum`, producing a pointer datum; unsized values take
    /// the fat-pointer path so both words are carried along.
    fn apply_autoref<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 expr: &ast::Expr,
                                 datum: Datum<'tcx, Expr>)
                                 -> DatumBlock<'blk, 'tcx, Expr> {
        let mut bcx = bcx;

        if !type_is_sized(bcx.tcx(), datum.ty) {
            // Arrange cleanup
            let lval = unpack_datum!(bcx,
                                     datum.to_lvalue_datum(bcx, "ref_fat_ptr", expr.id));
            ref_fat_ptr(bcx, lval)
        } else {
            auto_ref(bcx, datum, expr)
        }
    }

    /// Converts the pointer datum into a fat pointer to `target`,
    /// packing the data pointer together with the extra info computed
    /// by `unsized_info` into a scratch slot.
    fn unsize_pointer<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                  datum: Datum<'tcx, Expr>,
                                  target: Ty<'tcx>)
                                  -> DatumBlock<'blk, 'tcx, Expr> {
        let mut bcx = bcx;
        let unsized_ty = ty::deref(target, true)
            .expect("expr::unsize got non-pointer target type").ty;
        debug!("unsize_lvalue(unsized_ty={})", unsized_ty.repr(bcx.tcx()));

        // We do not arrange cleanup ourselves; if we already are an
        // L-value, then cleanup will have already been scheduled (and
        // the `datum.to_rvalue_datum` call below will emit code to zero
        // the drop flag when moving out of the L-value). If we are an
        // R-value, then we do not need to schedule cleanup.
        let datum = unpack_datum!(bcx, datum.to_rvalue_datum(bcx, "__unsize_ref"));

        let pointee_ty = ty::deref(datum.ty, true)
            .expect("expr::unsize got non-pointer datum type").ty;
        let (base, old_info) = if !type_is_sized(bcx.tcx(), pointee_ty) {
            // Normally, the source is a thin pointer and we are
            // adding extra info to make a fat pointer. The exception
            // is when we are upcasting an existing object fat pointer
            // to use a different vtable. In that case, we want to
            // load out the original data pointer so we can repackage
            // it.
            (Load(bcx, get_dataptr(bcx, datum.val)),
             Some(Load(bcx, get_len(bcx, datum.val))))
        } else {
            (datum.val, None)
        };

        let info = unsized_info(bcx.ccx(), pointee_ty, unsized_ty,
                                old_info, bcx.fcx.param_substs);

        // Compute the base pointer. This doesn't change the pointer value,
        // but merely its type.
        let ptr_ty = type_of::in_memory_type_of(bcx.ccx(), unsized_ty).ptr_to();
        let base = PointerCast(bcx, base, ptr_ty);

        let llty = type_of::type_of(bcx.ccx(), target);
        // HACK(eddyb) get around issues with lifetime intrinsics.
        let scratch = alloca_no_lifetime(bcx, llty, "__fat_ptr");
        Store(bcx, base, get_dataptr(bcx, scratch));
        Store(bcx, info, get_len(bcx, scratch));

        DatumBlock::new(bcx, Datum::new(scratch, target, RvalueExpr(Rvalue::new(ByRef))))
    }
}
464
465 /// Translates an expression in "lvalue" mode -- meaning that it returns a reference to the memory
466 /// that the expr represents.
467 ///
468 /// If this expression is an rvalue, this implies introducing a temporary. In other words,
469 /// something like `x().f` is translated into roughly the equivalent of
470 ///
471 /// { tmp = x(); tmp.f }
472 pub fn trans_to_lvalue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
473 expr: &ast::Expr,
474 name: &str)
475 -> DatumBlock<'blk, 'tcx, Lvalue> {
476 let mut bcx = bcx;
477 let datum = unpack_datum!(bcx, trans(bcx, expr));
478 return datum.to_lvalue_datum(bcx, name, expr.id);
479 }
480
/// A version of `trans` that ignores adjustments. You almost certainly do not want to call this
/// directly.
fn trans_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                expr: &ast::Expr)
                                -> DatumBlock<'blk, 'tcx, Expr> {
    let mut bcx = bcx;

    debug!("trans_unadjusted(expr={})", bcx.expr_to_string(expr));
    let _indenter = indenter();

    debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);

    // Dispatch on the expression kind computed by the type checker.
    return match ty::expr_kind(bcx.tcx(), expr) {
        ty::LvalueExpr | ty::RvalueDatumExpr => {
            let datum = unpack_datum!(bcx, {
                trans_datum_unadjusted(bcx, expr)
            });

            DatumBlock {bcx: bcx, datum: datum}
        }

        ty::RvalueStmtExpr => {
            bcx = trans_rvalue_stmt_unadjusted(bcx, expr);
            // Statement expressions produce no useful value; hand back
            // an undef datum of the expression's type.
            nil(bcx, expr_ty(bcx, expr))
        }

        ty::RvalueDpsExpr => {
            let ty = expr_ty(bcx, expr);
            if type_is_zero_size(bcx.ccx(), ty) {
                bcx = trans_rvalue_dps_unadjusted(bcx, expr, Ignore);
                nil(bcx, ty)
            } else {
                // DPS expressions want a destination: give them a scratch
                // slot and then repackage it as a datum.
                let scratch = rvalue_scratch_datum(bcx, ty, "");
                bcx = trans_rvalue_dps_unadjusted(
                    bcx, expr, SaveIn(scratch.val));

                // Note: this is not obviously a good idea. It causes
                // immediate values to be loaded immediately after a
                // return from a call or other similar expression,
                // which in turn leads to alloca's having shorter
                // lifetimes and hence larger stack frames. However,
                // in turn it can lead to more register pressure.
                // Still, in practice it seems to increase
                // performance, since we have fewer problems with
                // morestack churn.
                let scratch = unpack_datum!(
                    bcx, scratch.to_appropriate_datum(bcx));

                DatumBlock::new(bcx, scratch.to_expr_datum())
            }
        }
    };

    /// Produces an "empty" datum: an undef immediate of the given type.
    fn nil<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ty: Ty<'tcx>)
                       -> DatumBlock<'blk, 'tcx, Expr> {
        let llval = C_undef(type_of::type_of(bcx.ccx(), ty));
        let datum = immediate_rvalue(llval, ty);
        DatumBlock::new(bcx, datum.to_expr_datum())
    }
}
541
/// Translates the expression forms that naturally yield a value
/// (`LvalueExpr` / `RvalueDatumExpr` kinds), dispatching on the AST node.
/// Reaching the fall-through arm is a compiler bug.
fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                      expr: &ast::Expr)
                                      -> DatumBlock<'blk, 'tcx, Expr> {
    let mut bcx = bcx;
    let fcx = bcx.fcx;
    let _icx = push_ctxt("trans_datum_unadjusted");

    match expr.node {
        ast::ExprParen(ref e) => {
            trans(bcx, &**e)
        }
        ast::ExprPath(..) => {
            trans_def(bcx, expr, bcx.def(expr.id))
        }
        ast::ExprField(ref base, ident) => {
            trans_rec_field(bcx, &**base, ident.node.name)
        }
        ast::ExprTupField(ref base, idx) => {
            trans_rec_tup_field(bcx, &**base, idx.node)
        }
        ast::ExprIndex(ref base, ref idx) => {
            trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id))
        }
        ast::ExprBox(_, ref contents) => {
            // Special case for `Box<T>`
            let box_ty = expr_ty(bcx, expr);
            let contents_ty = expr_ty(bcx, &**contents);
            match box_ty.sty {
                ty::ty_uniq(..) => {
                    trans_uniq_expr(bcx, expr, box_ty, &**contents, contents_ty)
                }
                _ => bcx.sess().span_bug(expr.span,
                                         "expected unique box")
            }

        }
        ast::ExprLit(ref lit) => trans_immediate_lit(bcx, expr, &**lit),
        ast::ExprBinary(op, ref lhs, ref rhs) => {
            trans_binary(bcx, expr, op, &**lhs, &**rhs)
        }
        ast::ExprUnary(op, ref x) => {
            trans_unary(bcx, expr, op, &**x)
        }
        ast::ExprAddrOf(_, ref x) => {
            match x.node {
                ast::ExprRepeat(..) | ast::ExprVec(..) => {
                    // Special case for slices: `&[...]` / `&[x; N]` are
                    // translated in their own cleanup scope keyed on the
                    // inner expression's id.
                    let cleanup_debug_loc =
                        debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
                                                                      x.id,
                                                                      x.span,
                                                                      false);
                    fcx.push_ast_cleanup_scope(cleanup_debug_loc);
                    let datum = unpack_datum!(
                        bcx, tvec::trans_slice_vec(bcx, expr, &**x));
                    bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, x.id);
                    DatumBlock::new(bcx, datum)
                }
                _ => {
                    trans_addr_of(bcx, expr, &**x)
                }
            }
        }
        ast::ExprCast(ref val, _) => {
            // Datum output mode means this is a scalar cast:
            trans_imm_cast(bcx, &**val, expr.id)
        }
        _ => {
            bcx.tcx().sess.span_bug(
                expr.span,
                &format!("trans_rvalue_datum_unadjusted reached \
                          fall-through case: {:?}",
                         expr.node));
        }
    }
}
618
/// Shared implementation of named and positional field access: evaluates
/// `base` to an lvalue and projects out the field whose index is chosen
/// by `get_idx` from the struct/tuple's field list.
fn trans_field<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
                              base: &ast::Expr,
                              get_idx: F)
                              -> DatumBlock<'blk, 'tcx, Expr> where
    F: FnOnce(&'blk ty::ctxt<'tcx>, &[ty::field<'tcx>]) -> usize,
{
    let mut bcx = bcx;
    let _icx = push_ctxt("trans_rec_field");

    let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, base, "field"));
    let bare_ty = base_datum.ty;
    let repr = adt::represent_type(bcx.ccx(), bare_ty);
    with_field_tys(bcx.tcx(), bare_ty, None, move |discr, field_tys| {
        let ix = get_idx(bcx.tcx(), field_tys);
        let d = base_datum.get_element(
            bcx,
            field_tys[ix].mt.ty,
            |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, ix));

        if type_is_sized(bcx.tcx(), d.ty) {
            DatumBlock { datum: d.to_expr_datum(), bcx: bcx }
        } else {
            // Unsized field: repackage the element pointer together with
            // the base datum's extra word into a fresh fat-pointer slot.
            let scratch = rvalue_scratch_datum(bcx, d.ty, "");
            Store(bcx, d.val, get_dataptr(bcx, scratch.val));
            let info = Load(bcx, get_len(bcx, base_datum.val));
            Store(bcx, info, get_len(bcx, scratch.val));

            DatumBlock::new(bcx, scratch.to_expr_datum())

        }
    })

}
652
653 /// Translates `base.field`.
654 fn trans_rec_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
655 base: &ast::Expr,
656 field: ast::Name)
657 -> DatumBlock<'blk, 'tcx, Expr> {
658 trans_field(bcx, base, |tcx, field_tys| ty::field_idx_strict(tcx, field, field_tys))
659 }
660
661 /// Translates `base.<idx>`.
662 fn trans_rec_tup_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
663 base: &ast::Expr,
664 idx: usize)
665 -> DatumBlock<'blk, 'tcx, Expr> {
666 trans_field(bcx, base, |_, _| idx)
667 }
668
fn trans_index<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                           index_expr: &ast::Expr,
                           base: &ast::Expr,
                           idx: &ast::Expr,
                           method_call: MethodCall)
                           -> DatumBlock<'blk, 'tcx, Expr> {
    //! Translates `base[idx]`. Overloaded indexing (a user `index()`
    //! method recorded in the method map) goes through the method-call
    //! path; builtin indexing emits a bounds check and a GEP.

    let _icx = push_ctxt("trans_index");
    let ccx = bcx.ccx();
    let mut bcx = bcx;

    let index_expr_debug_loc = index_expr.debug_loc();

    // Check for overloaded index.
    let method_ty = ccx.tcx()
                       .method_map
                       .borrow()
                       .get(&method_call)
                       .map(|method| method.ty);
    let elt_datum = match method_ty {
        Some(method_ty) => {
            let method_ty = monomorphize_type(bcx, method_ty);

            let base_datum = unpack_datum!(bcx, trans(bcx, base));

            // Translate index expression.
            let ix_datum = unpack_datum!(bcx, trans(bcx, idx));

            let ref_ty = // invoked methods have LB regions instantiated:
                ty::no_late_bound_regions(
                    bcx.tcx(), &ty::ty_fn_ret(method_ty)).unwrap().unwrap();
            let elt_ty = match ty::deref(ref_ty, true) {
                None => {
                    bcx.tcx().sess.span_bug(index_expr.span,
                                            "index method didn't return a \
                                             dereferenceable type?!")
                }
                Some(elt_tm) => elt_tm.ty,
            };

            // Overloaded. Evaluate `trans_overloaded_op`, which will
            // invoke the user's index() method, which basically yields
            // a `&T` pointer. We can then proceed down the normal
            // path (below) to dereference that `&T`.
            let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_index_elt");
            unpack_result!(bcx,
                           trans_overloaded_op(bcx,
                                               index_expr,
                                               method_call,
                                               base_datum,
                                               vec![(ix_datum, idx.id)],
                                               Some(SaveIn(scratch.val)),
                                               false));
            let datum = scratch.to_expr_datum();
            if type_is_sized(bcx.tcx(), elt_ty) {
                Datum::new(datum.to_llscalarish(bcx), elt_ty, LvalueExpr)
            } else {
                Datum::new(datum.val, elt_ty, LvalueExpr)
            }
        }
        None => {
            let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx,
                                                                base,
                                                                "index"));

            // Translate index expression and cast to a suitable LLVM integer.
            // Rust is less strict than LLVM in this regard.
            let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
            let ix_val = ix_datum.to_llscalarish(bcx);
            let ix_size = machine::llbitsize_of_real(bcx.ccx(),
                                                     val_ty(ix_val));
            let int_size = machine::llbitsize_of_real(bcx.ccx(),
                                                      ccx.int_type());
            // Widen (sign- or zero-extending depending on the index's
            // signedness) or truncate the index to the target int width.
            let ix_val = {
                if ix_size < int_size {
                    if ty::type_is_signed(expr_ty(bcx, idx)) {
                        SExt(bcx, ix_val, ccx.int_type())
                    } else { ZExt(bcx, ix_val, ccx.int_type()) }
                } else if ix_size > int_size {
                    Trunc(bcx, ix_val, ccx.int_type())
                } else {
                    ix_val
                }
            };

            let unit_ty = ty::sequence_element_type(bcx.tcx(), base_datum.ty);

            let (base, len) = base_datum.get_vec_base_and_len(bcx);

            debug!("trans_index: base {}", bcx.val_to_string(base));
            debug!("trans_index: len {}", bcx.val_to_string(len));

            // Bounds check: `ix_val >= len` (unsigned) fails; the branch
            // is marked unlikely via llvm.expect.i1.
            let bounds_check = ICmp(bcx,
                                    llvm::IntUGE,
                                    ix_val,
                                    len,
                                    index_expr_debug_loc);
            let expect = ccx.get_intrinsic(&("llvm.expect.i1"));
            let expected = Call(bcx,
                                expect,
                                &[bounds_check, C_bool(ccx, false)],
                                None,
                                index_expr_debug_loc);
            bcx = with_cond(bcx, expected, |bcx| {
                controlflow::trans_fail_bounds_check(bcx,
                                                     expr_info(index_expr),
                                                     ix_val,
                                                     len)
            });
            let elt = InBoundsGEP(bcx, base, &[ix_val]);
            let elt = PointerCast(bcx, elt, type_of::type_of(ccx, unit_ty).ptr_to());
            Datum::new(elt, unit_ty, LvalueExpr)
        }
    };

    DatumBlock::new(bcx, elt_datum)
}
787
fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                         ref_expr: &ast::Expr,
                         def: def::Def)
                         -> DatumBlock<'blk, 'tcx, Expr> {
    //! Translates a reference to a path.

    let _icx = push_ctxt("trans_def_lvalue");
    match def {
        // Fn items, methods, and tuple-struct/variant constructors
        // become function values.
        def::DefFn(..) | def::DefMethod(..) |
        def::DefStruct(_) | def::DefVariant(..) => {
            let datum = trans_def_fn_unadjusted(bcx.ccx(), ref_expr, def,
                                                bcx.fcx.param_substs);
            DatumBlock::new(bcx, datum.to_expr_datum())
        }
        def::DefStatic(did, _) => {
            // There are two things that may happen here:
            //  1) If the static item is defined in this crate, it will be
            //     translated using `get_item_val`, and we return a pointer to
            //     the result.
            //  2) If the static item is defined in another crate then we add
            //     (or reuse) a declaration of an external global, and return a
            //     pointer to that.
            let const_ty = expr_ty(bcx, ref_expr);

            // For external constants, we don't inline.
            let val = if did.krate == ast::LOCAL_CRATE {
                // Case 1.

                // The LLVM global has the type of its initializer,
                // which may not be equal to the enum's type for
                // non-C-like enums.
                let val = base::get_item_val(bcx.ccx(), did.node);
                let pty = type_of::type_of(bcx.ccx(), const_ty).ptr_to();
                PointerCast(bcx, val, pty)
            } else {
                // Case 2.
                base::get_extern_const(bcx.ccx(), did, const_ty)
            };
            DatumBlock::new(bcx, Datum::new(val, const_ty, LvalueExpr))
        }
        def::DefConst(_) => {
            // Consts are handled up front in trans/trans_into.
            bcx.sess().span_bug(ref_expr.span,
                                "constant expression should not reach expr::trans_def")
        }
        _ => {
            // Everything else resolves to a local variable or upvar.
            DatumBlock::new(bcx, trans_local_var(bcx, def).to_expr_datum())
        }
    }
}
837
/// Translates statement-like expressions (`RvalueStmtExpr` kind) --
/// forms executed for effect only, which produce no meaningful value.
/// Reaching the fall-through arm is a compiler bug.
fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                            expr: &ast::Expr)
                                            -> Block<'blk, 'tcx> {
    let mut bcx = bcx;
    let _icx = push_ctxt("trans_rvalue_stmt");

    // Don't emit anything into a block already marked unreachable.
    if bcx.unreachable.get() {
        return bcx;
    }

    debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);

    match expr.node {
        ast::ExprParen(ref e) => {
            trans_into(bcx, &**e, Ignore)
        }
        ast::ExprBreak(label_opt) => {
            controlflow::trans_break(bcx, expr, label_opt)
        }
        ast::ExprAgain(label_opt) => {
            controlflow::trans_cont(bcx, expr, label_opt)
        }
        ast::ExprRet(ref ex) => {
            // Check to see if the return expression itself is reachable.
            // This can occur when the inner expression contains a return
            let reachable = if let Some(ref cfg) = bcx.fcx.cfg {
                cfg.node_is_reachable(expr.id)
            } else {
                true
            };

            if reachable {
                controlflow::trans_ret(bcx, expr, ex.as_ref().map(|e| &**e))
            } else {
                // If it's not reachable, just translate the inner expression
                // directly. This avoids having to manage a return slot when
                // it won't actually be used anyway.
                if let &Some(ref x) = ex {
                    bcx = trans_into(bcx, &**x, Ignore);
                }
                // Mark the end of the block as unreachable. Once we get to
                // a return expression, there's no more we should be doing
                // after this.
                Unreachable(bcx);
                bcx
            }
        }
        ast::ExprWhile(ref cond, ref body, _) => {
            controlflow::trans_while(bcx, expr, &**cond, &**body)
        }
        ast::ExprLoop(ref body, _) => {
            controlflow::trans_loop(bcx, expr, &**body)
        }
        ast::ExprAssign(ref dst, ref src) => {
            let src_datum = unpack_datum!(bcx, trans(bcx, &**src));
            let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &**dst, "assign"));

            if bcx.fcx.type_needs_drop(dst_datum.ty) {
                // If there are destructors involved, make sure we
                // are copying from an rvalue, since that cannot possibly
                // alias an lvalue. We are concerned about code like:
                //
                //   a = a
                //
                // but also
                //
                //   a = a.b
                //
                // where e.g. a : Option<Foo> and a.b :
                // Option<Foo>. In that case, freeing `a` before the
                // assignment may also free `a.b`!
                //
                // We could avoid this intermediary with some analysis
                // to determine whether `dst` may possibly own `src`.
                debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
                let src_datum = unpack_datum!(
                    bcx, src_datum.to_rvalue_datum(bcx, "ExprAssign"));
                // Drop the old contents of the destination before the store.
                bcx = glue::drop_ty(bcx,
                                    dst_datum.val,
                                    dst_datum.ty,
                                    expr.debug_loc());
                src_datum.store_to(bcx, dst_datum.val)
            } else {
                src_datum.store_to(bcx, dst_datum.val)
            }
        }
        ast::ExprAssignOp(op, ref dst, ref src) => {
            trans_assign_op(bcx, expr, op, &**dst, &**src)
        }
        ast::ExprInlineAsm(ref a) => {
            asm::trans_inline_asm(bcx, a)
        }
        _ => {
            bcx.tcx().sess.span_bug(
                expr.span,
                &format!("trans_rvalue_stmt_unadjusted reached \
                          fall-through case: {:?}",
                         expr.node));
        }
    }
}
939
/// Translates an rvalue expression in destination-passing style (DPS):
/// the result is written directly into `dest` (either a memory location
/// via `SaveIn`, or discarded via `Ignore`) rather than returned as a
/// datum. Assumes any adjustments have already been applied by the
/// caller. Reaching an expression kind not handled here is a bug.
fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                           expr: &ast::Expr,
                                           dest: Dest)
                                           -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_rvalue_dps_unadjusted");
    let mut bcx = bcx;
    let tcx = bcx.tcx();

    debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);

    match expr.node {
        ast::ExprParen(ref e) => {
            // Parentheses are transparent: translate the inner expression.
            trans_into(bcx, &**e, dest)
        }
        ast::ExprPath(..) => {
            trans_def_dps_unadjusted(bcx, expr, bcx.def(expr.id), dest)
        }
        ast::ExprIf(ref cond, ref thn, ref els) => {
            controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
        }
        ast::ExprMatch(ref discr, ref arms, _) => {
            _match::trans_match(bcx, expr, &**discr, &arms[..], dest)
        }
        ast::ExprBlock(ref blk) => {
            controlflow::trans_block(bcx, &**blk, dest)
        }
        ast::ExprStruct(_, ref fields, ref base) => {
            trans_struct(bcx,
                         &fields[..],
                         base.as_ref().map(|e| &**e),
                         expr.span,
                         expr.id,
                         node_id_type(bcx, expr.id),
                         dest)
        }
        ast::ExprRange(ref start, ref end) => {
            // FIXME it is just not right that we are synthesising ast nodes in
            // trans. Shudder.
            fn make_field(field_name: &str, expr: P<ast::Expr>) -> ast::Field {
                ast::Field {
                    ident: codemap::dummy_spanned(token::str_to_ident(field_name)),
                    expr: expr,
                    span: codemap::DUMMY_SP,
                }
            }

            // A range just desugars into a struct.
            // Note that the type of the start and end may not be the same, but
            // they should only differ in their lifetime, which should not matter
            // in trans.
            let (did, fields, ty_params) = match (start, end) {
                (&Some(ref start), &Some(ref end)) => {
                    // Desugar to Range
                    let fields = vec![make_field("start", start.clone()),
                                      make_field("end", end.clone())];
                    (tcx.lang_items.range_struct(), fields, vec![node_id_type(bcx, start.id)])
                }
                (&Some(ref start), &None) => {
                    // Desugar to RangeFrom
                    let fields = vec![make_field("start", start.clone())];
                    (tcx.lang_items.range_from_struct(), fields, vec![node_id_type(bcx, start.id)])
                }
                (&None, &Some(ref end)) => {
                    // Desugar to RangeTo
                    let fields = vec![make_field("end", end.clone())];
                    (tcx.lang_items.range_to_struct(), fields, vec![node_id_type(bcx, end.id)])
                }
                _ => {
                    // Desugar to RangeFull
                    (tcx.lang_items.range_full_struct(), vec![], vec![])
                }
            };

            if let Some(did) = did {
                let substs = Substs::new_type(ty_params, vec![]);
                trans_struct(bcx,
                             &fields,
                             None,
                             expr.span,
                             expr.id,
                             ty::mk_struct(tcx, did, tcx.mk_substs(substs)),
                             dest)
            } else {
                tcx.sess.span_bug(expr.span,
                                  "No lang item for ranges (how did we get this far?)")
            }
        }
        ast::ExprTup(ref args) => {
            // A tuple is translated as an ADT with a single variant
            // (discriminant 0) and positionally-numbered fields.
            let numbered_fields: Vec<(usize, &ast::Expr)> =
                args.iter().enumerate().map(|(i, arg)| (i, &**arg)).collect();
            trans_adt(bcx,
                      expr_ty(bcx, expr),
                      0,
                      &numbered_fields[..],
                      None,
                      dest,
                      expr.debug_loc())
        }
        ast::ExprLit(ref lit) => {
            match lit.node {
                ast::LitStr(ref s, _) => {
                    // String literals are the only literal kind handled in
                    // DPS style; scalar literals are datum expressions.
                    tvec::trans_lit_str(bcx, expr, (*s).clone(), dest)
                }
                _ => {
                    bcx.tcx()
                       .sess
                       .span_bug(expr.span,
                                 "trans_rvalue_dps_unadjusted shouldn't be \
                                  translating this type of literal")
                }
            }
        }
        ast::ExprVec(..) | ast::ExprRepeat(..) => {
            tvec::trans_fixed_vstore(bcx, expr, dest)
        }
        ast::ExprClosure(_, ref decl, ref body) => {
            let dest = match dest {
                SaveIn(lldest) => closure::Dest::SaveIn(bcx, lldest),
                Ignore => closure::Dest::Ignore(bcx.ccx())
            };
            closure::trans_closure_expr(dest, &**decl, &**body, expr.id, bcx.fcx.param_substs)
                .unwrap_or(bcx)
        }
        ast::ExprCall(ref f, ref args) => {
            if bcx.tcx().is_method_call(expr.id) {
                // Overloaded call via the Fn* traits.
                trans_overloaded_call(bcx,
                                      expr,
                                      &**f,
                                      &args[..],
                                      Some(dest))
            } else {
                callee::trans_call(bcx,
                                   expr,
                                   &**f,
                                   callee::ArgExprs(&args[..]),
                                   dest)
            }
        }
        ast::ExprMethodCall(_, _, ref args) => {
            // args[0] is the receiver.
            callee::trans_method_call(bcx,
                                      expr,
                                      &*args[0],
                                      callee::ArgExprs(&args[..]),
                                      dest)
        }
        ast::ExprBinary(op, ref lhs, ref rhs) => {
            // if not overloaded, would be RvalueDatumExpr
            let lhs = unpack_datum!(bcx, trans(bcx, &**lhs));
            let rhs_datum = unpack_datum!(bcx, trans(bcx, &**rhs));
            trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), lhs,
                                vec![(rhs_datum, rhs.id)], Some(dest),
                                !ast_util::is_by_value_binop(op.node)).bcx
        }
        ast::ExprUnary(op, ref subexpr) => {
            // if not overloaded, would be RvalueDatumExpr
            let arg = unpack_datum!(bcx, trans(bcx, &**subexpr));
            trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id),
                                arg, Vec::new(), Some(dest), !ast_util::is_by_value_unop(op)).bcx
        }
        ast::ExprIndex(ref base, ref idx) => {
            // if not overloaded, would be RvalueDatumExpr
            let base = unpack_datum!(bcx, trans(bcx, &**base));
            let idx_datum = unpack_datum!(bcx, trans(bcx, &**idx));
            trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), base,
                                vec![(idx_datum, idx.id)], Some(dest), true).bcx
        }
        ast::ExprCast(..) => {
            // Trait casts used to come this way, now they should be coercions.
            bcx.tcx().sess.span_bug(expr.span, "DPS expr_cast (residual trait cast?)")
        }
        ast::ExprAssignOp(op, ref dst, ref src) => {
            trans_assign_op(bcx, expr, op, &**dst, &**src)
        }
        _ => {
            bcx.tcx().sess.span_bug(
                expr.span,
                &format!("trans_rvalue_dps_unadjusted reached fall-through \
                          case: {:?}",
                         expr.node));
        }
    }
}
1122
1123 fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1124 ref_expr: &ast::Expr,
1125 def: def::Def,
1126 dest: Dest)
1127 -> Block<'blk, 'tcx> {
1128 let _icx = push_ctxt("trans_def_dps_unadjusted");
1129
1130 let lldest = match dest {
1131 SaveIn(lldest) => lldest,
1132 Ignore => { return bcx; }
1133 };
1134
1135 match def {
1136 def::DefVariant(tid, vid, _) => {
1137 let variant_info = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
1138 if !variant_info.args.is_empty() {
1139 // N-ary variant.
1140 let llfn = callee::trans_fn_ref(bcx.ccx(), vid,
1141 ExprId(ref_expr.id),
1142 bcx.fcx.param_substs).val;
1143 Store(bcx, llfn, lldest);
1144 return bcx;
1145 } else {
1146 // Nullary variant.
1147 let ty = expr_ty(bcx, ref_expr);
1148 let repr = adt::represent_type(bcx.ccx(), ty);
1149 adt::trans_set_discr(bcx, &*repr, lldest,
1150 variant_info.disr_val);
1151 return bcx;
1152 }
1153 }
1154 def::DefStruct(_) => {
1155 let ty = expr_ty(bcx, ref_expr);
1156 match ty.sty {
1157 ty::ty_struct(did, _) if ty::has_dtor(bcx.tcx(), did) => {
1158 let repr = adt::represent_type(bcx.ccx(), ty);
1159 adt::trans_set_discr(bcx, &*repr, lldest, 0);
1160 }
1161 _ => {}
1162 }
1163 bcx
1164 }
1165 _ => {
1166 bcx.tcx().sess.span_bug(ref_expr.span, &format!(
1167 "Non-DPS def {:?} referened by {}",
1168 def, bcx.node_id_to_string(ref_expr.id)));
1169 }
1170 }
1171 }
1172
/// Translates a reference to a definition that denotes a callable value
/// (fn item, tuple-struct or variant constructor, or method), yielding
/// the function pointer as an rvalue datum. Any other def is a bug.
pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                         ref_expr: &ast::Expr,
                                         def: def::Def,
                                         param_substs: &'tcx subst::Substs<'tcx>)
                                         -> Datum<'tcx, Rvalue> {
    let _icx = push_ctxt("trans_def_datum_unadjusted");

    match def {
        def::DefFn(did, _) |
        def::DefStruct(did) | def::DefVariant(_, did, _) |
        def::DefMethod(did, def::FromImpl(_)) => {
            callee::trans_fn_ref(ccx, did, ExprId(ref_expr.id), param_substs)
        }
        def::DefMethod(impl_did, def::FromTrait(trait_did)) => {
            // Trait methods go through static-method dispatch, which
            // needs the trait id to resolve the concrete callee.
            meth::trans_static_method_callee(ccx, impl_did,
                                             trait_did, ref_expr.id,
                                             param_substs)
        }
        _ => {
            ccx.tcx().sess.span_bug(ref_expr.span, &format!(
                "trans_def_fn_unadjusted invoked on: {:?} for {}",
                def,
                ref_expr.repr(ccx.tcx())));
        }
    }
}
1199
/// Translates a reference to a local variable or argument. This always results in an lvalue datum.
pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                   def: def::Def)
                                   -> Datum<'tcx, Lvalue> {
    let _icx = push_ctxt("trans_local_var");

    match def {
        def::DefUpvar(nid, _) => {
            // Can't move upvars, so this is never a ZeroMemLastUse.
            let local_ty = node_id_type(bcx, nid);
            match bcx.fcx.llupvars.borrow().get(&nid) {
                Some(&val) => Datum::new(val, local_ty, Lvalue),
                None => {
                    bcx.sess().bug(&format!(
                        "trans_local_var: no llval for upvar {} found",
                        nid));
                }
            }
        }
        def::DefLocal(nid) => {
            // Locals and arguments are looked up in the per-function
            // `lllocals` map; a miss is a compiler bug.
            let datum = match bcx.fcx.lllocals.borrow().get(&nid) {
                Some(&v) => v,
                None => {
                    bcx.sess().bug(&format!(
                        "trans_local_var: no datum for local/arg {} found",
                        nid));
                }
            };
            debug!("take_local(nid={}, v={}, ty={})",
                   nid, bcx.val_to_string(datum.val), bcx.ty_to_string(datum.ty));
            datum
        }
        _ => {
            bcx.sess().unimpl(&format!(
                "unsupported def type in trans_local_var: {:?}",
                def));
        }
    }
}
1239
/// Helper for enumerating the field types of structs, enums, or records. The optional node ID here
/// is the node ID of the path identifying the enum variant in use. If none, this cannot possibly
/// be an enum variant (so, if it is and `node_id_opt` is none, this function panics).
pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
                                  ty: Ty<'tcx>,
                                  node_id_opt: Option<ast::NodeId>,
                                  op: F)
                                  -> R where
    F: FnOnce(ty::Disr, &[ty::field<'tcx>]) -> R,
{
    match ty.sty {
        ty::ty_struct(did, substs) => {
            let fields = struct_fields(tcx, did, substs);
            let fields = monomorphize::normalize_associated_type(tcx, &fields);
            // Structs act as a single "variant" with discriminant 0.
            op(0, &fields[..])
        }

        ty::ty_tup(ref v) => {
            op(0, &tup_fields(&v[..]))
        }

        ty::ty_enum(_, substs) => {
            // We want the *variant* ID here, not the enum ID.
            match node_id_opt {
                None => {
                    tcx.sess.bug(&format!(
                        "cannot get field types from the enum type {} \
                         without a node ID",
                        ty.repr(tcx)));
                }
                Some(node_id) => {
                    let def = tcx.def_map.borrow().get(&node_id).unwrap().full_def();
                    match def {
                        def::DefVariant(enum_id, variant_id, _) => {
                            let variant_info = ty::enum_variant_with_id(tcx, enum_id, variant_id);
                            let fields = struct_fields(tcx, variant_id, substs);
                            let fields = monomorphize::normalize_associated_type(tcx, &fields);
                            op(variant_info.disr_val, &fields[..])
                        }
                        _ => {
                            tcx.sess.bug("resolve didn't map this expr to a \
                                          variant ID")
                        }
                    }
                }
            }
        }

        _ => {
            tcx.sess.bug(&format!(
                "cannot get field types from the type {}",
                ty.repr(tcx)));
        }
    }
}
1295
/// Translates a struct (or struct-like enum variant) literal into
/// `dest`. Each named field is resolved to its positional index; any
/// field not given explicitly must be supplied by the `..base`
/// expression — missing fields with no base are a compiler bug here
/// (type checking should have rejected them).
fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                            fields: &[ast::Field],
                            base: Option<&ast::Expr>,
                            expr_span: codemap::Span,
                            expr_id: ast::NodeId,
                            ty: Ty<'tcx>,
                            dest: Dest) -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_rec");

    let tcx = bcx.tcx();
    with_field_tys(tcx, ty, Some(expr_id), |discr, field_tys| {
        // need_base[i] stays true until field i is seen explicitly.
        let mut need_base: Vec<bool> = repeat(true).take(field_tys.len()).collect();

        let numbered_fields = fields.iter().map(|field| {
            let opt_pos =
                field_tys.iter().position(|field_ty|
                                          field_ty.name == field.ident.node.name);
            let result = match opt_pos {
                Some(i) => {
                    need_base[i] = false;
                    (i, &*field.expr)
                }
                None => {
                    tcx.sess.span_bug(field.span,
                                      "Couldn't find field in struct type")
                }
            };
            result
        }).collect::<Vec<_>>();
        let optbase = match base {
            Some(base_expr) => {
                // Collect (index, type) for every field that still needs
                // a value, so trans_adt can copy them from the base.
                let mut leftovers = Vec::new();
                for (i, b) in need_base.iter().enumerate() {
                    if *b {
                        leftovers.push((i, field_tys[i].mt.ty));
                    }
                }
                Some(StructBaseInfo {expr: base_expr,
                                     fields: leftovers })
            }
            None => {
                if need_base.iter().any(|b| *b) {
                    tcx.sess.span_bug(expr_span, "missing fields and no base expr")
                }
                None
            }
        };

        trans_adt(bcx,
                  ty,
                  discr,
                  &numbered_fields,
                  optbase,
                  dest,
                  DebugLoc::At(expr_id, expr_span))
    })
}
1353
/// Information that `trans_adt` needs in order to fill in the fields
/// of a struct copied from a base struct (e.g., from an expression
/// like `Foo { a: b, ..base }`).
///
/// Note that `fields` may be empty; the base expression must always be
/// evaluated for side-effects.
pub struct StructBaseInfo<'a, 'tcx> {
    /// The base expression; will be evaluated after all explicit fields.
    expr: &'a ast::Expr,
    /// The indices of fields to copy paired with their types.
    fields: Vec<(usize, Ty<'tcx>)>
}
1366
/// Constructs an ADT instance:
///
/// - `fields` should be a list of field indices paired with the
/// expression to store into that field. The initializers will be
/// evaluated in the order specified by `fields`.
///
/// - `optbase` contains information on the base struct (if any) from
/// which remaining fields are copied; see comments on `StructBaseInfo`.
pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                 ty: Ty<'tcx>,
                                 discr: ty::Disr,
                                 fields: &[(usize, &ast::Expr)],
                                 optbase: Option<StructBaseInfo<'a, 'tcx>>,
                                 dest: Dest,
                                 debug_location: DebugLoc)
                                 -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_adt");
    let fcx = bcx.fcx;
    let repr = adt::represent_type(bcx.ccx(), ty);

    debug_location.apply(bcx.fcx);

    // If we don't care about the result, just make a
    // temporary stack slot
    let addr = match dest {
        SaveIn(pos) => pos,
        Ignore => alloc_ty(bcx, ty, "temp"),
    };

    // This scope holds intermediates that must be cleaned should
    // panic occur before the ADT as a whole is ready.
    let custom_cleanup_scope = fcx.push_custom_cleanup_scope();

    if ty::type_is_simd(bcx.tcx(), ty) {
        // Issue 23112: The original logic appeared vulnerable to same
        // order-of-eval bug. But, SIMD values are tuple-structs;
        // i.e. functional record update (FRU) syntax is unavailable.
        //
        // To be safe, double-check that we did not get here via FRU.
        assert!(optbase.is_none());

        // This is the constructor of a SIMD type, such types are
        // always primitive machine types and so do not have a
        // destructor or require any clean-up.
        let llty = type_of::type_of(bcx.ccx(), ty);

        // keep a vector as a register, and running through the field
        // `insertelement`ing them directly into that register
        // (i.e. avoid GEPi and `store`s to an alloca) .
        let mut vec_val = C_undef(llty);

        for &(i, ref e) in fields {
            let block_datum = trans(bcx, &**e);
            bcx = block_datum.bcx;
            let position = C_uint(bcx.ccx(), i);
            let value = block_datum.datum.to_llscalarish(bcx);
            vec_val = InsertElement(bcx, vec_val, value, position);
        }
        Store(bcx, vec_val, addr);
    } else if let Some(base) = optbase {
        // Issue 23112: If there is a base, then order-of-eval
        // requires field expressions eval'ed before base expression.

        // First, trans field expressions to temporary scratch values.
        let scratch_vals: Vec<_> = fields.iter().map(|&(i, ref e)| {
            let datum = unpack_datum!(bcx, trans(bcx, &**e));
            (i, datum)
        }).collect();

        debug_location.apply(bcx.fcx);

        // Second, trans the base to the dest.
        assert_eq!(discr, 0);

        match ty::expr_kind(bcx.tcx(), &*base.expr) {
            ty::RvalueDpsExpr | ty::RvalueDatumExpr if !bcx.fcx.type_needs_drop(ty) => {
                bcx = trans_into(bcx, &*base.expr, SaveIn(addr));
            },
            ty::RvalueStmtExpr => bcx.tcx().sess.bug("unexpected expr kind for struct base expr"),
            _ => {
                // General case: translate the base to an lvalue and copy
                // over just the fields that were not given explicitly.
                let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &*base.expr, "base"));
                for &(i, t) in &base.fields {
                    let datum = base_datum.get_element(
                            bcx, t, |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, i));
                    assert!(type_is_sized(bcx.tcx(), datum.ty));
                    let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
                    bcx = datum.store_to(bcx, dest);
                }
            }
        }

        // Finally, move scratch field values into actual field locations
        for (i, datum) in scratch_vals.into_iter() {
            let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
            bcx = datum.store_to(bcx, dest);
        }
    } else {
        // No base means we can write all fields directly in place.
        for &(i, ref e) in fields {
            let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
            let e_ty = expr_ty_adjusted(bcx, &**e);
            bcx = trans_into(bcx, &**e, SaveIn(dest));
            let scope = cleanup::CustomScope(custom_cleanup_scope);
            fcx.schedule_lifetime_end(scope, dest);
            fcx.schedule_drop_mem(scope, dest, e_ty);
        }
    }

    adt::trans_set_discr(bcx, &*repr, addr, discr);

    fcx.pop_custom_cleanup_scope(custom_cleanup_scope);

    // If we don't care about the result drop the temporary we made
    match dest {
        SaveIn(_) => bcx,
        Ignore => {
            bcx = glue::drop_ty(bcx, addr, ty, debug_location);
            base::call_lifetime_end(bcx, addr);
            bcx
        }
    }
}
1489
1490
1491 fn trans_immediate_lit<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1492 expr: &ast::Expr,
1493 lit: &ast::Lit)
1494 -> DatumBlock<'blk, 'tcx, Expr> {
1495 // must not be a string constant, that is a RvalueDpsExpr
1496 let _icx = push_ctxt("trans_immediate_lit");
1497 let ty = expr_ty(bcx, expr);
1498 let v = consts::const_lit(bcx.ccx(), expr, lit);
1499 immediate_rvalue_bcx(bcx, v, ty).to_expr_datumblock()
1500 }
1501
/// Translates a unary operator expression in datum style. Only
/// non-overloaded operators — plus overloaded deref — are handled here;
/// other overloaded unary operators take the RvalueDpsExpr path.
fn trans_unary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                           expr: &ast::Expr,
                           op: ast::UnOp,
                           sub_expr: &ast::Expr)
                           -> DatumBlock<'blk, 'tcx, Expr> {
    let ccx = bcx.ccx();
    let mut bcx = bcx;
    let _icx = push_ctxt("trans_unary_datum");

    let method_call = MethodCall::expr(expr.id);

    // The only overloaded operator that is translated to a datum
    // is an overloaded deref, since it always yields a `&T`.
    // Otherwise, we should be in the RvalueDpsExpr path.
    assert!(
        op == ast::UnDeref ||
        !ccx.tcx().method_map.borrow().contains_key(&method_call));

    let un_ty = expr_ty(bcx, expr);

    let debug_loc = expr.debug_loc();

    match op {
        ast::UnNot => {
            let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
            let llresult = Not(bcx, datum.to_llscalarish(bcx), debug_loc);
            immediate_rvalue_bcx(bcx, llresult, un_ty).to_expr_datumblock()
        }
        ast::UnNeg => {
            let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
            let val = datum.to_llscalarish(bcx);
            let (bcx, llneg) = {
                if ty::type_is_fp(un_ty) {
                    let result = FNeg(bcx, val, debug_loc);
                    (bcx, result)
                } else {
                    let is_signed = ty::type_is_signed(un_ty);
                    let result = Neg(bcx, val, debug_loc);
                    // With overflow checks enabled, negating the minimum
                    // signed value (which has no positive counterpart)
                    // traps with a panic instead of wrapping.
                    let bcx = if bcx.ccx().check_overflow() && is_signed {
                        let (llty, min) = base::llty_and_min_for_signed_ty(bcx, un_ty);
                        let is_min = ICmp(bcx, llvm::IntEQ, val,
                                          C_integral(llty, min, true), debug_loc);
                        with_cond(bcx, is_min, |bcx| {
                            let msg = InternedString::new(
                                "attempted to negate with overflow");
                            controlflow::trans_fail(bcx, expr_info(expr), msg)
                        })
                    } else {
                        bcx
                    };
                    (bcx, result)
                }
            };
            immediate_rvalue_bcx(bcx, llneg, un_ty).to_expr_datumblock()
        }
        ast::UnUniq => {
            trans_uniq_expr(bcx, expr, un_ty, sub_expr, expr_ty(bcx, sub_expr))
        }
        ast::UnDeref => {
            let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
            deref_once(bcx, expr, datum, method_call)
        }
    }
}
1566
/// Translates `box <contents>`: allocates on the exchange heap via
/// `malloc_raw_dyn` and translates the contents directly into the
/// allocation, yielding the box pointer as an rvalue datum.
fn trans_uniq_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               box_expr: &ast::Expr,
                               box_ty: Ty<'tcx>,
                               contents: &ast::Expr,
                               contents_ty: Ty<'tcx>)
                               -> DatumBlock<'blk, 'tcx, Expr> {
    let _icx = push_ctxt("trans_uniq_expr");
    let fcx = bcx.fcx;
    assert!(type_is_sized(bcx.tcx(), contents_ty));
    let llty = type_of::type_of(bcx.ccx(), contents_ty);
    let size = llsize_of(bcx.ccx(), llty);
    let align = C_uint(bcx.ccx(), type_of::align_of(bcx.ccx(), contents_ty));
    let llty_ptr = llty.ptr_to();
    let Result { bcx, val } = malloc_raw_dyn(bcx,
                                             llty_ptr,
                                             box_ty,
                                             size,
                                             align,
                                             box_expr.debug_loc());
    // Unique boxes do not allocate for zero-size types. The standard library
    // may assume that `free` is never called on the pointer returned for
    // `Box<ZeroSizeType>`.
    let bcx = if llsize_of_alloc(bcx.ccx(), llty) == 0 {
        trans_into(bcx, contents, SaveIn(val))
    } else {
        // Guard the fresh allocation: schedule a free in case translating
        // the contents unwinds, and pop the scope once it is initialized.
        let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
        fcx.schedule_free_value(cleanup::CustomScope(custom_cleanup_scope),
                                val, cleanup::HeapExchange, contents_ty);
        let bcx = trans_into(bcx, contents, SaveIn(val));
        fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
        bcx
    };
    immediate_rvalue_bcx(bcx, val, box_ty).to_expr_datumblock()
}
1601
1602 fn ref_fat_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1603 lval: Datum<'tcx, Lvalue>)
1604 -> DatumBlock<'blk, 'tcx, Expr> {
1605 let dest_ty = ty::mk_imm_rptr(bcx.tcx(), bcx.tcx().mk_region(ty::ReStatic), lval.ty);
1606 let scratch = rvalue_scratch_datum(bcx, dest_ty, "__fat_ptr");
1607 memcpy_ty(bcx, scratch.val, lval.val, scratch.ty);
1608
1609 DatumBlock::new(bcx, scratch.to_expr_datum())
1610 }
1611
1612 fn trans_addr_of<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1613 expr: &ast::Expr,
1614 subexpr: &ast::Expr)
1615 -> DatumBlock<'blk, 'tcx, Expr> {
1616 let _icx = push_ctxt("trans_addr_of");
1617 let mut bcx = bcx;
1618 let sub_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, subexpr, "addr_of"));
1619 if !type_is_sized(bcx.tcx(), sub_datum.ty) {
1620 // DST lvalue, close to a fat pointer
1621 ref_fat_ptr(bcx, sub_datum)
1622 } else {
1623 // Sized value, ref to a thin pointer
1624 let ty = expr_ty(bcx, expr);
1625 immediate_rvalue_bcx(bcx, sub_datum.val, ty).to_expr_datumblock()
1626 }
1627 }
1628
// Important to get types for both lhs and rhs, because one might be _|_
// and the other not.
/// Translates a non-lazy, non-overloaded binary operator given the
/// already-translated scalar operands. Integer arithmetic goes through
/// overflow checking; division and remainder are checked for zero (and
/// overflow) on integers only, since fp produces NaN instead.
fn trans_eager_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 binop_expr: &ast::Expr,
                                 binop_ty: Ty<'tcx>,
                                 op: ast::BinOp,
                                 lhs_t: Ty<'tcx>,
                                 lhs: ValueRef,
                                 rhs_t: Ty<'tcx>,
                                 rhs: ValueRef)
                                 -> DatumBlock<'blk, 'tcx, Expr> {
    let _icx = push_ctxt("trans_eager_binop");

    let tcx = bcx.tcx();
    let is_simd = ty::type_is_simd(tcx, lhs_t);
    // For SIMD operands, classify float/signed by the element type.
    let intype = if is_simd {
        ty::simd_type(tcx, lhs_t)
    } else {
        lhs_t
    };
    let is_float = ty::type_is_fp(intype);
    let is_signed = ty::type_is_signed(intype);
    let info = expr_info(binop_expr);

    let binop_debug_loc = binop_expr.debug_loc();

    let mut bcx = bcx;
    let val = match op.node {
        ast::BiAdd => {
            if is_float {
                FAdd(bcx, lhs, rhs, binop_debug_loc)
            } else if is_simd {
                // SIMD integer ops are not overflow-checked.
                Add(bcx, lhs, rhs, binop_debug_loc)
            } else {
                let (newbcx, res) = with_overflow_check(
                    bcx, OverflowOp::Add, info, lhs_t, lhs, rhs, binop_debug_loc);
                bcx = newbcx;
                res
            }
        }
        ast::BiSub => {
            if is_float {
                FSub(bcx, lhs, rhs, binop_debug_loc)
            } else if is_simd {
                Sub(bcx, lhs, rhs, binop_debug_loc)
            } else {
                let (newbcx, res) = with_overflow_check(
                    bcx, OverflowOp::Sub, info, lhs_t, lhs, rhs, binop_debug_loc);
                bcx = newbcx;
                res
            }
        }
        ast::BiMul => {
            if is_float {
                FMul(bcx, lhs, rhs, binop_debug_loc)
            } else if is_simd {
                Mul(bcx, lhs, rhs, binop_debug_loc)
            } else {
                let (newbcx, res) = with_overflow_check(
                    bcx, OverflowOp::Mul, info, lhs_t, lhs, rhs, binop_debug_loc);
                bcx = newbcx;
                res
            }
        }
        ast::BiDiv => {
            if is_float {
                FDiv(bcx, lhs, rhs, binop_debug_loc)
            } else {
                // Only zero-check integers; fp /0 is NaN
                bcx = base::fail_if_zero_or_overflows(bcx,
                                                      expr_info(binop_expr),
                                                      op,
                                                      lhs,
                                                      rhs,
                                                      rhs_t);
                if is_signed {
                    SDiv(bcx, lhs, rhs, binop_debug_loc)
                } else {
                    UDiv(bcx, lhs, rhs, binop_debug_loc)
                }
            }
        }
        ast::BiRem => {
            if is_float {
                FRem(bcx, lhs, rhs, binop_debug_loc)
            } else {
                // Only zero-check integers; fp %0 is NaN
                bcx = base::fail_if_zero_or_overflows(bcx,
                                                      expr_info(binop_expr),
                                                      op, lhs, rhs, rhs_t);
                if is_signed {
                    SRem(bcx, lhs, rhs, binop_debug_loc)
                } else {
                    URem(bcx, lhs, rhs, binop_debug_loc)
                }
            }
        }
        ast::BiBitOr => Or(bcx, lhs, rhs, binop_debug_loc),
        ast::BiBitAnd => And(bcx, lhs, rhs, binop_debug_loc),
        ast::BiBitXor => Xor(bcx, lhs, rhs, binop_debug_loc),
        ast::BiShl => {
            // Shifts go through the overflow-check machinery too (the
            // shift amount is checked, not the arithmetic result).
            let (newbcx, res) = with_overflow_check(
                bcx, OverflowOp::Shl, info, lhs_t, lhs, rhs, binop_debug_loc);
            bcx = newbcx;
            res
        }
        ast::BiShr => {
            let (newbcx, res) = with_overflow_check(
                bcx, OverflowOp::Shr, info, lhs_t, lhs, rhs, binop_debug_loc);
            bcx = newbcx;
            res
        }
        ast::BiEq | ast::BiNe | ast::BiLt | ast::BiGe | ast::BiLe | ast::BiGt => {
            if is_simd {
                base::compare_simd_types(bcx, lhs, rhs, intype, op.node, binop_debug_loc)
            } else {
                base::compare_scalar_types(bcx, lhs, rhs, intype, op.node, binop_debug_loc)
            }
        }
        _ => {
            // `&&`/`||` are lazy and handled in trans_lazy_binop.
            bcx.tcx().sess.span_bug(binop_expr.span, "unexpected binop");
        }
    };

    immediate_rvalue_bcx(bcx, val, binop_ty).to_expr_datumblock()
}
1755
// refinement types would obviate the need for this
/// Which short-circuiting boolean operator is being translated.
enum lazy_binop_ty {
    lazy_and, // `&&`
    lazy_or,  // `||`
}
1761
/// Translates `&&`/`||` with short-circuit evaluation: a conditional
/// branch either skips straight to a join block (short circuit) or
/// falls through to evaluate the RHS, and the two paths are merged with
/// a phi node.
fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                binop_expr: &ast::Expr,
                                op: lazy_binop_ty,
                                a: &ast::Expr,
                                b: &ast::Expr)
                                -> DatumBlock<'blk, 'tcx, Expr> {
    let _icx = push_ctxt("trans_lazy_binop");
    let binop_ty = expr_ty(bcx, binop_expr);
    let fcx = bcx.fcx;

    let DatumBlock {bcx: past_lhs, datum: lhs} = trans(bcx, a);
    let lhs = lhs.to_llscalarish(past_lhs);

    if past_lhs.unreachable.get() {
        // The LHS diverged; no branching is needed.
        return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
    }

    let join = fcx.new_id_block("join", binop_expr.id);
    let before_rhs = fcx.new_id_block("before_rhs", b.id);

    match op {
        // `&&`: only evaluate the RHS when the LHS is true.
        lazy_and => CondBr(past_lhs, lhs, before_rhs.llbb, join.llbb, DebugLoc::None),
        // `||`: only evaluate the RHS when the LHS is false.
        lazy_or => CondBr(past_lhs, lhs, join.llbb, before_rhs.llbb, DebugLoc::None)
    }

    let DatumBlock {bcx: past_rhs, datum: rhs} = trans(before_rhs, b);
    let rhs = rhs.to_llscalarish(past_rhs);

    if past_rhs.unreachable.get() {
        // The RHS diverged, so only the short-circuit edge reaches
        // `join`; the result there is just the LHS value.
        return immediate_rvalue_bcx(join, lhs, binop_ty).to_expr_datumblock();
    }

    Br(past_rhs, join.llbb, DebugLoc::None);
    let phi = Phi(join, Type::i1(bcx.ccx()), &[lhs, rhs],
                  &[past_lhs.llbb, past_rhs.llbb]);

    return immediate_rvalue_bcx(join, phi, binop_ty).to_expr_datumblock();
}
1800
/// Translates a binary operator expression in datum style, dispatching
/// `&&`/`||` to the lazy (short-circuit) path and everything else to
/// `trans_eager_binop`. Overloaded operators never come through here.
fn trans_binary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                            expr: &ast::Expr,
                            op: ast::BinOp,
                            lhs: &ast::Expr,
                            rhs: &ast::Expr)
                            -> DatumBlock<'blk, 'tcx, Expr> {
    let _icx = push_ctxt("trans_binary");
    let ccx = bcx.ccx();

    // if overloaded, would be RvalueDpsExpr
    assert!(!ccx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));

    match op.node {
        ast::BiAnd => {
            trans_lazy_binop(bcx, expr, lazy_and, lhs, rhs)
        }
        ast::BiOr => {
            trans_lazy_binop(bcx, expr, lazy_or, lhs, rhs)
        }
        _ => {
            // Eager operator: evaluate both operands to scalars first.
            let mut bcx = bcx;
            let lhs_datum = unpack_datum!(bcx, trans(bcx, lhs));
            let rhs_datum = unpack_datum!(bcx, trans(bcx, rhs));
            let binop_ty = expr_ty(bcx, expr);

            debug!("trans_binary (expr {}): lhs_datum={}",
                   expr.id,
                   lhs_datum.to_string(ccx));
            let lhs_ty = lhs_datum.ty;
            let lhs = lhs_datum.to_llscalarish(bcx);

            debug!("trans_binary (expr {}): rhs_datum={}",
                   expr.id,
                   rhs_datum.to_string(ccx));
            let rhs_ty = rhs_datum.ty;
            let rhs = rhs_datum.to_llscalarish(bcx);
            trans_eager_binop(bcx, expr, binop_ty, op,
                              lhs_ty, lhs, rhs_ty, rhs)
        }
    }
}
1842
/// Translates an overloaded operator application (unary, binary, or
/// index) as a method call resolved through `method_call` in the method
/// map. `autoref` is forwarded to argument translation; callers set it
/// for operators whose trait takes its operands by reference.
fn trans_overloaded_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                   expr: &ast::Expr,
                                   method_call: MethodCall,
                                   lhs: Datum<'tcx, Expr>,
                                   rhs: Vec<(Datum<'tcx, Expr>, ast::NodeId)>,
                                   dest: Option<Dest>,
                                   autoref: bool)
                                   -> Result<'blk, 'tcx> {
    // The method must have been resolved during type checking.
    let method_ty = bcx.tcx().method_map.borrow().get(&method_call).unwrap().ty;
    callee::trans_call_inner(bcx,
                             expr.debug_loc(),
                             monomorphize_type(bcx, method_ty),
                             |bcx, arg_cleanup_scope| {
                                meth::trans_method_callee(bcx,
                                                          method_call,
                                                          None,
                                                          arg_cleanup_scope)
                             },
                             callee::ArgOverloadedOp(lhs, rhs, autoref),
                             dest)
}
1864
/// Translates an overloaded call expression (a call through one of the
/// `Fn*` traits): the callee expression becomes the receiver argument
/// and the call is emitted as a method call on the resolved trait
/// method.
fn trans_overloaded_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                         expr: &ast::Expr,
                                         callee: &'a ast::Expr,
                                         args: &'a [P<ast::Expr>],
                                         dest: Option<Dest>)
                                         -> Block<'blk, 'tcx> {
    let method_call = MethodCall::expr(expr.id);
    // Resolved during type checking; absence here is a bug.
    let method_type = bcx.tcx()
                         .method_map
                         .borrow()
                         .get(&method_call)
                         .unwrap()
                         .ty;
    // The callee value itself is passed first, as the `self` argument.
    let mut all_args = vec!(callee);
    all_args.extend(args.iter().map(|e| &**e));
    unpack_result!(bcx,
                   callee::trans_call_inner(bcx,
                                            expr.debug_loc(),
                                            monomorphize_type(bcx,
                                                              method_type),
                                            |bcx, arg_cleanup_scope| {
                                                meth::trans_method_callee(
                                                    bcx,
                                                    method_call,
                                                    None,
                                                    arg_cleanup_scope)
                                            },
                                            callee::ArgOverloadedCall(all_args),
                                            dest));
    bcx
}
1896
1897 fn int_cast(bcx: Block,
1898 lldsttype: Type,
1899 llsrctype: Type,
1900 llsrc: ValueRef,
1901 signed: bool)
1902 -> ValueRef {
1903 let _icx = push_ctxt("int_cast");
1904 let srcsz = llsrctype.int_width();
1905 let dstsz = lldsttype.int_width();
1906 return if dstsz == srcsz {
1907 BitCast(bcx, llsrc, lldsttype)
1908 } else if srcsz > dstsz {
1909 TruncOrBitCast(bcx, llsrc, lldsttype)
1910 } else if signed {
1911 SExtOrBitCast(bcx, llsrc, lldsttype)
1912 } else {
1913 ZExtOrBitCast(bcx, llsrc, lldsttype)
1914 }
1915 }
1916
1917 fn float_cast(bcx: Block,
1918 lldsttype: Type,
1919 llsrctype: Type,
1920 llsrc: ValueRef)
1921 -> ValueRef {
1922 let _icx = push_ctxt("float_cast");
1923 let srcsz = llsrctype.float_width();
1924 let dstsz = lldsttype.float_width();
1925 return if dstsz > srcsz {
1926 FPExt(bcx, llsrc, lldsttype)
1927 } else if srcsz > dstsz {
1928 FPTrunc(bcx, llsrc, lldsttype)
1929 } else { llsrc };
1930 }
1931
/// Coarse classification of a type for translating an `as` cast; the
/// (input, output) kind pair selects which LLVM cast operation to emit
/// (see `trans_imm_cast`).
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum cast_kind {
    cast_pointer,  // thin pointers and bare fns
    cast_integral, // ints, uints, bool, char
    cast_float,    // f32/f64
    cast_enum,     // enums (cast via their discriminant)
    cast_other,    // everything else, incl. fat pointers
}
1940
1941 pub fn cast_type_kind<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> cast_kind {
1942 match t.sty {
1943 ty::ty_char => cast_integral,
1944 ty::ty_float(..) => cast_float,
1945 ty::ty_rptr(_, mt) | ty::ty_ptr(mt) => {
1946 if type_is_sized(tcx, mt.ty) {
1947 cast_pointer
1948 } else {
1949 cast_other
1950 }
1951 }
1952 ty::ty_bare_fn(..) => cast_pointer,
1953 ty::ty_int(..) => cast_integral,
1954 ty::ty_uint(..) => cast_integral,
1955 ty::ty_bool => cast_integral,
1956 ty::ty_enum(..) => cast_enum,
1957 _ => cast_other
1958 }
1959 }
1960
1961 pub fn cast_is_noop<'tcx>(t_in: Ty<'tcx>, t_out: Ty<'tcx>) -> bool {
1962 match (ty::deref(t_in, true), ty::deref(t_out, true)) {
1963 (Some(ty::mt{ ty: t_in, .. }), Some(ty::mt{ ty: t_out, .. })) => {
1964 t_in == t_out
1965 }
1966 _ => false
1967 }
1968 }
1969
/// Translates a cast expression (`expr as T`) whose result is an
/// immediate scalar, returning it as an rvalue datum. Dispatches on
/// the (`cast_kind`, `cast_kind`) pair of source and target types.
fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                              expr: &ast::Expr,
                              id: ast::NodeId)
                              -> DatumBlock<'blk, 'tcx, Expr> {
    let _icx = push_ctxt("trans_cast");
    let mut bcx = bcx;
    let ccx = bcx.ccx();

    // Source/target Rust types, their coarse cast classification, and
    // the corresponding LLVM types.
    let t_in = expr_ty_adjusted(bcx, expr);
    let t_out = node_id_type(bcx, id);
    let k_in = cast_type_kind(bcx.tcx(), t_in);
    let k_out = cast_type_kind(bcx.tcx(), t_out);
    // Source signedness decides sext-vs-zext and SIToFP-vs-UIToFP below.
    let s_in = k_in == cast_integral && ty::type_is_signed(t_in);
    let ll_t_in = type_of::arg_type_of(ccx, t_in);
    let ll_t_out = type_of::arg_type_of(ccx, t_out);

    // Convert the value to be cast into a ValueRef, either by-ref or
    // by-value as appropriate given its type:
    let mut datum = unpack_datum!(bcx, trans(bcx, expr));

    // No-op pointer casts (same referent type) emit nothing: just
    // relabel the datum with the target type.
    let datum_ty = monomorphize_type(bcx, datum.ty);
    if cast_is_noop(datum_ty, t_out) {
        datum.ty = t_out;
        return DatumBlock::new(bcx, datum);
    }

    let newval = match (k_in, k_out) {
        (cast_integral, cast_integral) => {
            let llexpr = datum.to_llscalarish(bcx);
            int_cast(bcx, ll_t_out, ll_t_in, llexpr, s_in)
        }
        (cast_float, cast_float) => {
            let llexpr = datum.to_llscalarish(bcx);
            float_cast(bcx, ll_t_out, ll_t_in, llexpr)
        }
        (cast_integral, cast_float) => {
            let llexpr = datum.to_llscalarish(bcx);
            if s_in {
                SIToFP(bcx, llexpr, ll_t_out)
            } else { UIToFP(bcx, llexpr, ll_t_out) }
        }
        (cast_float, cast_integral) => {
            let llexpr = datum.to_llscalarish(bcx);
            // Target signedness (not source) picks the conversion here.
            if ty::type_is_signed(t_out) {
                FPToSI(bcx, llexpr, ll_t_out)
            } else { FPToUI(bcx, llexpr, ll_t_out) }
        }
        (cast_integral, cast_pointer) => {
            let llexpr = datum.to_llscalarish(bcx);
            IntToPtr(bcx, llexpr, ll_t_out)
        }
        (cast_pointer, cast_integral) => {
            let llexpr = datum.to_llscalarish(bcx);
            PtrToInt(bcx, llexpr, ll_t_out)
        }
        (cast_pointer, cast_pointer) => {
            let llexpr = datum.to_llscalarish(bcx);
            PointerCast(bcx, llexpr, ll_t_out)
        }
        (cast_enum, cast_integral) |
        (cast_enum, cast_float) => {
            // Casting an enum reads its discriminant (widened to i64,
            // signed), then converts that to the target int/float.
            let mut bcx = bcx;
            let repr = adt::represent_type(ccx, t_in);
            // Reading the discriminant requires the value in memory.
            let datum = unpack_datum!(
                bcx, datum.to_lvalue_datum(bcx, "trans_imm_cast", expr.id));
            let llexpr_ptr = datum.to_llref();
            let lldiscrim_a =
                adt::trans_get_discr(bcx, &*repr, llexpr_ptr, Some(Type::i64(ccx)));
            match k_out {
                cast_integral => int_cast(bcx, ll_t_out,
                                          val_ty(lldiscrim_a),
                                          lldiscrim_a, true),
                cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
                _ => {
                    ccx.sess().bug(&format!("translating unsupported cast: \
                                            {} ({:?}) -> {} ({:?})",
                                            t_in.repr(bcx.tcx()),
                                            k_in,
                                            t_out.repr(bcx.tcx()),
                                            k_out))
                }
            }
        }
        _ => ccx.sess().bug(&format!("translating unsupported cast: \
                                     {} ({:?}) -> {} ({:?})",
                                     t_in.repr(bcx.tcx()),
                                     k_in,
                                     t_out.repr(bcx.tcx()),
                                     k_out))
    };
    return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
}
2062
/// Translates a compound assignment `dst op= src` (e.g. `x += y`):
/// evaluates the destination as an lvalue, loads its current value,
/// evaluates the RHS, applies `op` eagerly, and stores the result back
/// into the destination slot.
fn trans_assign_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                               expr: &ast::Expr,
                               op: ast::BinOp,
                               dst: &ast::Expr,
                               src: &ast::Expr)
                               -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_assign_op");
    let mut bcx = bcx;

    debug!("trans_assign_op(expr={})", bcx.expr_to_string(expr));

    // User-defined operator methods cannot be used with `+=` etc right now
    assert!(!bcx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));

    // Evaluate LHS (destination), which should be an lvalue
    let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, dst, "assign_op"));
    assert!(!bcx.fcx.type_needs_drop(dst_datum.ty));
    let dst_ty = dst_datum.ty;
    // `dst` is rebound from the expression to the loaded current value.
    let dst = load_ty(bcx, dst_datum.val, dst_datum.ty);

    // Evaluate RHS (note: after the LHS — evaluation order matters)
    let rhs_datum = unpack_datum!(bcx, trans(bcx, &*src));
    let rhs_ty = rhs_datum.ty;
    let rhs = rhs_datum.to_llscalarish(bcx);

    // Perform computation and store the result
    let result_datum = unpack_datum!(
        bcx, trans_eager_binop(bcx, expr, dst_datum.ty, op,
                               dst_ty, dst, rhs_ty, rhs));
    return result_datum.store_to(bcx, dst_datum.val);
}
2094
2095 fn auto_ref<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2096 datum: Datum<'tcx, Expr>,
2097 expr: &ast::Expr)
2098 -> DatumBlock<'blk, 'tcx, Expr> {
2099 let mut bcx = bcx;
2100
2101 // Ensure cleanup of `datum` if not already scheduled and obtain
2102 // a "by ref" pointer.
2103 let lv_datum = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "autoref", expr.id));
2104
2105 // Compute final type. Note that we are loose with the region and
2106 // mutability, since those things don't matter in trans.
2107 let referent_ty = lv_datum.ty;
2108 let ptr_ty = ty::mk_imm_rptr(bcx.tcx(), bcx.tcx().mk_region(ty::ReStatic), referent_ty);
2109
2110 // Get the pointer.
2111 let llref = lv_datum.to_llref();
2112
2113 // Construct the resulting datum, using what was the "by ref"
2114 // ValueRef of type `referent_ty` to be the "by value" ValueRef
2115 // of type `&referent_ty`.
2116 DatumBlock::new(bcx, Datum::new(llref, ptr_ty, RvalueExpr(Rvalue::new(ByValue))))
2117 }
2118
2119 fn deref_multiple<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2120 expr: &ast::Expr,
2121 datum: Datum<'tcx, Expr>,
2122 times: usize)
2123 -> DatumBlock<'blk, 'tcx, Expr> {
2124 let mut bcx = bcx;
2125 let mut datum = datum;
2126 for i in 0..times {
2127 let method_call = MethodCall::autoderef(expr.id, i as u32);
2128 datum = unpack_datum!(bcx, deref_once(bcx, expr, datum, method_call));
2129 }
2130 DatumBlock { bcx: bcx, datum: datum }
2131 }
2132
/// Emits a single dereference of `datum`, handling both overloaded
/// `Deref` impls (looked up via `method_call` in the method map) and
/// the built-in pointer types (`Box<T>`, `&T`, `*T`), including their
/// unsized (fat-pointer) referents.
fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                          expr: &ast::Expr,
                          datum: Datum<'tcx, Expr>,
                          method_call: MethodCall)
                          -> DatumBlock<'blk, 'tcx, Expr> {
    let ccx = bcx.ccx();

    debug!("deref_once(expr={}, datum={}, method_call={:?})",
           expr.repr(bcx.tcx()),
           datum.to_string(ccx),
           method_call);

    let mut bcx = bcx;

    // Check for overloaded deref.
    let method_ty = ccx.tcx().method_map.borrow()
                       .get(&method_call).map(|method| method.ty);
    let datum = match method_ty {
        Some(method_ty) => {
            let method_ty = monomorphize_type(bcx, method_ty);

            // Overloaded. Evaluate `trans_overloaded_op`, which will
            // invoke the user's deref() method, which basically
            // converts from the `Smaht<T>` pointer that we have into
            // a `&T` pointer. We can then proceed down the normal
            // path (below) to dereference that `&T`.
            let datum = if method_call.autoderef == 0 {
                datum
            } else {
                // Always perform an AutoPtr when applying an overloaded auto-deref
                unpack_datum!(bcx, auto_ref(bcx, datum, expr))
            };

            let ref_ty = // invoked methods have their LB regions instantiated
                ty::no_late_bound_regions(
                    ccx.tcx(), &ty::ty_fn_ret(method_ty)).unwrap().unwrap();
            // Scratch slot receives the `&T` returned by the user's deref().
            let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_deref");

            unpack_result!(bcx, trans_overloaded_op(bcx, expr, method_call,
                                                    datum, Vec::new(), Some(SaveIn(scratch.val)),
                                                    false));
            scratch.to_expr_datum()
        }
        None => {
            // Not overloaded. We already have a pointer we know how to deref.
            datum
        }
    };

    // At this point `datum` is a built-in pointer; deref it by type.
    let r = match datum.ty.sty {
        ty::ty_uniq(content_ty) => {
            if type_is_sized(bcx.tcx(), content_ty) {
                deref_owned_pointer(bcx, expr, datum, content_ty)
            } else {
                // A fat pointer and a DST lvalue have the same representation
                // just different types. Since there is no temporary for `*e`
                // here (because it is unsized), we cannot emulate the sized
                // object code path for running drop glue and free. Instead,
                // we schedule cleanup for `e`, turning it into an lvalue.
                let datum = unpack_datum!(
                    bcx, datum.to_lvalue_datum(bcx, "deref", expr.id));

                let datum = Datum::new(datum.val, content_ty, LvalueExpr);
                DatumBlock::new(bcx, datum)
            }
        }

        ty::ty_ptr(ty::mt { ty: content_ty, .. }) |
        ty::ty_rptr(_, ty::mt { ty: content_ty, .. }) => {
            if type_is_sized(bcx.tcx(), content_ty) {
                let ptr = datum.to_llscalarish(bcx);

                // Always generate an lvalue datum, even if datum.mode is
                // an rvalue. This is because datum.mode is only an
                // rvalue for non-owning pointers like &T or *T, in which
                // case cleanup *is* scheduled elsewhere, by the true
                // owner (or, in the case of *T, by the user).
                DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr))
            } else {
                // A fat pointer and a DST lvalue have the same representation
                // just different types.
                DatumBlock::new(bcx, Datum::new(datum.val, content_ty, LvalueExpr))
            }
        }

        _ => {
            bcx.tcx().sess.span_bug(
                expr.span,
                &format!("deref invoked on expr of illegal type {}",
                         datum.ty.repr(bcx.tcx())));
        }
    };

    debug!("deref_once(expr={}, method_call={:?}, result={})",
           expr.id, method_call, r.datum.to_string(ccx));

    return r;

    /// We microoptimize derefs of owned pointers a bit here. Basically, the idea is to make the
    /// deref of an rvalue result in an rvalue. This helps to avoid intermediate stack slots in the
    /// resulting LLVM. The idea here is that, if the `Box<T>` pointer is an rvalue, then we can
    /// schedule a *shallow* free of the `Box<T>` pointer, and then return a ByRef rvalue into the
    /// pointer. Because the free is shallow, it is legit to return an rvalue, because we know that
    /// the contents are not yet scheduled to be freed. The language rules ensure that the contents
    /// will be used (or moved) before the free occurs.
    fn deref_owned_pointer<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                       expr: &ast::Expr,
                                       datum: Datum<'tcx, Expr>,
                                       content_ty: Ty<'tcx>)
                                       -> DatumBlock<'blk, 'tcx, Expr> {
        // Schedule a shallow free of the box itself when we own it
        // (rvalue); an lvalue's cleanup is already scheduled elsewhere.
        match datum.kind {
            RvalueExpr(Rvalue { mode: ByRef }) => {
                let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
                let ptr = Load(bcx, datum.val);
                if !type_is_zero_size(bcx.ccx(), content_ty) {
                    bcx.fcx.schedule_free_value(scope, ptr, cleanup::HeapExchange, content_ty);
                }
            }
            RvalueExpr(Rvalue { mode: ByValue }) => {
                let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
                if !type_is_zero_size(bcx.ccx(), content_ty) {
                    bcx.fcx.schedule_free_value(scope, datum.val, cleanup::HeapExchange,
                                                content_ty);
                }
            }
            LvalueExpr => { }
        }

        // If we had an rvalue in, we produce an rvalue out.
        let (llptr, kind) = match datum.kind {
            LvalueExpr => {
                (Load(bcx, datum.val), LvalueExpr)
            }
            RvalueExpr(Rvalue { mode: ByRef }) => {
                (Load(bcx, datum.val), RvalueExpr(Rvalue::new(ByRef)))
            }
            RvalueExpr(Rvalue { mode: ByValue }) => {
                (datum.val, RvalueExpr(Rvalue::new(ByRef)))
            }
        };

        let datum = Datum { ty: content_ty, val: llptr, kind: kind };
        DatumBlock { bcx: bcx, datum: datum }
    }
}
2278
/// Arithmetic operations that receive an overflow check when
/// `check_overflow()` is enabled; see `with_overflow_check`.
#[derive(Debug)]
enum OverflowOp {
    Add,
    Sub,
    Mul,
    Shl,
    Shr,
}
2287
2288 impl OverflowOp {
2289 fn codegen_strategy(&self) -> OverflowCodegen {
2290 use self::OverflowCodegen::{ViaIntrinsic, ViaInputCheck};
2291 match *self {
2292 OverflowOp::Add => ViaIntrinsic(OverflowOpViaIntrinsic::Add),
2293 OverflowOp::Sub => ViaIntrinsic(OverflowOpViaIntrinsic::Sub),
2294 OverflowOp::Mul => ViaIntrinsic(OverflowOpViaIntrinsic::Mul),
2295
2296 OverflowOp::Shl => ViaInputCheck(OverflowOpViaInputCheck::Shl),
2297 OverflowOp::Shr => ViaInputCheck(OverflowOpViaInputCheck::Shr),
2298 }
2299 }
2300 }
2301
/// Strategy chosen by `OverflowOp::codegen_strategy`.
enum OverflowCodegen {
    // Use an LLVM `*.with.overflow` intrinsic (add/sub/mul).
    ViaIntrinsic(OverflowOpViaIntrinsic),
    // Validate the input (the RHS shift amount) before the operation.
    ViaInputCheck(OverflowOpViaInputCheck),
}
2306
2307 enum OverflowOpViaInputCheck { Shl, Shr, }
2308
/// Operations lowered to LLVM's `llvm.*.with.overflow.*` intrinsics.
#[derive(Debug)]
enum OverflowOpViaIntrinsic { Add, Sub, Mul, }
2311
impl OverflowOpViaIntrinsic {
    /// Looks up the declared LLVM `*.with.overflow` intrinsic for this
    /// operation at the integer type of `lhs_ty`.
    fn to_intrinsic<'blk, 'tcx>(&self, bcx: Block<'blk, 'tcx>, lhs_ty: Ty) -> ValueRef {
        let name = self.to_intrinsic_name(bcx.tcx(), lhs_ty);
        bcx.ccx().get_intrinsic(&name)
    }
    /// Maps `self` plus the operand's int/uint type to the LLVM
    /// intrinsic name. `isize`/`usize` are first normalized to the
    /// fixed-width integer matching the target's pointer width.
    fn to_intrinsic_name(&self, tcx: &ty::ctxt, ty: Ty) -> &'static str {
        use syntax::ast::IntTy::*;
        use syntax::ast::UintTy::*;
        use middle::ty::{ty_int, ty_uint};

        let new_sty = match ty.sty {
            ty_int(TyIs) => match &tcx.sess.target.target.target_pointer_width[..] {
                "32" => ty_int(TyI32),
                "64" => ty_int(TyI64),
                _ => panic!("unsupported target word size")
            },
            ty_uint(TyUs) => match &tcx.sess.target.target.target_pointer_width[..] {
                "32" => ty_uint(TyU32),
                "64" => ty_uint(TyU64),
                _ => panic!("unsupported target word size")
            },
            ref t @ ty_uint(_) | ref t @ ty_int(_) => t.clone(),
            _ => panic!("tried to get overflow intrinsic for {:?} applied to non-int type",
                        *self)
        };

        match *self {
            OverflowOpViaIntrinsic::Add => match new_sty {
                ty_int(TyI8) => "llvm.sadd.with.overflow.i8",
                ty_int(TyI16) => "llvm.sadd.with.overflow.i16",
                ty_int(TyI32) => "llvm.sadd.with.overflow.i32",
                ty_int(TyI64) => "llvm.sadd.with.overflow.i64",

                ty_uint(TyU8) => "llvm.uadd.with.overflow.i8",
                ty_uint(TyU16) => "llvm.uadd.with.overflow.i16",
                ty_uint(TyU32) => "llvm.uadd.with.overflow.i32",
                ty_uint(TyU64) => "llvm.uadd.with.overflow.i64",

                _ => unreachable!(),
            },
            OverflowOpViaIntrinsic::Sub => match new_sty {
                ty_int(TyI8) => "llvm.ssub.with.overflow.i8",
                ty_int(TyI16) => "llvm.ssub.with.overflow.i16",
                ty_int(TyI32) => "llvm.ssub.with.overflow.i32",
                ty_int(TyI64) => "llvm.ssub.with.overflow.i64",

                ty_uint(TyU8) => "llvm.usub.with.overflow.i8",
                ty_uint(TyU16) => "llvm.usub.with.overflow.i16",
                ty_uint(TyU32) => "llvm.usub.with.overflow.i32",
                ty_uint(TyU64) => "llvm.usub.with.overflow.i64",

                _ => unreachable!(),
            },
            OverflowOpViaIntrinsic::Mul => match new_sty {
                ty_int(TyI8) => "llvm.smul.with.overflow.i8",
                ty_int(TyI16) => "llvm.smul.with.overflow.i16",
                ty_int(TyI32) => "llvm.smul.with.overflow.i32",
                ty_int(TyI64) => "llvm.smul.with.overflow.i64",

                ty_uint(TyU8) => "llvm.umul.with.overflow.i8",
                ty_uint(TyU16) => "llvm.umul.with.overflow.i16",
                ty_uint(TyU32) => "llvm.umul.with.overflow.i32",
                ty_uint(TyU64) => "llvm.umul.with.overflow.i64",

                _ => unreachable!(),
            },
        }
    }

    /// Emits the intrinsic call, unpacks the `(result, overflowed)`
    /// aggregate, and branches to a failure block ("arithmetic
    /// operation overflowed") when the overflow flag is set. The
    /// `llvm.expect.i1` call hints to LLVM that overflow is unlikely.
    fn build_intrinsic_call<'blk, 'tcx>(&self, bcx: Block<'blk, 'tcx>,
                                        info: NodeIdAndSpan,
                                        lhs_t: Ty<'tcx>, lhs: ValueRef,
                                        rhs: ValueRef,
                                        binop_debug_loc: DebugLoc)
                                        -> (Block<'blk, 'tcx>, ValueRef) {
        let llfn = self.to_intrinsic(bcx, lhs_t);

        let val = Call(bcx, llfn, &[lhs, rhs], None, binop_debug_loc);
        let result = ExtractValue(bcx, val, 0); // iN operation result
        let overflow = ExtractValue(bcx, val, 1); // i1 "did it overflow?"

        let cond = ICmp(bcx, llvm::IntEQ, overflow, C_integral(Type::i1(bcx.ccx()), 1, false),
                        binop_debug_loc);

        let expect = bcx.ccx().get_intrinsic(&"llvm.expect.i1");
        Call(bcx, expect, &[cond, C_integral(Type::i1(bcx.ccx()), 0, false)],
             None, binop_debug_loc);

        let bcx =
            base::with_cond(bcx, cond, |bcx|
                controlflow::trans_fail(bcx, info,
                    InternedString::new("arithmetic operation overflowed")));

        (bcx, result)
    }
}
2408
impl OverflowOpViaInputCheck {
    /// Emits the shift together with a check that the RHS shift amount
    /// is in range for the LHS width; an out-of-range amount branches
    /// to a failure block ("shift operation overflowed").
    fn build_with_input_check<'blk, 'tcx>(&self,
                                          bcx: Block<'blk, 'tcx>,
                                          info: NodeIdAndSpan,
                                          lhs_t: Ty<'tcx>,
                                          lhs: ValueRef,
                                          rhs: ValueRef,
                                          binop_debug_loc: DebugLoc)
                                          -> (Block<'blk, 'tcx>, ValueRef)
    {
        let lhs_llty = val_ty(lhs);
        let rhs_llty = val_ty(rhs);

        // Panic if any bits are set outside of bits that we always
        // mask in.
        //
        // Note that the mask's value is derived from the LHS type
        // (since that is where the 32/64 distinction is relevant) but
        // the mask's type must match the RHS type (since they will
        // both be fed into a and-binop)
        let invert_mask = !shift_mask_val(lhs_llty);
        let invert_mask = C_integral(rhs_llty, invert_mask, true);

        let outer_bits = And(bcx, rhs, invert_mask, binop_debug_loc);
        let cond = ICmp(bcx, llvm::IntNE, outer_bits,
                        C_integral(rhs_llty, 0, false), binop_debug_loc);
        // The shift itself still masks the RHS (see build_unchecked_*),
        // so the emitted operation is well-defined even when the check
        // above is going to fire.
        let result = match *self {
            OverflowOpViaInputCheck::Shl =>
                build_unchecked_lshift(bcx, lhs, rhs, binop_debug_loc),
            OverflowOpViaInputCheck::Shr =>
                build_unchecked_rshift(bcx, lhs_t, lhs, rhs, binop_debug_loc),
        };
        let bcx =
            base::with_cond(bcx, cond, |bcx|
                controlflow::trans_fail(bcx, info,
                    InternedString::new("shift operation overflowed")));

        (bcx, result)
    }
}
2449
2450 fn shift_mask_val(llty: Type) -> u64 {
2451 // i8/u8 can shift by at most 7, i16/u16 by at most 15, etc.
2452 llty.int_width() - 1
2453 }
2454
2455 // To avoid UB from LLVM, these two functions mask RHS with an
2456 // appropriate mask unconditionally (i.e. the fallback behavior for
2457 // all shifts). For 32- and 64-bit types, this matches the semantics
2458 // of Java. (See related discussion on #1877 and #10183.)
2459
2460 fn build_unchecked_lshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2461 lhs: ValueRef,
2462 rhs: ValueRef,
2463 binop_debug_loc: DebugLoc) -> ValueRef {
2464 let rhs = base::cast_shift_expr_rhs(bcx, ast::BinOp_::BiShl, lhs, rhs);
2465 // #1877, #10183: Ensure that input is always valid
2466 let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
2467 Shl(bcx, lhs, rhs, binop_debug_loc)
2468 }
2469
2470 fn build_unchecked_rshift<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2471 lhs_t: Ty<'tcx>,
2472 lhs: ValueRef,
2473 rhs: ValueRef,
2474 binop_debug_loc: DebugLoc) -> ValueRef {
2475 let rhs = base::cast_shift_expr_rhs(bcx, ast::BinOp_::BiShr, lhs, rhs);
2476 // #1877, #10183: Ensure that input is always valid
2477 let rhs = shift_mask_rhs(bcx, rhs, binop_debug_loc);
2478 let is_signed = ty::type_is_signed(lhs_t);
2479 if is_signed {
2480 AShr(bcx, lhs, rhs, binop_debug_loc)
2481 } else {
2482 LShr(bcx, lhs, rhs, binop_debug_loc)
2483 }
2484 }
2485
2486 fn shift_mask_rhs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2487 rhs: ValueRef,
2488 debug_loc: DebugLoc) -> ValueRef {
2489 let rhs_llty = val_ty(rhs);
2490 let mask = shift_mask_val(rhs_llty);
2491 And(bcx, rhs, C_integral(rhs_llty, mask, false), debug_loc)
2492 }
2493
2494 fn with_overflow_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, oop: OverflowOp, info: NodeIdAndSpan,
2495 lhs_t: Ty<'tcx>, lhs: ValueRef,
2496 rhs: ValueRef,
2497 binop_debug_loc: DebugLoc)
2498 -> (Block<'blk, 'tcx>, ValueRef) {
2499 if bcx.unreachable.get() { return (bcx, _Undef(lhs)); }
2500 if bcx.ccx().check_overflow() {
2501
2502 match oop.codegen_strategy() {
2503 OverflowCodegen::ViaIntrinsic(oop) =>
2504 oop.build_intrinsic_call(bcx, info, lhs_t, lhs, rhs, binop_debug_loc),
2505 OverflowCodegen::ViaInputCheck(oop) =>
2506 oop.build_with_input_check(bcx, info, lhs_t, lhs, rhs, binop_debug_loc),
2507 }
2508 } else {
2509 let res = match oop {
2510 OverflowOp::Add => Add(bcx, lhs, rhs, binop_debug_loc),
2511 OverflowOp::Sub => Sub(bcx, lhs, rhs, binop_debug_loc),
2512 OverflowOp::Mul => Mul(bcx, lhs, rhs, binop_debug_loc),
2513
2514 OverflowOp::Shl =>
2515 build_unchecked_lshift(bcx, lhs, rhs, binop_debug_loc),
2516 OverflowOp::Shr =>
2517 build_unchecked_rshift(bcx, lhs_t, lhs, rhs, binop_debug_loc),
2518 };
2519 (bcx, res)
2520 }
2521 }