// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, ValueRef, BasicBlockRef};
use rustc::middle::lang_items;
use rustc::middle::const_val::{ConstEvalErr, ConstInt, ErrKind};
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::ty::layout::{self, LayoutTyper};
use rustc::traits;
use rustc::mir;
use abi::{Abi, FnType, ArgType};
use adt;
use base::{self, Lifetime};
use callee;
use builder::Builder;
use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use machine::llalign_of_min;
use meth;
use monomorphize;
use type_of;
use type_::Type;

use syntax::symbol::Symbol;
use syntax_pos::Pos;

use std::cmp;

use super::{MirContext, LocalRef};
use super::constant::Const;
use super::lvalue::{Alignment, LvalueRef};
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

impl<'a, 'tcx> MirContext<'a, 'tcx> {
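    /// Translates the given MIR basic block: each of its statements in
    /// order, then its terminator.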
    pub fn trans_block(&mut self, bb: mir::BasicBlock) {
        let mut bcx = self.get_builder(bb);
        let data = &self.mir[bb];

        debug!("trans_block({:?}={:?})", bb, data);

        for statement in &data.statements {
            bcx = self.trans_statement(bcx, statement);
        }

        self.trans_terminator(bcx, bb, data.terminator());
    }

    fn trans_terminator(&mut self,
                        mut bcx: Builder<'a, 'tcx>,
                        bb: mir::BasicBlock,
                        terminator: &mir::Terminator<'tcx>)
    {
        debug!("trans_terminator: {:?}", terminator);

        // Create the cleanup bundle, if needed.
        let tcx = bcx.tcx();
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());

        let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
        let cleanup_bundle = funclet.map(|l| l.bundle());

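        // Resolve the LLVM block for a MIR target, inserting a landing pad
        // when the edge jumps into a cleanup funclet. The boolean in the
        // returned pair is true for MSVC-style cross-funclet edges, which
        // must be lowered as `cleanupret` rather than a plain branch.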
        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                    => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
                }
            }
        };

        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline

                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let trampoline = this.new_block(name);
                trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };

        let funclet_br = |this: &mut Self, bcx: Builder, target: mir::BasicBlock| {
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // micro-optimization: generate a `ret` rather than a jump
                // to a trampoline.
                bcx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
            } else {
                bcx.br(lltarget);
            }
        };

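        // Emit a call to `fn_ptr`: as an `invoke` with an unwind edge when a
        // cleanup block is supplied, otherwise as a plain `call`. Afterwards
        // the return value is stored into `destination` and control branches
        // to the target block (or becomes unreachable for diverging calls).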
        let do_call = |
            this: &mut Self,
            bcx: Builder<'a, 'tcx>,
            fn_ty: FnType<'tcx>,
            fn_ptr: ValueRef,
            llargs: &[ValueRef],
            destination: Option<(ReturnDest, Ty<'tcx>, mir::BasicBlock)>,
            cleanup: Option<mir::BasicBlock>
        | {
            if let Some(cleanup) = cleanup {
                let ret_bcx = if let Some((_, _, target)) = destination {
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
                let invokeret = bcx.invoke(fn_ptr,
                                           &llargs,
                                           ret_bcx,
                                           llblock(this, cleanup),
                                           cleanup_bundle);
                fn_ty.apply_attrs_callsite(invokeret);

                if let Some((ret_dest, ret_ty, target)) = destination {
                    let ret_bcx = this.get_builder(target);
                    this.set_debug_loc(&ret_bcx, terminator.source_info);
                    let op = OperandRef {
                        val: Immediate(invokeret),
                        ty: ret_ty,
                    };
                    this.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
                }
            } else {
                let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
                fn_ty.apply_attrs_callsite(llret);
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
                }

                if let Some((ret_dest, ret_ty, target)) = destination {
                    let op = OperandRef {
                        val: Immediate(llret),
                        ty: ret_ty,
                    };
                    this.store_return(&bcx, ret_dest, &fn_ty.ret, op);
                    funclet_br(this, bcx, target);
                } else {
                    bcx.unreachable();
                }
            }
        };

        self.set_debug_loc(&bcx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::Resume => {
                if let Some(cleanup_pad) = cleanup_pad {
                    bcx.cleanup_ret(cleanup_pad, None);
                } else {
                    let ps = self.get_personality_slot(&bcx);
                    let lp = bcx.load(ps, None);
                    Lifetime::End.call(&bcx, ps);
                    if !bcx.sess().target.target.options.custom_unwind_resume {
                        bcx.resume(lp);
                    } else {
                        let exc_ptr = bcx.extract_value(lp, 0);
                        bcx.call(bcx.ccx.eh_unwind_resume(), &[exc_ptr], cleanup_bundle);
                        bcx.unreachable();
                    }
                }
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bcx, target);
            }

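            // A `SwitchInt` on a `bool` discriminant becomes a conditional
            // branch; anything else becomes an LLVM `switch` whose default
            // edge is the last ("otherwise") target.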
            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let discr = self.trans_operand(&bcx, discr);
                if switch_ty == bcx.tcx().types.bool {
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
                    if let [ConstInt::U8(0)] = values[..] {
                        bcx.cond_br(discr.immediate(), llfalse, lltrue);
                    } else {
                        bcx.cond_br(discr.immediate(), lltrue, llfalse);
                    }
                } else {
                    let (otherwise, targets) = targets.split_last().unwrap();
                    let switch = bcx.switch(discr.immediate(),
                                            llblock(self, *otherwise), values.len());
                    for (value, target) in values.iter().zip(targets) {
                        let val = Const::from_constint(bcx.ccx, value);
                        let llbb = llblock(self, *target);
                        bcx.add_case(switch, val.llval, llbb)
                    }
                }
            }

            mir::TerminatorKind::Return => {
                let ret = self.fn_ty.ret;
                if ret.is_ignore() || ret.is_indirect() {
                    bcx.ret_void();
                    return;
                }

                let llval = if let Some(cast_ty) = ret.cast {
                    let op = match self.locals[mir::RETURN_POINTER] {
                        LocalRef::Operand(Some(op)) => op,
                        LocalRef::Operand(None) => bug!("use of return before def"),
                        LocalRef::Lvalue(tr_lvalue) => {
                            OperandRef {
                                val: Ref(tr_lvalue.llval, tr_lvalue.alignment),
                                ty: tr_lvalue.ty.to_ty(bcx.tcx())
                            }
                        }
                    };
                    let llslot = match op.val {
                        Immediate(_) | Pair(..) => {
                            let llscratch = bcx.alloca(ret.memory_ty(bcx.ccx), "ret", None);
                            self.store_operand(&bcx, llscratch, None, op);
                            llscratch
                        }
                        Ref(llval, align) => {
                            assert_eq!(align, Alignment::AbiAligned,
                                       "return pointer is unaligned!");
                            llval
                        }
                    };
                    let load = bcx.load(
                        bcx.pointercast(llslot, cast_ty.ptr_to()),
                        Some(ret.layout.align(bcx.ccx).abi() as u32));
                    load
                } else {
                    let op = self.trans_consume(&bcx, &mir::Lvalue::Local(mir::RETURN_POINTER));
                    if let Ref(llval, align) = op.val {
                        base::load_ty(&bcx, llval, align, op.ty)
                    } else {
                        op.pack_if_pair(&bcx).immediate()
                    }
                };
                bcx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bcx.unreachable();
            }

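            // `Drop` lowers to a call to the monomorphized drop glue for the
            // dropped type; trivial drop glue is skipped entirely.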
            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(self.mir, bcx.tcx()).to_ty(bcx.tcx());
                let ty = self.monomorphize(&ty);
                let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.tcx(), ty);

                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
                    funclet_br(self, bcx, target);
                    return
                }

                let lvalue = self.trans_lvalue(&bcx, location);
                let fn_ty = FnType::of_instance(bcx.ccx, &drop_fn);
                let (drop_fn, need_extra) = match ty.sty {
                    ty::TyDynamic(..) => (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra),
                                          false),
                    _ => (callee::get_fn(bcx.ccx, drop_fn), lvalue.has_extra())
                };
                let args = &[lvalue.llval, lvalue.llextra][..1 + need_extra as usize];
                do_call(self, bcx, fn_ty, drop_fn, args,
                        Some((ReturnDest::Nothing, tcx.mk_nil(), target)),
                        unwind);
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.trans_operand(&bcx, cond).immediate();
                let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bcx.ccx.check_overflow() {
                    use rustc_const_math::ConstMathErr::Overflow;
                    use rustc_const_math::Op::Neg;

                    if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't translate the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bcx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bcx.ccx.get_intrinsic(&"llvm.expect.i1");
                let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx, expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
                if expected {
                    bcx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bcx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bcx is the block for the call to panic.
                bcx = panic_block;
                self.set_debug_loc(&bcx, terminator.source_info);

                // Get the location information.
                let loc = bcx.sess().codemap().lookup_char_pos(span.lo());
                let filename = Symbol::intern(&loc.file.name).as_str();
                let filename = C_str_slice(bcx.ccx, filename);
                let line = C_u32(bcx.ccx, loc.line as u32);
                let col = C_u32(bcx.ccx, loc.col.to_usize() as u32 + 1);

                // Put together the arguments to the panic entry point.
                let (lang_item, args, const_err) = match *msg {
                    mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                        let len = self.trans_operand(&mut bcx, len).immediate();
                        let index = self.trans_operand(&mut bcx, index).immediate();

                        let const_err = common::const_to_opt_u128(len, false)
                            .and_then(|len| common::const_to_opt_u128(index, false)
                                .map(|index| ErrKind::IndexOutOfBounds {
                                    len: len as u64,
                                    index: index as u64
                                }));

                        let file_line_col = C_struct(bcx.ccx, &[filename, line, col], false);
                        let align = llalign_of_min(bcx.ccx, common::val_ty(file_line_col));
                        let file_line_col = consts::addr_of(bcx.ccx,
                                                            file_line_col,
                                                            align,
                                                            "panic_bounds_check_loc");
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line_col, index, len],
                         const_err)
                    }
                    mir::AssertMessage::Math(ref err) => {
                        let msg_str = Symbol::intern(err.description()).as_str();
                        let msg_str = C_str_slice(bcx.ccx, msg_str);
                        let msg_file_line_col = C_struct(bcx.ccx,
                                                         &[msg_str, filename, line, col],
                                                         false);
                        let align = llalign_of_min(bcx.ccx, common::val_ty(msg_file_line_col));
                        let msg_file_line_col = consts::addr_of(bcx.ccx,
                                                                msg_file_line_col,
                                                                align,
                                                                "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col],
                         Some(ErrKind::Math(err.clone())))
                    }
                    mir::AssertMessage::GeneratorResumedAfterReturn |
                    mir::AssertMessage::GeneratorResumedAfterPanic => {
                        let str = if let mir::AssertMessage::GeneratorResumedAfterReturn = *msg {
                            "generator resumed after completion"
                        } else {
                            "generator resumed after panicking"
                        };
                        let msg_str = Symbol::intern(str).as_str();
                        let msg_str = C_str_slice(bcx.ccx, msg_str);
                        let msg_file_line_col = C_struct(bcx.ccx,
                                                         &[msg_str, filename, line, col],
                                                         false);
                        let align = llalign_of_min(bcx.ccx, common::val_ty(msg_file_line_col));
                        let msg_file_line_col = consts::addr_of(bcx.ccx,
                                                                msg_file_line_col,
                                                                align,
                                                                "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line_col],
                         None)
                    }
                };

                // If we know we always panic, and the error message
                // is also constant, then we can produce a warning.
                if const_cond == Some(!expected) {
                    if let Some(err) = const_err {
                        let err = ConstEvalErr{ span: span, kind: err };
                        let mut diag = bcx.tcx().sess.struct_span_warn(
                            span, "this expression will panic at run-time");
                        err.note(bcx.tcx(), span, "expression", &mut diag);
                        diag.emit();
                    }
                }

                // Obtain the panic entry point.
                let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bcx.tcx(), def_id);
                let fn_ty = FnType::of_instance(bcx.ccx, &instance);
                let llfn = callee::get_fn(bcx.ccx, instance);

                // Translate the actual panic invoke/call.
                do_call(self, bcx, fn_ty, llfn, &args, None, cleanup);
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in trans: {:?}", terminator);
            }

            mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.trans_operand(&bcx, func);

                let (instance, mut llfn) = match callee.ty.sty {
                    ty::TyFnDef(def_id, substs) => {
                        (Some(ty::Instance::resolve(bcx.ccx.tcx(),
                                                    ty::ParamEnv::empty(traits::Reveal::All),
                                                    def_id,
                                                    substs).unwrap()),
                         None)
                    }
                    ty::TyFnPtr(_) => {
                        (None, Some(callee.immediate()))
                    }
                    _ => bug!("{} is not callable", callee.ty)
                };
                let def = instance.map(|i| i.def);
                let sig = callee.ty.fn_sig(bcx.tcx());
                let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
                let abi = sig.abi;

                // Handle intrinsics old trans wants Expr's for, ourselves.
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
                        => Some(bcx.tcx().item_name(def_id)),
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

                if intrinsic == Some("transmute") {
                    let &(ref dest, target) = destination.as_ref().unwrap();
                    self.trans_transmute(&bcx, &args[0], dest);
                    funclet_br(self, bcx, target);
                    return;
                }

                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bcx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        FnType::new_vtable(bcx.ccx, sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, bcx, target);
                        return;
                    }
                    _ => FnType::new(bcx.ccx, sig, &extra_args)
                };

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = intrinsic.is_some();
                    self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs,
                                          is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                let is_shuffle = intrinsic.map_or(false, |name| {
                    name.starts_with("simd_shuffle")
                });
                let mut idx = 0;
                for arg in first_args {
                    // The indices passed to simd_shuffle* in the
                    // third argument must be constant. This is
                    // checked by const-qualification, which also
                    // promotes any complex rvalues to constants.
                    if is_shuffle && idx == 2 {
                        match *arg {
                            mir::Operand::Consume(_) => {
                                span_bug!(span, "shuffle indices must be constant");
                            }
                            mir::Operand::Constant(ref constant) => {
                                let val = self.trans_constant(&bcx, constant);
                                llargs.push(val.llval);
                                idx += 1;
                                continue;
                            }
                        }
                    }

                    let mut op = self.trans_operand(&bcx, arg);

                    // The callee needs to own the argument memory if we pass it
                    // by-ref, so make a local copy of non-immediate constants.
                    if let (&mir::Operand::Constant(_), Ref(..)) = (arg, op.val) {
                        let tmp = LvalueRef::alloca(&bcx, op.ty, "const");
                        self.store_operand(&bcx, tmp.llval, tmp.alignment.to_align(), op);
                        op.val = Ref(tmp.llval, tmp.alignment);
                    }

                    self.trans_argument(&bcx, op, &mut llargs, &fn_ty,
                                        &mut idx, &mut llfn, &def);
                }
                if let Some(tup) = untuple {
                    self.trans_arguments_untupled(&bcx, tup, &mut llargs, &fn_ty,
                                                  &mut idx, &mut llfn, &def)
                }

                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
                    use intrinsic::trans_intrinsic_call;

                    let (dest, llargs) = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => {
                            (llargs[0], &llargs[1..])
                        }
                        ReturnDest::Nothing => {
                            (C_undef(fn_ty.ret.memory_ty(bcx.ccx).ptr_to()), &llargs[..])
                        }
                        ReturnDest::IndirectOperand(dst, _) |
                        ReturnDest::Store(dst) => (dst, &llargs[..]),
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };

                    let callee_ty = common::instance_ty(
                        bcx.ccx.tcx(), instance.as_ref().unwrap());
                    trans_intrinsic_call(&bcx, callee_ty, &fn_ty, &llargs, dest,
                                         terminator.source_info.span);

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                        // Make a fake operand for store_return
                        let op = OperandRef {
                            val: Ref(dst, Alignment::AbiAligned),
                            ty: sig.output(),
                        };
                        self.store_return(&bcx, ret_dest, &fn_ty.ret, op);
                    }

                    if let Some((_, target)) = *destination {
                        funclet_br(self, bcx, target);
                    } else {
                        bcx.unreachable();
                    }

                    return;
                }

                let fn_ptr = match (llfn, instance) {
                    (Some(llfn), _) => llfn,
                    (None, Some(instance)) => callee::get_fn(bcx.ccx, instance),
                    _ => span_bug!(span, "no llfn for call"),
                };

                do_call(self, bcx, fn_ty, fn_ptr, &llargs,
                        destination.as_ref().map(|&(_, target)| (ret_dest, sig.output(), target)),
                        cleanup);
            }
            mir::TerminatorKind::GeneratorDrop |
            mir::TerminatorKind::Yield { .. } |
            mir::TerminatorKind::FalseEdges { .. } => bug!("generator ops in trans"),
        }
    }

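    /// Lowers a single call argument into `llargs`, following the ABI
    /// classification in `fn_ty.args[*next_idx]`. Fat pointers are split into
    /// their data and metadata parts; the first argument of a virtual call is
    /// also used to look the callee up in the vtable.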
    fn trans_argument(&mut self,
                      bcx: &Builder<'a, 'tcx>,
                      op: OperandRef<'tcx>,
                      llargs: &mut Vec<ValueRef>,
                      fn_ty: &FnType<'tcx>,
                      next_idx: &mut usize,
                      llfn: &mut Option<ValueRef>,
                      def: &Option<ty::InstanceDef<'tcx>>) {
        if let Pair(a, b) = op.val {
            // Treat the values in a fat pointer separately.
            if common::type_is_fat_ptr(bcx.ccx, op.ty) {
                let (ptr, meta) = (a, b);
                if *next_idx == 0 {
                    if let Some(ty::InstanceDef::Virtual(_, idx)) = *def {
                        let llmeth = meth::VirtualIndex::from_index(idx).get_fn(bcx, meta);
                        let llty = fn_ty.llvm_type(bcx.ccx).ptr_to();
                        *llfn = Some(bcx.pointercast(llmeth, llty));
                    }
                }

                let imm_op = |x| OperandRef {
                    val: Immediate(x),
                    // We won't be checking the type again.
                    ty: bcx.tcx().types.err
                };
                self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, llfn, def);
                self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, llfn, def);
                return;
            }
        }

        let arg = &fn_ty.args[*next_idx];
        *next_idx += 1;

        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty));
        }

        if arg.is_ignore() {
            return;
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                if arg.is_indirect() || arg.cast.is_some() {
                    let llscratch = bcx.alloca(arg.memory_ty(bcx.ccx), "arg", None);
                    self.store_operand(bcx, llscratch, None, op);
                    (llscratch, Alignment::AbiAligned, true)
                } else {
                    (op.pack_if_pair(bcx).immediate(), Alignment::AbiAligned, false)
                }
            }
            Ref(llval, Alignment::Packed) if arg.is_indirect() => {
                // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                // have scary latent bugs around.

                let llscratch = bcx.alloca(arg.memory_ty(bcx.ccx), "arg", None);
                base::memcpy_ty(bcx, llscratch, llval, op.ty, Some(1));
                (llscratch, Alignment::AbiAligned, true)
            }
            Ref(llval, align) => (llval, align, true)
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if arg.layout.ty == bcx.tcx().types.bool {
                // We store bools as i8 so we need to truncate to i1.
                llval = bcx.load_range_assert(llval, 0, 2, llvm::False, None);
                llval = bcx.trunc(llval, Type::i1(bcx.ccx));
            } else if let Some(ty) = arg.cast {
                llval = bcx.load(bcx.pointercast(llval, ty.ptr_to()),
                                 align.min_with(arg.layout.align(bcx.ccx).abi() as u32));
            } else {
                llval = bcx.load(llval, align.to_align());
            }
        }

        llargs.push(llval);
    }

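    /// Expands the tupled final argument of a "rust-call" ABI function into
    /// individual call arguments, handling by-ref, immediate, and pair tuples.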
    fn trans_arguments_untupled(&mut self,
                                bcx: &Builder<'a, 'tcx>,
                                operand: &mir::Operand<'tcx>,
                                llargs: &mut Vec<ValueRef>,
                                fn_ty: &FnType<'tcx>,
                                next_idx: &mut usize,
                                llfn: &mut Option<ValueRef>,
                                def: &Option<ty::InstanceDef<'tcx>>) {
        let tuple = self.trans_operand(bcx, operand);

        let arg_types = match tuple.ty.sty {
            ty::TyTuple(ref tys, _) => tys,
            _ => span_bug!(self.mir.span,
                           "bad final argument to \"rust-call\" fn {:?}", tuple.ty)
        };

        // Handle both by-ref and immediate tuples.
        match tuple.val {
            Ref(llval, align) => {
                for (n, &ty) in arg_types.iter().enumerate() {
                    let ptr = LvalueRef::new_sized_ty(llval, tuple.ty, align);
                    let (ptr, align) = ptr.trans_field_ptr(bcx, n);
                    let val = if common::type_is_fat_ptr(bcx.ccx, ty) {
                        let (lldata, llextra) = base::load_fat_ptr(bcx, ptr, align, ty);
                        Pair(lldata, llextra)
                    } else {
                        // trans_argument will load this if it needs to
                        Ref(ptr, align)
                    };
                    let op = OperandRef {
                        val,
                        ty,
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
                }

            }
            Immediate(llval) => {
                let l = bcx.ccx.layout_of(tuple.ty);
                let v = if let layout::Univariant { ref variant, .. } = *l {
                    variant
                } else {
                    bug!("Not a tuple.");
                };
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = bcx.extract_value(
                        llval, adt::struct_llfields_index(v, n));
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx));
                    }
                    // If the tuple is immediate, the elements are as well
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty,
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
                }
            }
            Pair(a, b) => {
                let elems = [a, b];
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = elems[n];
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx));
                    }
                    // Pair is always made up of immediates
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty,
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
                }
            }
        }

    }

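    /// Returns (lazily creating) the alloca that holds the landing pad's
    /// personality value (exception pointer and selector) across cleanups.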
    fn get_personality_slot(&mut self, bcx: &Builder<'a, 'tcx>) -> ValueRef {
        let ccx = bcx.ccx;
        if let Some(slot) = self.llpersonalityslot {
            slot
        } else {
            let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
            let slot = bcx.alloca(llretty, "personalityslot", None);
            self.llpersonalityslot = Some(slot);
            slot
        }
    }

    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

    fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
        if base::wants_msvc_seh(self.ccx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let bcx = self.new_block("cleanup");

        let ccx = bcx.ccx;
        let llpersonality = self.ccx.eh_personality();
        let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
        let llretval = bcx.landing_pad(llretty, llpersonality, 1, self.llfn);
        bcx.set_cleanup(llretval);
        let slot = self.get_personality_slot(&bcx);
        Lifetime::Start.call(&bcx, slot);
        bcx.store(llretval, slot, None);
        bcx.br(target_bb);
        bcx.llbb()
    }

    fn unreachable_block(&mut self) -> BasicBlockRef {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.new_block("unreachable");
            bl.unreachable();
            self.unreachable_block = Some(bl.llbb());
            bl.llbb()
        })
    }

    pub fn new_block(&self, name: &str) -> Builder<'a, 'tcx> {
        Builder::new_block(self.ccx, self.llfn, name)
    }

    pub fn get_builder(&self, bb: mir::BasicBlock) -> Builder<'a, 'tcx> {
        let builder = Builder::with_ccx(self.ccx);
        builder.position_at_end(self.blocks[bb]);
        builder
    }

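    /// Decides where a call should write its return value and, for indirect
    /// returns, pushes the out-pointer onto `llargs`.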
    fn make_return_dest(&mut self, bcx: &Builder<'a, 'tcx>,
                        dest: &mir::Lvalue<'tcx>, fn_ret_ty: &ArgType,
                        llargs: &mut Vec<ValueRef>, is_intrinsic: bool) -> ReturnDest {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret_ty.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Lvalue::Local(index) = *dest {
            let ret_ty = self.monomorphized_lvalue_ty(dest);
            match self.locals[index] {
                LocalRef::Lvalue(dest) => dest,
                LocalRef::Operand(None) => {
                    // Handle temporary lvalues, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret_ty.is_indirect() {
                        // Odd, but possible, case, we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = LvalueRef::alloca(bcx, ret_ty, "tmp_ret");
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp.llval, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result.
                        let tmp = LvalueRef::alloca(bcx, ret_ty, "tmp_ret");
                        ReturnDest::IndirectOperand(tmp.llval, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("lvalue local already assigned to");
                }
            }
        } else {
            self.trans_lvalue(bcx, dest)
        };
        if fn_ret_ty.is_indirect() {
            match dest.alignment {
                Alignment::AbiAligned => {
                    llargs.push(dest.llval);
                    ReturnDest::Nothing
                },
                Alignment::Packed => {
                    // Currently, MIR code generation does not create calls
                    // that store directly to fields of packed structs (in
                    // fact, the calls it creates write only to temps).
                    //
                    // If someone changes that, please update this code path
                    // to create a temporary.
                    span_bug!(self.mir.span, "can't directly store to unaligned value");
                }
            }
        } else {
            ReturnDest::Store(dest.llval)
        }
    }

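    /// Translates a `transmute` intrinsic call: writes the reinterpreted
    /// source operand into `dst`, materialising an alloca when the
    /// destination local is an operand without backing storage.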
    fn trans_transmute(&mut self, bcx: &Builder<'a, 'tcx>,
                       src: &mir::Operand<'tcx>,
                       dst: &mir::Lvalue<'tcx>) {
        if let mir::Lvalue::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Lvalue(lvalue) => self.trans_transmute_into(bcx, src, &lvalue),
                LocalRef::Operand(None) => {
                    let lvalue_ty = self.monomorphized_lvalue_ty(dst);
                    assert!(!lvalue_ty.has_erasable_regions());
                    let lvalue = LvalueRef::alloca(bcx, lvalue_ty, "transmute_temp");
                    self.trans_transmute_into(bcx, src, &lvalue);
                    let op = self.trans_load(bcx, lvalue.llval, lvalue.alignment, lvalue_ty);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(_)) => {
                    let ty = self.monomorphized_lvalue_ty(dst);
                    assert!(common::type_is_zero_size(bcx.ccx, ty),
                            "assigning to initialized SSAtemp");
                }
            }
        } else {
            let dst = self.trans_lvalue(bcx, dst);
            self.trans_transmute_into(bcx, src, &dst);
        }
    }

    fn trans_transmute_into(&mut self, bcx: &Builder<'a, 'tcx>,
                            src: &mir::Operand<'tcx>,
                            dst: &LvalueRef<'tcx>) {
        let val = self.trans_operand(bcx, src);
        let llty = type_of::type_of(bcx.ccx, val.ty);
        let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
        let in_type = val.ty;
        let out_type = dst.ty.to_ty(bcx.tcx());
        let llalign = cmp::min(bcx.ccx.align_of(in_type), bcx.ccx.align_of(out_type));
        self.store_operand(bcx, cast_ptr, Some(llalign), val);
    }

54a0048b SL |
923 | // Stores the return value of a function call into it's final location. |
924 | fn store_return(&mut self, | |
32a655c1 | 925 | bcx: &Builder<'a, 'tcx>, |
54a0048b | 926 | dest: ReturnDest, |
cc61c64b | 927 | ret_ty: &ArgType<'tcx>, |
54a0048b SL |
928 | op: OperandRef<'tcx>) { |
929 | use self::ReturnDest::*; | |
930 | ||
931 | match dest { | |
932 | Nothing => (), | |
933 | Store(dst) => ret_ty.store(bcx, op.immediate(), dst), | |
3157f602 | 934 | IndirectOperand(tmp, index) => { |
32a655c1 | 935 | let op = self.trans_load(bcx, tmp, Alignment::AbiAligned, op.ty); |
3157f602 | 936 | self.locals[index] = LocalRef::Operand(Some(op)); |
54a0048b | 937 | } |
3157f602 XL |
938 | DirectOperand(index) => { |
939 | // If there is a cast, we have to store and reload. | |
940 | let op = if ret_ty.cast.is_some() { | |
32a655c1 SL |
941 | let tmp = LvalueRef::alloca(bcx, op.ty, "tmp_ret"); |
942 | ret_ty.store(bcx, op.immediate(), tmp.llval); | |
943 | self.trans_load(bcx, tmp.llval, tmp.alignment, op.ty) | |
54a0048b | 944 | } else { |
3157f602 | 945 | op.unpack_if_pair(bcx) |
54a0048b | 946 | }; |
3157f602 | 947 | self.locals[index] = LocalRef::Operand(Some(op)); |
54a0048b SL |
948 | } |
949 | } | |
950 | } | |
951 | } | |
952 | ||
enum ReturnDest {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(ValueRef),
    // Stores an indirect return value to an operand local lvalue
    IndirectOperand(ValueRef, mir::Local),
    // Stores a direct return value to an operand local lvalue
    DirectOperand(mir::Local)
}