]> git.proxmox.com Git - rustc.git/blob - src/librustc_trans/mir/analyze.rs
New upstream version 1.22.1+dfsg1
[rustc.git] / src / librustc_trans / mir / analyze.rs
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! An analysis to determine which locals require allocas and
12 //! which do not.
13
14 use rustc_data_structures::bitvec::BitVector;
15 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
16 use rustc::middle::const_val::ConstVal;
17 use rustc::mir::{self, Location, TerminatorKind, Literal};
18 use rustc::mir::visit::{Visitor, LvalueContext};
19 use rustc::mir::traversal;
20 use rustc::ty;
21 use common;
22 use super::MirContext;
23
24 pub fn lvalue_locals<'a, 'tcx>(mircx: &MirContext<'a, 'tcx>) -> BitVector {
25 let mir = mircx.mir;
26 let mut analyzer = LocalAnalyzer::new(mircx);
27
28 analyzer.visit_mir(mir);
29
30 for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
31 let ty = mircx.monomorphize(&ty);
32 debug!("local {} has type {:?}", index, ty);
33 if ty.is_scalar() ||
34 ty.is_box() ||
35 ty.is_region_ptr() ||
36 ty.is_simd() ||
37 common::type_is_zero_size(mircx.ccx, ty)
38 {
39 // These sorts of types are immediates that we can store
40 // in an ValueRef without an alloca.
41 assert!(common::type_is_immediate(mircx.ccx, ty) ||
42 common::type_is_fat_ptr(mircx.ccx, ty));
43 } else if common::type_is_imm_pair(mircx.ccx, ty) {
44 // We allow pairs and uses of any of their 2 fields.
45 } else {
46 // These sorts of types require an alloca. Note that
47 // type_is_immediate() may *still* be true, particularly
48 // for newtypes, but we currently force some types
49 // (e.g. structs) into an alloca unconditionally, just so
50 // that we don't have to deal with having two pathways
51 // (gep vs extractvalue etc).
52 analyzer.mark_as_lvalue(mir::Local::new(index));
53 }
54 }
55
56 analyzer.lvalue_locals
57 }
58
/// MIR visitor that flags locals whose uses require them to live in an
/// alloca ("lvalue" locals) instead of being handled as operands.
struct LocalAnalyzer<'mir, 'a: 'mir, 'tcx: 'a> {
    cx: &'mir MirContext<'a, 'tcx>,
    // Locals that must get an alloca, indexed by `Local::index()`.
    lvalue_locals: BitVector,
    // Locals that have been assigned at least once; a second assignment
    // promotes the local into `lvalue_locals` (see `mark_assigned`).
    seen_assigned: BitVector
}
64
65 impl<'mir, 'a, 'tcx> LocalAnalyzer<'mir, 'a, 'tcx> {
66 fn new(mircx: &'mir MirContext<'a, 'tcx>) -> LocalAnalyzer<'mir, 'a, 'tcx> {
67 LocalAnalyzer {
68 cx: mircx,
69 lvalue_locals: BitVector::new(mircx.mir.local_decls.len()),
70 seen_assigned: BitVector::new(mircx.mir.local_decls.len())
71 }
72 }
73
74 fn mark_as_lvalue(&mut self, local: mir::Local) {
75 debug!("marking {:?} as lvalue", local);
76 self.lvalue_locals.insert(local.index());
77 }
78
79 fn mark_assigned(&mut self, local: mir::Local) {
80 if !self.seen_assigned.insert(local.index()) {
81 self.mark_as_lvalue(local);
82 }
83 }
84 }
85
impl<'mir, 'a, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'a, 'tcx> {
    // Assignments are handled specially: a direct assignment to a local
    // records the assignment (and forces an alloca if the rvalue cannot
    // be built as an operand), while assignments through projections are
    // treated as ordinary `Store` uses of the destination lvalue.
    fn visit_assign(&mut self,
                    block: mir::BasicBlock,
                    lvalue: &mir::Lvalue<'tcx>,
                    rvalue: &mir::Rvalue<'tcx>,
                    location: Location) {
        debug!("visit_assign(block={:?}, lvalue={:?}, rvalue={:?})", block, lvalue, rvalue);

        if let mir::Lvalue::Local(index) = *lvalue {
            self.mark_assigned(index);
            if !self.cx.rvalue_creates_operand(rvalue) {
                // The rvalue must be materialized in memory, so the
                // destination local needs an alloca.
                self.mark_as_lvalue(index);
            }
        } else {
            self.visit_lvalue(lvalue, LvalueContext::Store, location);
        }

        // NOTE: we intentionally do not call `super_assign` here; the
        // destination was already handled above, so only the rvalue's
        // operands still need to be visited.
        self.visit_rvalue(rvalue, location);
    }

    fn visit_terminator_kind(&mut self,
                             block: mir::BasicBlock,
                             kind: &mir::TerminatorKind<'tcx>,
                             location: Location) {
        match *kind {
            // Special-case calls to the `box_free` lang item: treat the
            // freed value like a drop of its argument.
            mir::TerminatorKind::Call {
                func: mir::Operand::Constant(box mir::Constant {
                    literal: Literal::Value {
                        value: &ty::Const { val: ConstVal::Function(def_id, _), .. }, ..
                    }, ..
                }),
                ref args, ..
            } if Some(def_id) == self.cx.ccx.tcx().lang_items().box_free_fn() => {
                // box_free(x) shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of x, so x must always be in an alloca.
                if let mir::Operand::Consume(ref lvalue) = args[0] {
                    self.visit_lvalue(lvalue, LvalueContext::Drop, location);
                }
            }
            _ => {}
        }

        self.super_terminator_kind(block, kind, location);
    }

    fn visit_lvalue(&mut self,
                    lvalue: &mir::Lvalue<'tcx>,
                    context: LvalueContext<'tcx>,
                    location: Location) {
        debug!("visit_lvalue(lvalue={:?}, context={:?})", lvalue, context);

        if let mir::Lvalue::Projection(ref proj) = *lvalue {
            // Allow uses of projections of immediate pair fields: a
            // consuming field access on a local of immediate-pair type
            // does not force the local into an alloca, so return early
            // without visiting the base local at all.
            if let LvalueContext::Consume = context {
                if let mir::Lvalue::Local(_) = proj.base {
                    if let mir::ProjectionElem::Field(..) = proj.elem {
                        let ty = proj.base.ty(self.cx.mir, self.cx.ccx.tcx());

                        let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx()));
                        if common::type_is_imm_pair(self.cx.ccx, ty) {
                            return;
                        }
                    }
                }
            }

            // A deref projection only reads the pointer, never needs the lvalue.
            if let mir::ProjectionElem::Deref = proj.elem {
                return self.visit_lvalue(&proj.base, LvalueContext::Consume, location);
            }
        }

        self.super_lvalue(lvalue, context, location);
    }

    // Classifies each direct use of a local by context to decide whether
    // it forces the local into an alloca.
    fn visit_local(&mut self,
                   &index: &mir::Local,
                   context: LvalueContext<'tcx>,
                   _: Location) {
        match context {
            // A call writes its return value into the local; count it as
            // an assignment (two assignments => alloca).
            LvalueContext::Call => {
                self.mark_assigned(index);
            }

            // These uses never require a memory location by themselves.
            LvalueContext::StorageLive |
            LvalueContext::StorageDead |
            LvalueContext::Validate |
            LvalueContext::Inspect |
            LvalueContext::Consume => {}

            // Stores through the local, borrows of it, and projections
            // out of it all need an addressable location.
            LvalueContext::Store |
            LvalueContext::Borrow { .. } |
            LvalueContext::Projection(..) => {
                self.mark_as_lvalue(index);
            }

            LvalueContext::Drop => {
                let ty = mir::Lvalue::Local(index).ty(self.cx.mir, self.cx.ccx.tcx());
                let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx()));

                // Only need the lvalue if we're actually dropping it.
                if self.cx.ccx.shared().type_needs_drop(ty) {
                    self.mark_as_lvalue(index);
                }
            }
        }
    }
}
195
/// Classification of a basic block with respect to funclet-based
/// cleanup lowering (computed by `cleanup_kinds`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    /// Block is not on any unwind/cleanup path.
    NotCleanup,
    /// Block is the root ("master") of a funclet: it is the direct
    /// target of an unwind edge, or is shared by multiple funclets.
    Funclet,
    /// Cleanup block that belongs to the funclet rooted at `funclet`.
    Internal { funclet: mir::BasicBlock }
}
202
impl CleanupKind {
    /// Returns the root block of the funclet that `for_bb` belongs to:
    /// `for_bb` itself if it is a funclet root, the recorded root for an
    /// internal cleanup block, or `None` outside any cleanup path.
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}
212
213 pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
214 fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
215 mir: &mir::Mir<'tcx>) {
216 for (bb, data) in mir.basic_blocks().iter_enumerated() {
217 match data.terminator().kind {
218 TerminatorKind::Goto { .. } |
219 TerminatorKind::Resume |
220 TerminatorKind::Return |
221 TerminatorKind::GeneratorDrop |
222 TerminatorKind::Unreachable |
223 TerminatorKind::SwitchInt { .. } |
224 TerminatorKind::Yield { .. } => {
225 /* nothing to do */
226 }
227 TerminatorKind::Call { cleanup: unwind, .. } |
228 TerminatorKind::Assert { cleanup: unwind, .. } |
229 TerminatorKind::DropAndReplace { unwind, .. } |
230 TerminatorKind::Drop { unwind, .. } => {
231 if let Some(unwind) = unwind {
232 debug!("cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
233 bb, data, unwind);
234 result[unwind] = CleanupKind::Funclet;
235 }
236 }
237 }
238 }
239 }
240
241 fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
242 mir: &mir::Mir<'tcx>) {
243 let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());
244
245 let mut set_successor = |funclet: mir::BasicBlock, succ| {
246 match funclet_succs[funclet] {
247 ref mut s @ None => {
248 debug!("set_successor: updating successor of {:?} to {:?}",
249 funclet, succ);
250 *s = Some(succ);
251 },
252 Some(s) => if s != succ {
253 span_bug!(mir.span, "funclet {:?} has 2 parents - {:?} and {:?}",
254 funclet, s, succ);
255 }
256 }
257 };
258
259 for (bb, data) in traversal::reverse_postorder(mir) {
260 let funclet = match result[bb] {
261 CleanupKind::NotCleanup => continue,
262 CleanupKind::Funclet => bb,
263 CleanupKind::Internal { funclet } => funclet,
264 };
265
266 debug!("cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
267 bb, data, result[bb], funclet);
268
269 for &succ in data.terminator().successors().iter() {
270 let kind = result[succ];
271 debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}",
272 funclet, succ, kind);
273 match kind {
274 CleanupKind::NotCleanup => {
275 result[succ] = CleanupKind::Internal { funclet: funclet };
276 }
277 CleanupKind::Funclet => {
278 if funclet != succ {
279 set_successor(funclet, succ);
280 }
281 }
282 CleanupKind::Internal { funclet: succ_funclet } => {
283 if funclet != succ_funclet {
284 // `succ` has 2 different funclet going into it, so it must
285 // be a funclet by itself.
286
287 debug!("promoting {:?} to a funclet and updating {:?}", succ,
288 succ_funclet);
289 result[succ] = CleanupKind::Funclet;
290 set_successor(succ_funclet, succ);
291 set_successor(funclet, succ);
292 }
293 }
294 }
295 }
296 }
297 }
298
299 let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());
300
301 discover_masters(&mut result, mir);
302 propagate(&mut result, mir);
303 debug!("cleanup_kinds: result={:?}", result);
304 result
305 }