src/librustc_trans/mir/analyze.rs (upstream rustc 1.21.0)
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! An analysis to determine which locals require allocas and
//! which do not.
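//!
//! A local that is only ever used as an SSA-style operand (a scalar, or an
//! immediate pair whose fields are read directly) can be kept in a
//! `ValueRef`; a local that is borrowed, dropped, assigned more than once,
//! or kept as an aggregate is marked as an "lvalue local" and given an
//! alloca.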

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::middle::const_val::ConstVal;
use rustc::mir::{self, Location, TerminatorKind, Literal};
use rustc::mir::visit::{Visitor, LvalueContext};
use rustc::mir::traversal;
use common;
use super::MirContext;

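/// Computes which locals need to be lvalues (allocas). The returned bit
/// vector has one bit per local, set for the locals that require one.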
pub fn lvalue_locals<'a, 'tcx>(mircx: &MirContext<'a, 'tcx>) -> BitVector {
    let mir = mircx.mir;
    let mut analyzer = LocalAnalyzer::new(mircx);

    analyzer.visit_mir(mir);

    for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
        let ty = mircx.monomorphize(&ty);
        debug!("local {} has type {:?}", index, ty);
        if ty.is_scalar() ||
            ty.is_box() ||
            ty.is_region_ptr() ||
            ty.is_simd() ||
            common::type_is_zero_size(mircx.ccx, ty)
        {
            // These sorts of types are immediates that we can store
            // in a ValueRef without an alloca.
            assert!(common::type_is_immediate(mircx.ccx, ty) ||
                    common::type_is_fat_ptr(mircx.ccx, ty));
        } else if common::type_is_imm_pair(mircx.ccx, ty) {
            // We allow pairs and uses of any of their 2 fields.
        } else {
            // These sorts of types require an alloca. Note that
            // type_is_immediate() may *still* be true, particularly
            // for newtypes, but we currently force some types
            // (e.g. structs) into an alloca unconditionally, just so
            // that we don't have to deal with having two pathways
            // (gep vs extractvalue etc).
            analyzer.mark_as_lvalue(mir::Local::new(index));
        }
    }

    analyzer.lvalue_locals
}

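/// MIR visitor that collects the locals needing an alloca: `lvalue_locals`
/// accumulates the result, and `seen_assigned` records which locals have
/// already been assigned at least once.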
struct LocalAnalyzer<'mir, 'a: 'mir, 'tcx: 'a> {
    cx: &'mir MirContext<'a, 'tcx>,
    lvalue_locals: BitVector,
    seen_assigned: BitVector
}

impl<'mir, 'a, 'tcx> LocalAnalyzer<'mir, 'a, 'tcx> {
    fn new(mircx: &'mir MirContext<'a, 'tcx>) -> LocalAnalyzer<'mir, 'a, 'tcx> {
        LocalAnalyzer {
            cx: mircx,
            lvalue_locals: BitVector::new(mircx.mir.local_decls.len()),
            seen_assigned: BitVector::new(mircx.mir.local_decls.len())
        }
    }

    fn mark_as_lvalue(&mut self, local: mir::Local) {
        debug!("marking {:?} as lvalue", local);
        self.lvalue_locals.insert(local.index());
    }

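    // A local assigned more than once cannot stay a single immediate value;
    // the second assignment forces it into an alloca.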
    fn mark_assigned(&mut self, local: mir::Local) {
        if !self.seen_assigned.insert(local.index()) {
            self.mark_as_lvalue(local);
        }
    }
}

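// The visitor walks the MIR and refines the classification based on how each
// local is actually used (assigned, passed to calls, borrowed, projected,
// dropped, ...).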
impl<'mir, 'a, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'a, 'tcx> {
    fn visit_assign(&mut self,
                    block: mir::BasicBlock,
                    lvalue: &mir::Lvalue<'tcx>,
                    rvalue: &mir::Rvalue<'tcx>,
                    location: Location) {
        debug!("visit_assign(block={:?}, lvalue={:?}, rvalue={:?})", block, lvalue, rvalue);

        if let mir::Lvalue::Local(index) = *lvalue {
            self.mark_assigned(index);
            if !self.cx.rvalue_creates_operand(rvalue) {
                self.mark_as_lvalue(index);
            }
        } else {
            self.visit_lvalue(lvalue, LvalueContext::Store, location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_terminator_kind(&mut self,
                             block: mir::BasicBlock,
                             kind: &mir::TerminatorKind<'tcx>,
                             location: Location) {
        match *kind {
            mir::TerminatorKind::Call {
                func: mir::Operand::Constant(box mir::Constant {
                    literal: Literal::Value {
                        value: ConstVal::Function(def_id, _), ..
                    }, ..
                }),
                ref args, ..
            } if Some(def_id) == self.cx.ccx.tcx().lang_items.box_free_fn() => {
                // box_free(x) shares with `drop x` the property that it
                // is not guaranteed to be statically dominated by the
                // definition of x, so x must always be in an alloca.
                if let mir::Operand::Consume(ref lvalue) = args[0] {
                    self.visit_lvalue(lvalue, LvalueContext::Drop, location);
                }
            }
            _ => {}
        }

        self.super_terminator_kind(block, kind, location);
    }

    fn visit_lvalue(&mut self,
                    lvalue: &mir::Lvalue<'tcx>,
                    context: LvalueContext<'tcx>,
                    location: Location) {
        debug!("visit_lvalue(lvalue={:?}, context={:?})", lvalue, context);

        // Allow uses of projections of immediate pair fields.
        if let mir::Lvalue::Projection(ref proj) = *lvalue {
            if let mir::Lvalue::Local(_) = proj.base {
                let ty = proj.base.ty(self.cx.mir, self.cx.ccx.tcx());

                let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx()));
                if common::type_is_imm_pair(self.cx.ccx, ty) {
                    if let mir::ProjectionElem::Field(..) = proj.elem {
                        if let LvalueContext::Consume = context {
                            return;
                        }
                    }
                }
            }
        }

        if let mir::Lvalue::Local(index) = *lvalue {
            match context {
                LvalueContext::Call => {
                    self.mark_assigned(index);
                }

                LvalueContext::StorageLive |
                LvalueContext::StorageDead |
                LvalueContext::Validate |
                LvalueContext::Inspect |
                LvalueContext::Consume => {}

                LvalueContext::Store |
                LvalueContext::Borrow { .. } |
                LvalueContext::Projection(..) => {
                    self.mark_as_lvalue(index);
                }

                LvalueContext::Drop => {
                    let ty = lvalue.ty(self.cx.mir, self.cx.ccx.tcx());
                    let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx()));

                    // Only need the lvalue if we're actually dropping it.
                    if self.cx.ccx.shared().type_needs_drop(ty) {
                        self.mark_as_lvalue(index);
                    }
                }
            }
        }

        // A deref projection only reads the pointer, never needs the lvalue.
        if let mir::Lvalue::Projection(ref proj) = *lvalue {
            if let mir::ProjectionElem::Deref = proj.elem {
                return self.visit_lvalue(&proj.base, LvalueContext::Consume, location);
            }
        }

        self.super_lvalue(lvalue, context, location);
    }
}

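/// Classification of a basic block with respect to cleanup (unwind) funclets:
/// not on a cleanup path at all, the entry block of a funclet, or internal to
/// the funclet rooted at `funclet`.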
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    NotCleanup,
    Funclet,
    Internal { funclet: mir::BasicBlock }
}

impl CleanupKind {
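    /// The block that starts the funclet `for_bb` belongs to, if any.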
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

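/// Computes, for every basic block, whether it takes part in cleanup
/// (unwinding) and, if so, which funclet it belongs to.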
pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
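    // Pass 1: every block that is the unwind target of a call, assert, drop
    // or drop-and-replace terminator becomes a funclet entry.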
    fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                              mir: &mir::Mir<'tcx>) {
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. } |
                TerminatorKind::Resume |
                TerminatorKind::Return |
                TerminatorKind::Unreachable |
                TerminatorKind::SwitchInt { .. } => {
                    /* nothing to do */
                }
                TerminatorKind::Call { cleanup: unwind, .. } |
                TerminatorKind::Assert { cleanup: unwind, .. } |
                TerminatorKind::DropAndReplace { unwind, .. } |
                TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!("cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                               bb, data, unwind);
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

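    // Pass 2: walk the CFG in reverse postorder, tagging blocks reachable
    // from a funclet entry as `Internal` to it; a block reached from two
    // different funclets is promoted to a funclet entry of its own.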
    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                       mir: &mir::Mir<'tcx>) {
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| {
            match funclet_succs[funclet] {
                ref mut s @ None => {
                    debug!("set_successor: updating successor of {:?} to {:?}",
                           funclet, succ);
                    *s = Some(succ);
                },
                Some(s) => if s != succ {
                    span_bug!(mir.span, "funclet {:?} has 2 parents - {:?} and {:?}",
                              funclet, s, succ);
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!("cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                   bb, data, result[bb], funclet);

            for &succ in data.terminator().successors().iter() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}",
                       funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet: funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!("promoting {:?} to a funclet and updating {:?}", succ,
                                   succ_funclet);
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}