//! An analysis to determine which locals require allocas and
//! which do not.

use super::FunctionCx;
use crate::traits::*;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::BitSet;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::mir::traversal;
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::{self, Location, TerminatorKind};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};

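/// Returns the set of locals that cannot be lowered as SSA values and therefore
/// need a stack allocation (an alloca) in the backend.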
pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
) -> BitSet<mir::Local> {
    let mir = fx.mir;
    let dominators = mir.basic_blocks.dominators();
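    // Seed the classification from each local's layout alone: zero-sized locals never
    // need storage, locals the backend can hold as an immediate or scalar pair start
    // out as `Unused` (and may become `SSA` or `Memory` once uses are seen), and
    // everything else has to live in memory from the start.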
    let locals = mir
        .local_decls
        .iter()
        .map(|decl| {
            let ty = fx.monomorphize(decl.ty);
            let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
            if layout.is_zst() {
                LocalKind::ZST
            } else if fx.cx.is_backend_immediate(layout) || fx.cx.is_backend_scalar_pair(layout) {
                LocalKind::Unused
            } else {
                LocalKind::Memory
            }
        })
        .collect();

    let mut analyzer = LocalAnalyzer { fx, dominators, locals };

    // Arguments get assigned to by means of the function being called
    for arg in mir.args_iter() {
        analyzer.assign(arg, DefLocation::Argument);
    }

    // If there exists a local definition that dominates all uses of that local,
    // the definition should be visited first. Traverse blocks in an order that
    // is a topological sort of the dominance partial order.
    for (bb, data) in traversal::reverse_postorder(&mir) {
        analyzer.visit_basic_block_data(bb, data);
    }

    let mut non_ssa_locals = BitSet::new_empty(analyzer.locals.len());
    for (local, kind) in analyzer.locals.iter_enumerated() {
        if matches!(kind, LocalKind::Memory) {
            non_ssa_locals.insert(local);
        }
    }

    non_ssa_locals
}

#[derive(Copy, Clone, PartialEq, Eq)]
enum LocalKind {
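    /// A zero-sized local, which needs no backing storage at all.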
    ZST,
    /// A local that requires an alloca.
    Memory,
    /// A scalar or a scalar pair local that is neither defined nor used.
    Unused,
    /// A scalar or a scalar pair local with a single definition that dominates all uses.
    SSA(DefLocation),
}

#[derive(Copy, Clone, PartialEq, Eq)]
enum DefLocation {
    Argument,
    Body(Location),
}

impl DefLocation {
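    /// Does this definition dominate `location`? Arguments are defined on entry and
    /// dominate every location in the body; a definition inside the body dominates a
    /// use if the point immediately after the defining statement dominates that use.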
    fn dominates(self, location: Location, dominators: &Dominators<mir::BasicBlock>) -> bool {
        match self {
            DefLocation::Argument => true,
            DefLocation::Body(def) => def.successor_within_block().dominates(location, dominators),
        }
    }
}

struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    dominators: &'mir Dominators<mir::BasicBlock>,
    locals: IndexVec<mir::Local, LocalKind>,
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
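    /// Records a definition of `local`. The first definition of an `Unused` local makes
    /// it a candidate for `SSA`; a second definition demotes it to `Memory`, since SSA
    /// form allows only a single assignment. ZST and `Memory` locals are left unchanged.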
    fn assign(&mut self, local: mir::Local, location: DefLocation) {
        let kind = &mut self.locals[local];
        match *kind {
            LocalKind::ZST => {}
            LocalKind::Memory => {}
            LocalKind::Unused => *kind = LocalKind::SSA(location),
            LocalKind::SSA(_) => *kind = LocalKind::Memory,
        }
    }

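    /// Walks a place from its outermost projection element inwards, deciding how the use
    /// affects the base local. Consumes of ZST projections and of scalar fields of an
    /// immediate or scalar-pair base leave the base as an SSA candidate, and a `Deref`
    /// only reads the pointer; other projections push the base local into memory, since
    /// projecting out of it requires a stable address.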
    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        let cx = self.fx.cx;

        if let Some((place_base, elem)) = place_ref.last_projection() {
            let mut base_context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = matches!(
                context,
                PlaceContext::NonMutatingUse(
                    NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
                )
            );
            if is_consume {
                let base_ty = place_base.ty(self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty.projection_ty(cx.tcx(), self.fx.monomorphize(elem)).ty;
                let span = self.fx.mir.local_decls[place_ref.local].source_info.span;
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        base_context = context;
                    }
                }
            }

            if let mir::ProjectionElem::Deref = elem {
                // Deref projections typically only read the pointer.
                base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);
            }

            self.process_place(&place_base, base_context, location);
            // HACK(eddyb) this emulates the old `visit_projection_elem`, this
            // entire `visit_place`-like `process_place` method should be rewritten,
            // now that we have moved to the "slice of projections" representation.
            if let mir::ProjectionElem::Index(local) = elem {
                self.visit_local(
                    local,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location,
                );
            }
        } else {
            self.visit_local(place_ref.local, context, location);
        }
    }
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx>
{
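    // An assignment directly to a whole local is a definition of that local; anything
    // else is treated as a store through the place, which forces the base local into
    // memory. Rvalues that the backend cannot produce as operands also force memory.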
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let Some(local) = place.as_local() {
            self.assign(local, DefLocation::Body(location));
            if self.locals[local] != LocalKind::Memory {
                let decl_span = self.fx.mir.local_decls[local].source_info.span;
                if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                    self.locals[local] = LocalKind::Memory;
                }
            }
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    fn visit_local(&mut self, local: mir::Local, context: PlaceContext, location: Location) {
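        // A use of the whole local: update its kind depending on whether the context
        // defines it, reads it, takes its address or borrows it, or drops it.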
        match context {
            PlaceContext::MutatingUse(MutatingUseContext::Call)
            | PlaceContext::MutatingUse(MutatingUseContext::Yield) => {
                self.assign(local, DefLocation::Body(location));
            }

            PlaceContext::NonUse(_)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::PlaceMention)
            | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
            ) => match &mut self.locals[local] {
                LocalKind::ZST => {}
                LocalKind::Memory => {}
                LocalKind::SSA(def) if def.dominates(location, &self.dominators) => {}
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                kind @ (LocalKind::Unused | LocalKind::SSA(_)) => {
                    *kind = LocalKind::Memory;
                }
            },

            PlaceContext::MutatingUse(
                MutatingUseContext::Store
                | MutatingUseContext::Deinit
                | MutatingUseContext::SetDiscriminant
                | MutatingUseContext::AsmOutput
                | MutatingUseContext::Borrow
                | MutatingUseContext::AddressOf
                | MutatingUseContext::Projection,
            )
            | PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Inspect
                | NonMutatingUseContext::SharedBorrow
                | NonMutatingUseContext::ShallowBorrow
                | NonMutatingUseContext::AddressOf
                | NonMutatingUseContext::Projection,
            ) => {
                self.locals[local] = LocalKind::Memory;
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let kind = &mut self.locals[local];
                if *kind != LocalKind::Memory {
                    let ty = self.fx.mir.local_decls[local].ty;
                    let ty = self.fx.monomorphize(ty);
                    if self.fx.cx.type_needs_drop(ty) {
                        // Only need the place if we're actually dropping it.
                        *kind = LocalKind::Memory;
                    }
                }
            }
        }
    }
}

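/// How a basic block participates in MSVC-style funclet-based unwinding:
/// `NotCleanup` blocks are ordinary code, `Funclet` blocks start a funclet of
/// their own, and `Internal { funclet }` blocks belong to the funclet rooted
/// at `funclet`.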
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    NotCleanup,
    Funclet,
    Internal { funclet: mir::BasicBlock },
}

impl CleanupKind {
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

/// MSVC requires unwinding code to be split into a tree of *funclets*, where each funclet can
/// only branch to itself or to its parent. Luckily, the code we generate matches this pattern.
/// Recover that structure in an analysis pass.
pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
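    // First pass: every block that an `UnwindAction::Cleanup` edge points to becomes the
    // root ("master") of its own funclet.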
    fn discover_masters<'tcx>(
        result: &mut IndexSlice<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        for (bb, data) in mir.basic_blocks.iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. }
                | TerminatorKind::Resume
                | TerminatorKind::Terminate
                | TerminatorKind::Return
                | TerminatorKind::GeneratorDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. } => { /* nothing to do */ }
                TerminatorKind::Call { unwind, .. }
                | TerminatorKind::InlineAsm { unwind, .. }
                | TerminatorKind::Assert { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let mir::UnwindAction::Cleanup(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

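    // Second pass: walk the CFG in reverse postorder and pull every block reachable from a
    // funclet root into that funclet. A block reached from two different funclets has to be
    // promoted to a funclet of its own.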
    fn propagate<'tcx>(
        result: &mut IndexSlice<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        let mut funclet_succs = IndexVec::from_elem(None, &mir.basic_blocks);

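        // Record the unique funclet that `funclet` transfers control to (its parent in the
        // funclet tree). Seeing two distinct successors would mean two parents, which the
        // MSVC model does not allow, so that is a compiler bug.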
        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, &mir.basic_blocks);

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}