compiler/rustc_codegen_ssa/src/mir/analyze.rs (rustc 1.64.0)
//! An analysis to determine which locals require allocas and
//! which do not.
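//!
//! For example, in `fn f(x: u64) -> u64 { let y = x + 1; y }` both `x` and `y` are
//! scalar locals with a single definition that dominates every use, so neither needs
//! an alloca; adding `let r = &y;` would force `y` into memory, because a borrow
//! requires a stable address.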

use super::FunctionCx;
use crate::traits::*;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;
use rustc_middle::mir::traversal;
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::{self, Location, TerminatorKind};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};

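/// Returns the set of locals that cannot be kept as SSA values and must instead be
/// allocated in memory: everything classified as `LocalKind::Memory` by the analysis
/// below.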
pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
) -> BitSet<mir::Local> {
    let mir = fx.mir;
    let dominators = mir.basic_blocks.dominators();
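    // Seed each local's kind from its layout: ZSTs never need storage, locals with a
    // backend-immediate or scalar-pair layout are SSA candidates (starting as
    // `Unused`), and everything else has to live in memory.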
    let locals = mir
        .local_decls
        .iter()
        .map(|decl| {
            let ty = fx.monomorphize(decl.ty);
            let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
            if layout.is_zst() {
                LocalKind::ZST
            } else if fx.cx.is_backend_immediate(layout) || fx.cx.is_backend_scalar_pair(layout) {
                LocalKind::Unused
            } else {
                LocalKind::Memory
            }
        })
        .collect();

    let mut analyzer = LocalAnalyzer { fx, dominators, locals };

    // Arguments get assigned to by means of the function being called.
    for arg in mir.args_iter() {
        analyzer.assign(arg, mir::START_BLOCK.start_location());
    }

    // If there exists a local definition that dominates all uses of that local,
    // the definition should be visited first. Traverse blocks in an order that
    // is a topological sort of the dominance partial order.
    for (bb, data) in traversal::reverse_postorder(&mir) {
        analyzer.visit_basic_block_data(bb, data);
    }

    let mut non_ssa_locals = BitSet::new_empty(analyzer.locals.len());
    for (local, kind) in analyzer.locals.iter_enumerated() {
        if matches!(kind, LocalKind::Memory) {
            non_ssa_locals.insert(local);
        }
    }

    non_ssa_locals
}

#[derive(Copy, Clone, PartialEq, Eq)]
enum LocalKind {
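    /// A zero-sized local that needs no backing storage at all.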
    ZST,
    /// A local that requires an alloca.
    Memory,
    /// A scalar or a scalar pair local that is neither defined nor used.
    Unused,
    /// A scalar or a scalar pair local with a single definition that dominates all uses.
    SSA(mir::Location),
}

struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    fx: &'mir FunctionCx<'a, 'tcx, Bx>,
    dominators: Dominators<mir::BasicBlock>,
    locals: IndexVec<mir::Local, LocalKind>,
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
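    /// Records a definition of `local`. The first definition of an SSA candidate is
    /// remembered; any further definition demotes the local to `Memory`, since it can
    /// no longer be represented as a single SSA value.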
    fn assign(&mut self, local: mir::Local, location: Location) {
        let kind = &mut self.locals[local];
        match *kind {
            LocalKind::ZST => {}
            LocalKind::Memory => {}
            LocalKind::Unused => {
                *kind = LocalKind::SSA(location);
            }
            LocalKind::SSA(_) => {
                *kind = LocalKind::Memory;
            }
        }
    }

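    /// Walks a place's projections from the outermost element inwards, deciding which
    /// uses still permit the base local to stay in SSA form. Consumes of ZST or scalar
    /// field projections are harmless; most other projections force the base local
    /// into memory via the `Projection` context.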
    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        let cx = self.fx.cx;

        if let Some((place_base, elem)) = place_ref.last_projection() {
            let mut base_context = if context.is_mutating_use() {
                PlaceContext::MutatingUse(MutatingUseContext::Projection)
            } else {
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
            };

            // Allow uses of projections that are ZSTs or from scalar fields.
            let is_consume = matches!(
                context,
                PlaceContext::NonMutatingUse(
                    NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
                )
            );
            if is_consume {
                let base_ty = place_base.ty(self.fx.mir, cx.tcx());
                let base_ty = self.fx.monomorphize(base_ty);

                // ZSTs don't require any actual memory access.
                let elem_ty = base_ty.projection_ty(cx.tcx(), self.fx.monomorphize(elem)).ty;
                let span = self.fx.mir.local_decls[place_ref.local].source_info.span;
                if cx.spanned_layout_of(elem_ty, span).is_zst() {
                    return;
                }

                if let mir::ProjectionElem::Field(..) = elem {
                    let layout = cx.spanned_layout_of(base_ty.ty, span);
                    if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
                        // Recurse with the same context, instead of `Projection`,
                        // potentially stopping at non-operand projections,
                        // which would trigger `not_ssa` on locals.
                        base_context = context;
                    }
                }
            }

            if let mir::ProjectionElem::Deref = elem {
                // Deref projections typically only read the pointer.
                base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);
            }

            self.process_place(&place_base, base_context, location);
            // HACK(eddyb) this emulates the old `visit_projection_elem`; this
            // entire `visit_place`-like `process_place` method should be rewritten,
            // now that we have moved to the "slice of projections" representation.
            if let mir::ProjectionElem::Index(local) = elem {
                self.visit_local(
                    local,
                    PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
                    location,
                );
            }
        } else {
            self.visit_local(place_ref.local, context, location);
        }
    }
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
    for LocalAnalyzer<'mir, 'a, 'tcx, Bx>
{
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

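        // A whole-local assignment is a definition; if the rvalue cannot be built as
        // an operand, the destination has to be in memory anyway. An assignment
        // through a projection is just a mutating use of the base local.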
        if let Some(local) = place.as_local() {
            self.assign(local, location);
            if self.locals[local] != LocalKind::Memory {
                let decl_span = self.fx.mir.local_decls[local].source_info.span;
                if !self.fx.rvalue_creates_operand(rvalue, decl_span) {
                    self.locals[local] = LocalKind::Memory;
                }
            }
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    fn visit_local(&mut self, local: mir::Local, context: PlaceContext, location: Location) {
        match context {
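            // The destination of a call and the resume place of a yield are written
            // by the terminator, so both count as definitions.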
            PlaceContext::MutatingUse(MutatingUseContext::Call)
            | PlaceContext::MutatingUse(MutatingUseContext::Yield) => {
                self.assign(local, location);
            }

            PlaceContext::NonUse(_) | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
            ) => match &mut self.locals[local] {
                LocalKind::ZST => {}
                LocalKind::Memory => {}
                LocalKind::SSA(def) if def.dominates(location, &self.dominators) => {}
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                kind @ (LocalKind::Unused | LocalKind::SSA(_)) => {
                    *kind = LocalKind::Memory;
                }
            },

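            // Any borrow, address-of, inspection, or non-operand projection requires
            // the local to have a stable address, so it must live in memory.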
            PlaceContext::MutatingUse(
                MutatingUseContext::Store
                | MutatingUseContext::Deinit
                | MutatingUseContext::SetDiscriminant
                | MutatingUseContext::AsmOutput
                | MutatingUseContext::Borrow
                | MutatingUseContext::AddressOf
                | MutatingUseContext::Projection,
            )
            | PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Inspect
                | NonMutatingUseContext::SharedBorrow
                | NonMutatingUseContext::UniqueBorrow
                | NonMutatingUseContext::ShallowBorrow
                | NonMutatingUseContext::AddressOf
                | NonMutatingUseContext::Projection,
            ) => {
                self.locals[local] = LocalKind::Memory;
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let kind = &mut self.locals[local];
                if *kind != LocalKind::Memory {
                    let ty = self.fx.mir.local_decls[local].ty;
                    let ty = self.fx.monomorphize(ty);
                    if self.fx.cx.type_needs_drop(ty) {
                        // Only need the place if we're actually dropping it.
                        *kind = LocalKind::Memory;
                    }
                }
            }
        }
    }
}

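/// How a basic block participates in funclet-based unwinding (as used by MSVC-style
/// exception handling): not part of any cleanup, the start of a funclet, or an
/// internal block of the funclet rooted at `funclet`.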
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CleanupKind {
    NotCleanup,
    Funclet,
    Internal { funclet: mir::BasicBlock },
}

impl CleanupKind {
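    /// Returns the basic block that owns the funclet `for_bb` belongs to, if any.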
    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
        match self {
            CleanupKind::NotCleanup => None,
            CleanupKind::Funclet => Some(for_bb),
            CleanupKind::Internal { funclet } => Some(funclet),
        }
    }
}

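/// Computes a `CleanupKind` for every basic block in two passes: `discover_masters`
/// marks each unwind target of a terminator as the root of a funclet, and `propagate`
/// then spreads funclet membership along the CFG, promoting any block reachable from
/// two different funclets into a funclet of its own.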
pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
    fn discover_masters<'tcx>(
        result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        for (bb, data) in mir.basic_blocks().iter_enumerated() {
            match data.terminator().kind {
                TerminatorKind::Goto { .. }
                | TerminatorKind::Resume
                | TerminatorKind::Abort
                | TerminatorKind::Return
                | TerminatorKind::GeneratorDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. } => { /* nothing to do */ }
                TerminatorKind::Call { cleanup: unwind, .. }
                | TerminatorKind::InlineAsm { cleanup: unwind, .. }
                | TerminatorKind::Assert { cleanup: unwind, .. }
                | TerminatorKind::DropAndReplace { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let Some(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

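    /// Assigns every block reachable from a funclet root to that funclet, and detects
    /// blocks that would belong to two funclets at once.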
    fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>, mir: &mir::Body<'tcx>) {
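        // Each funclet may continue on to at most one other cleanup block; record that
        // successor here, and ICE if a funclet ever acquires two distinct ones.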
        let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());

        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclets going into it, so it must
                            // be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}