//! An analysis to determine which locals require allocas and which do not.
6 use rustc_data_structures
::graph
::dominators
::Dominators
;
7 use rustc_index
::bit_set
::BitSet
;
8 use rustc_index
::{IndexSlice, IndexVec}
;
9 use rustc_middle
::mir
::traversal
;
10 use rustc_middle
::mir
::visit
::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}
;
11 use rustc_middle
::mir
::{self, Location, TerminatorKind}
;
12 use rustc_middle
::ty
::layout
::{HasTyCtxt, LayoutOf}
;
14 pub fn non_ssa_locals
<'a
, 'tcx
, Bx
: BuilderMethods
<'a
, 'tcx
>>(
15 fx
: &FunctionCx
<'a
, 'tcx
, Bx
>,
16 ) -> BitSet
<mir
::Local
> {
18 let dominators
= mir
.basic_blocks
.dominators();
23 let ty
= fx
.monomorphize(decl
.ty
);
24 let layout
= fx
.cx
.spanned_layout_of(ty
, decl
.source_info
.span
);
27 } else if fx
.cx
.is_backend_immediate(layout
) || fx
.cx
.is_backend_scalar_pair(layout
) {
35 let mut analyzer
= LocalAnalyzer { fx, dominators, locals }
;
37 // Arguments get assigned to by means of the function being called
38 for arg
in mir
.args_iter() {
39 analyzer
.assign(arg
, DefLocation
::Argument
);
42 // If there exists a local definition that dominates all uses of that local,
43 // the definition should be visited first. Traverse blocks in an order that
44 // is a topological sort of dominance partial order.
45 for (bb
, data
) in traversal
::reverse_postorder(&mir
) {
46 analyzer
.visit_basic_block_data(bb
, data
);
49 let mut non_ssa_locals
= BitSet
::new_empty(analyzer
.locals
.len());
50 for (local
, kind
) in analyzer
.locals
.iter_enumerated() {
51 if matches
!(kind
, LocalKind
::Memory
) {
52 non_ssa_locals
.insert(local
);
59 #[derive(Copy, Clone, PartialEq, Eq)]
62 /// A local that requires an alloca.
64 /// A scalar or a scalar pair local that is neither defined nor used.
66 /// A scalar or a scalar pair local with a single definition that dominates all uses.
70 #[derive(Copy, Clone, PartialEq, Eq)]
77 fn dominates(self, location
: Location
, dominators
: &Dominators
<mir
::BasicBlock
>) -> bool
{
79 DefLocation
::Argument
=> true,
80 DefLocation
::Body(def
) => def
.successor_within_block().dominates(location
, dominators
),
85 struct LocalAnalyzer
<'mir
, 'a
, 'tcx
, Bx
: BuilderMethods
<'a
, 'tcx
>> {
86 fx
: &'mir FunctionCx
<'a
, 'tcx
, Bx
>,
87 dominators
: &'mir Dominators
<mir
::BasicBlock
>,
88 locals
: IndexVec
<mir
::Local
, LocalKind
>,
91 impl<'mir
, 'a
, 'tcx
, Bx
: BuilderMethods
<'a
, 'tcx
>> LocalAnalyzer
<'mir
, 'a
, 'tcx
, Bx
> {
92 fn assign(&mut self, local
: mir
::Local
, location
: DefLocation
) {
93 let kind
= &mut self.locals
[local
];
96 LocalKind
::Memory
=> {}
97 LocalKind
::Unused
=> *kind
= LocalKind
::SSA(location
),
98 LocalKind
::SSA(_
) => *kind
= LocalKind
::Memory
,
104 place_ref
: &mir
::PlaceRef
<'tcx
>,
105 context
: PlaceContext
,
110 if let Some((place_base
, elem
)) = place_ref
.last_projection() {
111 let mut base_context
= if context
.is_mutating_use() {
112 PlaceContext
::MutatingUse(MutatingUseContext
::Projection
)
114 PlaceContext
::NonMutatingUse(NonMutatingUseContext
::Projection
)
117 // Allow uses of projections that are ZSTs or from scalar fields.
118 let is_consume
= matches
!(
120 PlaceContext
::NonMutatingUse(
121 NonMutatingUseContext
::Copy
| NonMutatingUseContext
::Move
,
125 let base_ty
= place_base
.ty(self.fx
.mir
, cx
.tcx());
126 let base_ty
= self.fx
.monomorphize(base_ty
);
128 // ZSTs don't require any actual memory access.
129 let elem_ty
= base_ty
.projection_ty(cx
.tcx(), self.fx
.monomorphize(elem
)).ty
;
130 let span
= self.fx
.mir
.local_decls
[place_ref
.local
].source_info
.span
;
131 if cx
.spanned_layout_of(elem_ty
, span
).is_zst() {
135 if let mir
::ProjectionElem
::Field(..) = elem
{
136 let layout
= cx
.spanned_layout_of(base_ty
.ty
, span
);
137 if cx
.is_backend_immediate(layout
) || cx
.is_backend_scalar_pair(layout
) {
138 // Recurse with the same context, instead of `Projection`,
139 // potentially stopping at non-operand projections,
140 // which would trigger `not_ssa` on locals.
141 base_context
= context
;
146 if let mir
::ProjectionElem
::Deref
= elem
{
147 // Deref projections typically only read the pointer.
148 base_context
= PlaceContext
::NonMutatingUse(NonMutatingUseContext
::Copy
);
151 self.process_place(&place_base
, base_context
, location
);
152 // HACK(eddyb) this emulates the old `visit_projection_elem`, this
153 // entire `visit_place`-like `process_place` method should be rewritten,
154 // now that we have moved to the "slice of projections" representation.
155 if let mir
::ProjectionElem
::Index(local
) = elem
{
158 PlaceContext
::NonMutatingUse(NonMutatingUseContext
::Copy
),
163 self.visit_local(place_ref
.local
, context
, location
);
168 impl<'mir
, 'a
, 'tcx
, Bx
: BuilderMethods
<'a
, 'tcx
>> Visitor
<'tcx
>
169 for LocalAnalyzer
<'mir
, 'a
, 'tcx
, Bx
>
173 place
: &mir
::Place
<'tcx
>,
174 rvalue
: &mir
::Rvalue
<'tcx
>,
177 debug
!("visit_assign(place={:?}, rvalue={:?})", place
, rvalue
);
179 if let Some(local
) = place
.as_local() {
180 self.assign(local
, DefLocation
::Body(location
));
181 if self.locals
[local
] != LocalKind
::Memory
{
182 let decl_span
= self.fx
.mir
.local_decls
[local
].source_info
.span
;
183 if !self.fx
.rvalue_creates_operand(rvalue
, decl_span
) {
184 self.locals
[local
] = LocalKind
::Memory
;
188 self.visit_place(place
, PlaceContext
::MutatingUse(MutatingUseContext
::Store
), location
);
191 self.visit_rvalue(rvalue
, location
);
194 fn visit_place(&mut self, place
: &mir
::Place
<'tcx
>, context
: PlaceContext
, location
: Location
) {
195 debug
!("visit_place(place={:?}, context={:?})", place
, context
);
196 self.process_place(&place
.as_ref(), context
, location
);
199 fn visit_local(&mut self, local
: mir
::Local
, context
: PlaceContext
, location
: Location
) {
201 PlaceContext
::MutatingUse(MutatingUseContext
::Call
)
202 | PlaceContext
::MutatingUse(MutatingUseContext
::Yield
) => {
203 self.assign(local
, DefLocation
::Body(location
));
206 PlaceContext
::NonUse(_
)
207 | PlaceContext
::NonMutatingUse(NonMutatingUseContext
::PlaceMention
)
208 | PlaceContext
::MutatingUse(MutatingUseContext
::Retag
) => {}
210 PlaceContext
::NonMutatingUse(
211 NonMutatingUseContext
::Copy
| NonMutatingUseContext
::Move
,
212 ) => match &mut self.locals
[local
] {
214 LocalKind
::Memory
=> {}
215 LocalKind
::SSA(def
) if def
.dominates(location
, &self.dominators
) => {}
216 // Reads from uninitialized variables (e.g., in dead code, after
217 // optimizations) require locals to be in (uninitialized) memory.
218 // N.B., there can be uninitialized reads of a local visited after
219 // an assignment to that local, if they happen on disjoint paths.
220 kind @
(LocalKind
::Unused
| LocalKind
::SSA(_
)) => {
221 *kind
= LocalKind
::Memory
;
225 PlaceContext
::MutatingUse(
226 MutatingUseContext
::Store
227 | MutatingUseContext
::Deinit
228 | MutatingUseContext
::SetDiscriminant
229 | MutatingUseContext
::AsmOutput
230 | MutatingUseContext
::Borrow
231 | MutatingUseContext
::AddressOf
232 | MutatingUseContext
::Projection
,
234 | PlaceContext
::NonMutatingUse(
235 NonMutatingUseContext
::Inspect
236 | NonMutatingUseContext
::SharedBorrow
237 | NonMutatingUseContext
::ShallowBorrow
238 | NonMutatingUseContext
::AddressOf
239 | NonMutatingUseContext
::Projection
,
241 self.locals
[local
] = LocalKind
::Memory
;
244 PlaceContext
::MutatingUse(MutatingUseContext
::Drop
) => {
245 let kind
= &mut self.locals
[local
];
246 if *kind
!= LocalKind
::Memory
{
247 let ty
= self.fx
.mir
.local_decls
[local
].ty
;
248 let ty
= self.fx
.monomorphize(ty
);
249 if self.fx
.cx
.type_needs_drop(ty
) {
250 // Only need the place if we're actually dropping it.
251 *kind
= LocalKind
::Memory
;
259 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
260 pub enum CleanupKind
{
263 Internal { funclet: mir::BasicBlock }
,
267 pub fn funclet_bb(self, for_bb
: mir
::BasicBlock
) -> Option
<mir
::BasicBlock
> {
269 CleanupKind
::NotCleanup
=> None
,
270 CleanupKind
::Funclet
=> Some(for_bb
),
271 CleanupKind
::Internal { funclet }
=> Some(funclet
),
276 /// MSVC requires unwinding code to be split to a tree of *funclets*, where each funclet can only
277 /// branch to itself or to its parent. Luckily, the code we generates matches this pattern.
278 /// Recover that structure in an analyze pass.
279 pub fn cleanup_kinds(mir
: &mir
::Body
<'_
>) -> IndexVec
<mir
::BasicBlock
, CleanupKind
> {
280 fn discover_masters
<'tcx
>(
281 result
: &mut IndexSlice
<mir
::BasicBlock
, CleanupKind
>,
282 mir
: &mir
::Body
<'tcx
>,
284 for (bb
, data
) in mir
.basic_blocks
.iter_enumerated() {
285 match data
.terminator().kind
{
286 TerminatorKind
::Goto { .. }
287 | TerminatorKind
::Resume
288 | TerminatorKind
::Terminate
289 | TerminatorKind
::Return
290 | TerminatorKind
::GeneratorDrop
291 | TerminatorKind
::Unreachable
292 | TerminatorKind
::SwitchInt { .. }
293 | TerminatorKind
::Yield { .. }
294 | TerminatorKind
::FalseEdge { .. }
295 | TerminatorKind
::FalseUnwind { .. }
=> { /* nothing to do */ }
296 TerminatorKind
::Call { unwind, .. }
297 | TerminatorKind
::InlineAsm { unwind, .. }
298 | TerminatorKind
::Assert { unwind, .. }
299 | TerminatorKind
::Drop { unwind, .. }
=> {
300 if let mir
::UnwindAction
::Cleanup(unwind
) = unwind
{
302 "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
305 result
[unwind
] = CleanupKind
::Funclet
;
313 result
: &mut IndexSlice
<mir
::BasicBlock
, CleanupKind
>,
314 mir
: &mir
::Body
<'tcx
>,
316 let mut funclet_succs
= IndexVec
::from_elem(None
, &mir
.basic_blocks
);
318 let mut set_successor
= |funclet
: mir
::BasicBlock
, succ
| match funclet_succs
[funclet
] {
319 ref mut s @ None
=> {
320 debug
!("set_successor: updating successor of {:?} to {:?}", funclet
, succ
);
327 "funclet {:?} has 2 parents - {:?} and {:?}",
336 for (bb
, data
) in traversal
::reverse_postorder(mir
) {
337 let funclet
= match result
[bb
] {
338 CleanupKind
::NotCleanup
=> continue,
339 CleanupKind
::Funclet
=> bb
,
340 CleanupKind
::Internal { funclet }
=> funclet
,
344 "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
345 bb
, data
, result
[bb
], funclet
348 for succ
in data
.terminator().successors() {
349 let kind
= result
[succ
];
350 debug
!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet
, succ
, kind
);
352 CleanupKind
::NotCleanup
=> {
353 result
[succ
] = CleanupKind
::Internal { funclet }
;
355 CleanupKind
::Funclet
=> {
357 set_successor(funclet
, succ
);
360 CleanupKind
::Internal { funclet: succ_funclet }
=> {
361 if funclet
!= succ_funclet
{
362 // `succ` has 2 different funclet going into it, so it must
363 // be a funclet by itself.
366 "promoting {:?} to a funclet and updating {:?}",
369 result
[succ
] = CleanupKind
::Funclet
;
370 set_successor(succ_funclet
, succ
);
371 set_successor(funclet
, succ
);
379 let mut result
= IndexVec
::from_elem(CleanupKind
::NotCleanup
, &mir
.basic_blocks
);
381 discover_masters(&mut result
, mir
);
382 propagate(&mut result
, mir
);
383 debug
!("cleanup_kinds: result={:?}", result
);