]> git.proxmox.com Git - rustc.git/blob - src/llvm/lib/CodeGen/SjLjEHPrepare.cpp
Imported Upstream version 1.0.0-alpha.2
[rustc.git] / src / llvm / lib / CodeGen / SjLjEHPrepare.cpp
1 //===- SjLjEHPrepare.cpp - Eliminate Invoke & Unwind instructions ---------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This transformation is designed for use by code generators which use SjLj
11 // based exception handling.
12 //
13 //===----------------------------------------------------------------------===//
14
15 #include "llvm/CodeGen/Passes.h"
16 #include "llvm/ADT/DenseMap.h"
17 #include "llvm/ADT/SetVector.h"
18 #include "llvm/ADT/SmallPtrSet.h"
19 #include "llvm/ADT/SmallVector.h"
20 #include "llvm/ADT/Statistic.h"
21 #include "llvm/IR/Constants.h"
22 #include "llvm/IR/DataLayout.h"
23 #include "llvm/IR/DerivedTypes.h"
24 #include "llvm/IR/IRBuilder.h"
25 #include "llvm/IR/Instructions.h"
26 #include "llvm/IR/Intrinsics.h"
27 #include "llvm/IR/LLVMContext.h"
28 #include "llvm/IR/Module.h"
29 #include "llvm/Pass.h"
30 #include "llvm/Support/CommandLine.h"
31 #include "llvm/Support/Debug.h"
32 #include "llvm/Support/raw_ostream.h"
33 #include "llvm/Target/TargetLowering.h"
34 #include "llvm/Target/TargetSubtargetInfo.h"
35 #include "llvm/Transforms/Scalar.h"
36 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
37 #include "llvm/Transforms/Utils/Local.h"
38 #include <set>
39 using namespace llvm;
40
#define DEBUG_TYPE "sjljehprepare"

// Pass-wide statistics, printed when -stats is enabled.
STATISTIC(NumInvokes, "Number of invokes replaced");
STATISTIC(NumSpilled, "Number of registers live across unwind edges");
45
namespace {
/// SjLjEHPrepare - Function pass that rewrites invoke/landingpad-based
/// exception handling into the explicit function-context bookkeeping that
/// setjmp/longjmp-style (SjLj) EH code generators expect.
class SjLjEHPrepare : public FunctionPass {
  const TargetMachine *TM;
  Type *FunctionContextTy;   // Struct type of the per-function EH context.
  Constant *RegisterFn;      // _Unwind_SjLj_Register runtime entry point.
  Constant *UnregisterFn;    // _Unwind_SjLj_Unregister runtime entry point.
  Constant *BuiltinSetjmpFn; // llvm.eh.sjlj.setjmp intrinsic.
  Constant *FrameAddrFn;     // llvm.frameaddress intrinsic.
  Constant *StackAddrFn;     // llvm.stacksave intrinsic.
  Constant *StackRestoreFn;  // llvm.stackrestore intrinsic.
  Constant *LSDAAddrFn;      // llvm.eh.sjlj.lsda intrinsic.
  Value *PersonalityFn;      // Personality cached from the first landingpad.
  Constant *CallSiteFn;      // llvm.eh.sjlj.callsite intrinsic.
  Constant *FuncCtxFn;       // llvm.eh.sjlj.functioncontext intrinsic.
  AllocaInst *FuncCtx;       // Stack slot holding the function context.

public:
  static char ID; // Pass identification, replacement for typeid
  explicit SjLjEHPrepare(const TargetMachine *TM) : FunctionPass(ID), TM(TM) {}
  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;

  // This pass requires no analyses and preserves none explicitly.
  void getAnalysisUsage(AnalysisUsage &AU) const override {}
  const char *getPassName() const override {
    return "SJLJ Exception Handling preparation";
  }

private:
  bool setupEntryBlockAndCallSites(Function &F);
  void substituteLPadValues(LandingPadInst *LPI, Value *ExnVal, Value *SelVal);
  Value *setupFunctionContext(Function &F, ArrayRef<LandingPadInst *> LPads);
  void lowerIncomingArguments(Function &F);
  void lowerAcrossUnwindEdges(Function &F, ArrayRef<InvokeInst *> Invokes);
  void insertCallSiteStore(Instruction *I, int Number);
};
} // end anonymous namespace
82
char SjLjEHPrepare::ID = 0;

// Public Interface To the SjLjEHPrepare pass.
/// createSjLjEHPreparePass - Factory used by the target-independent code
/// generator to instantiate this pass for the given target machine.
FunctionPass *llvm::createSjLjEHPreparePass(const TargetMachine *TM) {
  return new SjLjEHPrepare(TM);
}
// doInitialization - Set up declarations and types needed to process
// exceptions.
bool SjLjEHPrepare::doInitialization(Module &M) {
  // Build the function context structure.
  // builtin_setjmp uses a five word jbuf
  Type *VoidPtrTy = Type::getInt8PtrTy(M.getContext());
  Type *Int32Ty = Type::getInt32Ty(M.getContext());
  FunctionContextTy = StructType::get(VoidPtrTy, // __prev
                                      Int32Ty, // call_site
                                      ArrayType::get(Int32Ty, 4), // __data
                                      VoidPtrTy, // __personality
                                      VoidPtrTy, // __lsda
                                      ArrayType::get(VoidPtrTy, 5), // __jbuf
                                      nullptr);
  // Runtime entry points that add/remove the context on the unwinder's
  // per-thread list; both take a pointer to the context struct above.
  RegisterFn = M.getOrInsertFunction(
      "_Unwind_SjLj_Register", Type::getVoidTy(M.getContext()),
      PointerType::getUnqual(FunctionContextTy), (Type *)nullptr);
  UnregisterFn = M.getOrInsertFunction(
      "_Unwind_SjLj_Unregister", Type::getVoidTy(M.getContext()),
      PointerType::getUnqual(FunctionContextTy), (Type *)nullptr);
  // Intrinsics used when populating the context and marking call sites.
  FrameAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::frameaddress);
  StackAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::stacksave);
  StackRestoreFn = Intrinsic::getDeclaration(&M, Intrinsic::stackrestore);
  BuiltinSetjmpFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_setjmp);
  LSDAAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_lsda);
  CallSiteFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_callsite);
  FuncCtxFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_functioncontext);
  // Reset per-module state; filled in lazily by setupFunctionContext.
  PersonalityFn = nullptr;

  return true;
}
120
121 /// insertCallSiteStore - Insert a store of the call-site value to the
122 /// function context
123 void SjLjEHPrepare::insertCallSiteStore(Instruction *I, int Number) {
124 IRBuilder<> Builder(I);
125
126 // Get a reference to the call_site field.
127 Type *Int32Ty = Type::getInt32Ty(I->getContext());
128 Value *Zero = ConstantInt::get(Int32Ty, 0);
129 Value *One = ConstantInt::get(Int32Ty, 1);
130 Value *Idxs[2] = { Zero, One };
131 Value *CallSite = Builder.CreateGEP(FuncCtx, Idxs, "call_site");
132
133 // Insert a store of the call-site number
134 ConstantInt *CallSiteNoC =
135 ConstantInt::get(Type::getInt32Ty(I->getContext()), Number);
136 Builder.CreateStore(CallSiteNoC, CallSite, true /*volatile*/);
137 }
138
139 /// MarkBlocksLiveIn - Insert BB and all of its predescessors into LiveBBs until
140 /// we reach blocks we've already seen.
141 static void MarkBlocksLiveIn(BasicBlock *BB,
142 SmallPtrSetImpl<BasicBlock *> &LiveBBs) {
143 if (!LiveBBs.insert(BB).second)
144 return; // already been here.
145
146 for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
147 MarkBlocksLiveIn(*PI, LiveBBs);
148 }
149
150 /// substituteLPadValues - Substitute the values returned by the landingpad
151 /// instruction with those returned by the personality function.
152 void SjLjEHPrepare::substituteLPadValues(LandingPadInst *LPI, Value *ExnVal,
153 Value *SelVal) {
154 SmallVector<Value *, 8> UseWorkList(LPI->user_begin(), LPI->user_end());
155 while (!UseWorkList.empty()) {
156 Value *Val = UseWorkList.pop_back_val();
157 ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(Val);
158 if (!EVI)
159 continue;
160 if (EVI->getNumIndices() != 1)
161 continue;
162 if (*EVI->idx_begin() == 0)
163 EVI->replaceAllUsesWith(ExnVal);
164 else if (*EVI->idx_begin() == 1)
165 EVI->replaceAllUsesWith(SelVal);
166 if (EVI->getNumUses() == 0)
167 EVI->eraseFromParent();
168 }
169
170 if (LPI->getNumUses() == 0)
171 return;
172
173 // There are still some uses of LPI. Construct an aggregate with the exception
174 // values and replace the LPI with that aggregate.
175 Type *LPadType = LPI->getType();
176 Value *LPadVal = UndefValue::get(LPadType);
177 IRBuilder<> Builder(
178 std::next(BasicBlock::iterator(cast<Instruction>(SelVal))));
179 LPadVal = Builder.CreateInsertValue(LPadVal, ExnVal, 0, "lpad.val");
180 LPadVal = Builder.CreateInsertValue(LPadVal, SelVal, 1, "lpad.val");
181
182 LPI->replaceAllUsesWith(LPadVal);
183 }
184
/// setupFunctionContext - Allocate the function context on the stack and fill
/// it with all of the data that we know at this point.
Value *SjLjEHPrepare::setupFunctionContext(Function &F,
                                           ArrayRef<LandingPadInst *> LPads) {
  BasicBlock *EntryBB = F.begin();

  // Create an alloca for the incoming jump buffer ptr and the new jump buffer
  // that needs to be restored on all exits from the function. This is an alloca
  // because the value needs to be added to the global context list.
  const TargetLowering *TLI = TM->getSubtargetImpl()->getTargetLowering();
  unsigned Align =
      TLI->getDataLayout()->getPrefTypeAlignment(FunctionContextTy);
  FuncCtx = new AllocaInst(FunctionContextTy, nullptr, Align, "fn_context",
                           EntryBB->begin());

  // Fill in the function context structure.
  for (unsigned I = 0, E = LPads.size(); I != E; ++I) {
    LandingPadInst *LPI = LPads[I];
    IRBuilder<> Builder(LPI->getParent()->getFirstInsertionPt());

    // Reference the __data field (struct index 2).
    Value *FCData = Builder.CreateConstGEP2_32(FuncCtx, 0, 2, "__data");

    // The exception values come back in context->__data[0]. The loads are
    // volatile — presumably because the unwind runtime writes these slots
    // outside the compiler's view; confirm against the SjLj runtime.
    Value *ExceptionAddr =
        Builder.CreateConstGEP2_32(FCData, 0, 0, "exception_gep");
    Value *ExnVal = Builder.CreateLoad(ExceptionAddr, true, "exn_val");
    ExnVal = Builder.CreateIntToPtr(ExnVal, Builder.getInt8PtrTy());

    // The selector value lives in context->__data[1].
    Value *SelectorAddr =
        Builder.CreateConstGEP2_32(FCData, 0, 1, "exn_selector_gep");
    Value *SelVal = Builder.CreateLoad(SelectorAddr, true, "exn_selector_val");

    // Redirect users of the landingpad result to the loaded values.
    substituteLPadValues(LPI, ExnVal, SelVal);
  }

  // Personality function: cache the first landing pad's personality and store
  // it into the __personality field (struct index 3).
  IRBuilder<> Builder(EntryBB->getTerminator());
  if (!PersonalityFn)
    PersonalityFn = LPads[0]->getPersonalityFn();
  Value *PersonalityFieldPtr =
      Builder.CreateConstGEP2_32(FuncCtx, 0, 3, "pers_fn_gep");
  Builder.CreateStore(
      Builder.CreateBitCast(PersonalityFn, Builder.getInt8PtrTy()),
      PersonalityFieldPtr, /*isVolatile=*/true);

  // LSDA address: store into the __lsda field (struct index 4).
  Value *LSDA = Builder.CreateCall(LSDAAddrFn, "lsda_addr");
  Value *LSDAFieldPtr = Builder.CreateConstGEP2_32(FuncCtx, 0, 4, "lsda_gep");
  Builder.CreateStore(LSDA, LSDAFieldPtr, /*isVolatile=*/true);

  return FuncCtx;
}
238
239 /// lowerIncomingArguments - To avoid having to handle incoming arguments
240 /// specially, we lower each arg to a copy instruction in the entry block. This
241 /// ensures that the argument value itself cannot be live out of the entry
242 /// block.
243 void SjLjEHPrepare::lowerIncomingArguments(Function &F) {
244 BasicBlock::iterator AfterAllocaInsPt = F.begin()->begin();
245 while (isa<AllocaInst>(AfterAllocaInsPt) &&
246 isa<ConstantInt>(cast<AllocaInst>(AfterAllocaInsPt)->getArraySize()))
247 ++AfterAllocaInsPt;
248
249 for (Function::arg_iterator AI = F.arg_begin(), AE = F.arg_end(); AI != AE;
250 ++AI) {
251 Type *Ty = AI->getType();
252
253 // Use 'select i8 true, %arg, undef' to simulate a 'no-op' instruction.
254 Value *TrueValue = ConstantInt::getTrue(F.getContext());
255 Value *UndefValue = UndefValue::get(Ty);
256 Instruction *SI = SelectInst::Create(TrueValue, AI, UndefValue,
257 AI->getName() + ".tmp",
258 AfterAllocaInsPt);
259 AI->replaceAllUsesWith(SI);
260
261 // Reset the operand, because it was clobbered by the RAUW above.
262 SI->setOperand(1, AI);
263 }
264 }
265
/// lowerAcrossUnwindEdges - Find all variables which are alive across an unwind
/// edge and spill them.
void SjLjEHPrepare::lowerAcrossUnwindEdges(Function &F,
                                           ArrayRef<InvokeInst *> Invokes) {
  // Finally, scan the code looking for instructions with bad live ranges.
  for (Function::iterator BB = F.begin(), BBE = F.end(); BB != BBE; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), IIE = BB->end(); II != IIE;
         ++II) {
      // Ignore obvious cases we don't have to handle. In particular, most
      // instructions either have no uses or only have a single use inside the
      // current block. Ignore them quickly.
      Instruction *Inst = II;
      if (Inst->use_empty())
        continue;
      // A single same-block, non-PHI use can never be live across an unwind
      // edge (PHI uses count as uses in the predecessor block, hence the
      // exclusion).
      if (Inst->hasOneUse() &&
          cast<Instruction>(Inst->user_back())->getParent() == BB &&
          !isa<PHINode>(Inst->user_back()))
        continue;

      // If this is an alloca in the entry block, it's not a real register
      // value.
      if (AllocaInst *AI = dyn_cast<AllocaInst>(Inst))
        if (isa<ConstantInt>(AI->getArraySize()) && BB == F.begin())
          continue;

      // Avoid iterator invalidation by copying users to a temporary vector.
      // Only out-of-block (or PHI) users matter for liveness beyond BB.
      SmallVector<Instruction *, 16> Users;
      for (User *U : Inst->users()) {
        Instruction *UI = cast<Instruction>(U);
        if (UI->getParent() != BB || isa<PHINode>(UI))
          Users.push_back(UI);
      }

      // Find all of the blocks that this value is live in.
      SmallPtrSet<BasicBlock *, 64> LiveBBs;
      LiveBBs.insert(Inst->getParent());
      while (!Users.empty()) {
        Instruction *U = Users.back();
        Users.pop_back();

        if (!isa<PHINode>(U)) {
          MarkBlocksLiveIn(U->getParent(), LiveBBs);
        } else {
          // Uses for a PHI node occur in their predecessor block.
          PHINode *PN = cast<PHINode>(U);
          for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
            if (PN->getIncomingValue(i) == Inst)
              MarkBlocksLiveIn(PN->getIncomingBlock(i), LiveBBs);
        }
      }

      // Now that we know all of the blocks that this thing is live in, see if
      // it includes any of the unwind locations.
      bool NeedsSpill = false;
      for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
        BasicBlock *UnwindBlock = Invokes[i]->getUnwindDest();
        if (UnwindBlock != BB && LiveBBs.count(UnwindBlock)) {
          DEBUG(dbgs() << "SJLJ Spill: " << *Inst << " around "
                       << UnwindBlock->getName() << "\n");
          NeedsSpill = true;
          break;
        }
      }

      // If we decided we need a spill, do it.
      // FIXME: Spilling this way is overkill, as it forces all uses of
      // the value to be reloaded from the stack slot, even those that aren't
      // in the unwind blocks. We should be more selective.
      if (NeedsSpill) {
        DemoteRegToStack(*Inst, true);
        ++NumSpilled;
      }
    }
  }

  // Go through the landing pads and remove any PHIs there.
  for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
    BasicBlock *UnwindBlock = Invokes[i]->getUnwindDest();
    LandingPadInst *LPI = UnwindBlock->getLandingPadInst();

    // Place PHIs into a set to avoid invalidating the iterator.
    SmallPtrSet<PHINode *, 8> PHIsToDemote;
    for (BasicBlock::iterator PN = UnwindBlock->begin(); isa<PHINode>(PN); ++PN)
      PHIsToDemote.insert(cast<PHINode>(PN));
    if (PHIsToDemote.empty())
      continue;

    // Demote the PHIs to the stack.
    for (PHINode *PN : PHIsToDemote)
      DemotePHIToStack(PN);

    // Move the landingpad instruction back to the top of the landing pad block.
    LPI->moveBefore(UnwindBlock->begin());
  }
}
361
/// setupEntryBlockAndCallSites - Setup the entry block by creating and filling
/// the function context and marking the call sites with the appropriate
/// values. These values are used by the DWARF EH emitter.
/// Returns true if the function was modified (i.e. it contained invokes).
bool SjLjEHPrepare::setupEntryBlockAndCallSites(Function &F) {
  SmallVector<ReturnInst *, 16> Returns;
  SmallVector<InvokeInst *, 16> Invokes;
  SmallSetVector<LandingPadInst *, 16> LPads;

  // Look through the terminators of the basic blocks to find invokes.
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
    if (InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      if (Function *Callee = II->getCalledFunction())
        if (Callee->isIntrinsic() &&
            Callee->getIntrinsicID() == Intrinsic::donothing) {
          // Remove the NOP invoke.
          BranchInst::Create(II->getNormalDest(), II);
          II->eraseFromParent();
          continue;
        }

      Invokes.push_back(II);
      LPads.insert(II->getUnwindDest()->getLandingPadInst());
    } else if (ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator())) {
      Returns.push_back(RI);
    }

  // Nothing to do for functions without invokes.
  if (Invokes.empty())
    return false;

  NumInvokes += Invokes.size();

  // Normalize the IR first: copy arguments into the entry block and spill
  // values live across unwind edges.
  lowerIncomingArguments(F);
  lowerAcrossUnwindEdges(F, Invokes);

  Value *FuncCtx =
      setupFunctionContext(F, makeArrayRef(LPads.begin(), LPads.end()));
  BasicBlock *EntryBB = F.begin();
  IRBuilder<> Builder(EntryBB->getTerminator());

  // Get a reference to the jump buffer (__jbuf, struct index 5).
  Value *JBufPtr = Builder.CreateConstGEP2_32(FuncCtx, 0, 5, "jbuf_gep");

  // Save the frame pointer into __jbuf[0].
  Value *FramePtr = Builder.CreateConstGEP2_32(JBufPtr, 0, 0, "jbuf_fp_gep");

  Value *Val = Builder.CreateCall(FrameAddrFn, Builder.getInt32(0), "fp");
  Builder.CreateStore(Val, FramePtr, /*isVolatile=*/true);

  // Save the stack pointer into __jbuf[2].
  Value *StackPtr = Builder.CreateConstGEP2_32(JBufPtr, 0, 2, "jbuf_sp_gep");

  Val = Builder.CreateCall(StackAddrFn, "sp");
  Builder.CreateStore(Val, StackPtr, /*isVolatile=*/true);

  // Call the setjmp intrinsic. It fills in the rest of the jmpbuf.
  Value *SetjmpArg = Builder.CreateBitCast(JBufPtr, Builder.getInt8PtrTy());
  Builder.CreateCall(BuiltinSetjmpFn, SetjmpArg);

  // Store a pointer to the function context so that the back-end will know
  // where to look for it.
  Value *FuncCtxArg = Builder.CreateBitCast(FuncCtx, Builder.getInt8PtrTy());
  Builder.CreateCall(FuncCtxFn, FuncCtxArg);

  // At this point, we are all set up, update the invoke instructions to mark
  // their call_site values. Call sites are numbered from 1.
  for (unsigned I = 0, E = Invokes.size(); I != E; ++I) {
    insertCallSiteStore(Invokes[I], I + 1);

    ConstantInt *CallSiteNum =
        ConstantInt::get(Type::getInt32Ty(F.getContext()), I + 1);

    // Record the call site value for the back end so it stays associated with
    // the invoke.
    CallInst::Create(CallSiteFn, CallSiteNum, "", Invokes[I]);
  }

  // Mark call instructions that aren't nounwind as no-action (call_site ==
  // -1). Skip the entry block, as prior to then, no function context has been
  // created for this function and any unexpected exceptions thrown will go
  // directly to the caller's context, which is what we want anyway, so no need
  // to do anything here.
  // Note the pre-increment in the loop condition: it deliberately starts the
  // walk at the second basic block.
  for (Function::iterator BB = F.begin(), E = F.end(); ++BB != E;)
    for (BasicBlock::iterator I = BB->begin(), end = BB->end(); I != end; ++I)
      if (CallInst *CI = dyn_cast<CallInst>(I)) {
        if (!CI->doesNotThrow())
          insertCallSiteStore(CI, -1);
      } else if (ResumeInst *RI = dyn_cast<ResumeInst>(I)) {
        insertCallSiteStore(RI, -1);
      }

  // Register the function context and make sure it's known to not throw
  CallInst *Register =
      CallInst::Create(RegisterFn, FuncCtx, "", EntryBB->getTerminator());
  Register->setDoesNotThrow();

  // Following any allocas not in the entry block, update the saved SP in the
  // jmpbuf to the new value.
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    if (BB == F.begin())
      continue;
    for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
      // Re-save SP after dynamic allocas and after explicit stackrestores.
      if (CallInst *CI = dyn_cast<CallInst>(I)) {
        if (CI->getCalledFunction() != StackRestoreFn)
          continue;
      } else if (!isa<AllocaInst>(I)) {
        continue;
      }
      Instruction *StackAddr = CallInst::Create(StackAddrFn, "sp");
      StackAddr->insertAfter(I);
      Instruction *StoreStackAddr = new StoreInst(StackAddr, StackPtr, true);
      StoreStackAddr->insertAfter(StackAddr);
    }
  }

  // Finally, for any returns from this function, if this function contains an
  // invoke, add a call to unregister the function context.
  for (unsigned I = 0, E = Returns.size(); I != E; ++I)
    CallInst::Create(UnregisterFn, FuncCtx, "", Returns[I]);

  return true;
}
483
484 bool SjLjEHPrepare::runOnFunction(Function &F) {
485 bool Res = setupEntryBlockAndCallSites(F);
486 return Res;
487 }