]>
Commit | Line | Data |
---|---|---|
223e47cc LB |
1 | //===-- RegAllocFast.cpp - A fast register allocator for debug code -------===// |
2 | // | |
3 | // The LLVM Compiler Infrastructure | |
4 | // | |
5 | // This file is distributed under the University of Illinois Open Source | |
6 | // License. See LICENSE.TXT for details. | |
7 | // | |
8 | //===----------------------------------------------------------------------===// | |
9 | // | |
10 | // This register allocator allocates registers to a basic block at a time, | |
11 | // attempting to keep values in registers and reusing registers as appropriate. | |
12 | // | |
13 | //===----------------------------------------------------------------------===// | |
14 | ||
970d7e83 LB |
15 | #include "llvm/CodeGen/Passes.h" |
16 | #include "llvm/ADT/DenseMap.h" | |
17 | #include "llvm/ADT/IndexedMap.h" | |
18 | #include "llvm/ADT/STLExtras.h" | |
19 | #include "llvm/ADT/SmallSet.h" | |
20 | #include "llvm/ADT/SmallVector.h" | |
21 | #include "llvm/ADT/SparseSet.h" | |
22 | #include "llvm/ADT/Statistic.h" | |
23 | #include "llvm/CodeGen/MachineFrameInfo.h" | |
223e47cc LB |
24 | #include "llvm/CodeGen/MachineFunctionPass.h" |
25 | #include "llvm/CodeGen/MachineInstr.h" | |
26 | #include "llvm/CodeGen/MachineInstrBuilder.h" | |
223e47cc | 27 | #include "llvm/CodeGen/MachineRegisterInfo.h" |
223e47cc LB |
28 | #include "llvm/CodeGen/RegAllocRegistry.h" |
29 | #include "llvm/CodeGen/RegisterClassInfo.h" | |
970d7e83 | 30 | #include "llvm/IR/BasicBlock.h" |
223e47cc LB |
31 | #include "llvm/Support/CommandLine.h" |
32 | #include "llvm/Support/Debug.h" | |
33 | #include "llvm/Support/ErrorHandling.h" | |
34 | #include "llvm/Support/raw_ostream.h" | |
970d7e83 | 35 | #include "llvm/Target/TargetInstrInfo.h" |
1a4d82fc | 36 | #include "llvm/Target/TargetSubtargetInfo.h" |
223e47cc LB |
37 | #include <algorithm> |
38 | using namespace llvm; | |
39 | ||
1a4d82fc JJ |
40 | #define DEBUG_TYPE "regalloc" |
41 | ||
223e47cc LB |
42 | STATISTIC(NumStores, "Number of stores added"); |
43 | STATISTIC(NumLoads , "Number of loads added"); | |
44 | STATISTIC(NumCopies, "Number of copies coalesced"); | |
45 | ||
46 | static RegisterRegAlloc | |
47 | fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator); | |
48 | ||
49 | namespace { | |
50 | class RAFast : public MachineFunctionPass { | |
51 | public: | |
52 | static char ID; | |
53 | RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1), | |
54 | isBulkSpilling(false) {} | |
55 | private: | |
223e47cc LB |
56 | MachineFunction *MF; |
57 | MachineRegisterInfo *MRI; | |
58 | const TargetRegisterInfo *TRI; | |
59 | const TargetInstrInfo *TII; | |
60 | RegisterClassInfo RegClassInfo; | |
61 | ||
62 | // Basic block currently being allocated. | |
63 | MachineBasicBlock *MBB; | |
64 | ||
65 | // StackSlotForVirtReg - Maps virtual regs to the frame index where these | |
66 | // values are spilled. | |
67 | IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg; | |
68 | ||
69 | // Everything we know about a live virtual register. | |
70 | struct LiveReg { | |
71 | MachineInstr *LastUse; // Last instr to use reg. | |
72 | unsigned VirtReg; // Virtual register number. | |
73 | unsigned PhysReg; // Currently held here. | |
74 | unsigned short LastOpNum; // OpNum on LastUse. | |
75 | bool Dirty; // Register needs spill. | |
76 | ||
77 | explicit LiveReg(unsigned v) | |
1a4d82fc | 78 | : LastUse(nullptr), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false){} |
223e47cc LB |
79 | |
80 | unsigned getSparseSetIndex() const { | |
81 | return TargetRegisterInfo::virtReg2Index(VirtReg); | |
82 | } | |
83 | }; | |
84 | ||
85 | typedef SparseSet<LiveReg> LiveRegMap; | |
86 | ||
87 | // LiveVirtRegs - This map contains entries for each virtual register | |
88 | // that is currently available in a physical register. | |
89 | LiveRegMap LiveVirtRegs; | |
90 | ||
91 | DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap; | |
92 | ||
93 | // RegState - Track the state of a physical register. | |
94 | enum RegState { | |
95 | // A disabled register is not available for allocation, but an alias may | |
96 | // be in use. A register can only be moved out of the disabled state if | |
97 | // all aliases are disabled. | |
98 | regDisabled, | |
99 | ||
100 | // A free register is not currently in use and can be allocated | |
101 | // immediately without checking aliases. | |
102 | regFree, | |
103 | ||
104 | // A reserved register has been assigned explicitly (e.g., setting up a | |
105 | // call parameter), and it remains reserved until it is used. | |
106 | regReserved | |
107 | ||
108 | // A register state may also be a virtual register number, indication that | |
109 | // the physical register is currently allocated to a virtual register. In | |
110 | // that case, LiveVirtRegs contains the inverse mapping. | |
111 | }; | |
112 | ||
113 | // PhysRegState - One of the RegState enums, or a virtreg. | |
114 | std::vector<unsigned> PhysRegState; | |
115 | ||
970d7e83 LB |
116 | // Set of register units. |
117 | typedef SparseSet<unsigned> UsedInInstrSet; | |
118 | ||
119 | // Set of register units that are used in the current instruction, and so | |
120 | // cannot be allocated. | |
121 | UsedInInstrSet UsedInInstr; | |
122 | ||
123 | // Mark a physreg as used in this instruction. | |
124 | void markRegUsedInInstr(unsigned PhysReg) { | |
125 | for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) | |
126 | UsedInInstr.insert(*Units); | |
127 | } | |
128 | ||
129 | // Check if a physreg or any of its aliases are used in this instruction. | |
130 | bool isRegUsedInInstr(unsigned PhysReg) const { | |
131 | for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) | |
132 | if (UsedInInstr.count(*Units)) | |
133 | return true; | |
134 | return false; | |
135 | } | |
223e47cc LB |
136 | |
137 | // SkippedInstrs - Descriptors of instructions whose clobber list was | |
138 | // ignored because all registers were spilled. It is still necessary to | |
139 | // mark all the clobbered registers as used by the function. | |
140 | SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs; | |
141 | ||
142 | // isBulkSpilling - This flag is set when LiveRegMap will be cleared | |
143 | // completely after spilling all live registers. LiveRegMap entries should | |
144 | // not be erased. | |
145 | bool isBulkSpilling; | |
146 | ||
1a4d82fc | 147 | enum : unsigned { |
223e47cc LB |
148 | spillClean = 1, |
149 | spillDirty = 100, | |
150 | spillImpossible = ~0u | |
151 | }; | |
152 | public: | |
1a4d82fc | 153 | const char *getPassName() const override { |
223e47cc LB |
154 | return "Fast Register Allocator"; |
155 | } | |
156 | ||
1a4d82fc | 157 | void getAnalysisUsage(AnalysisUsage &AU) const override { |
223e47cc LB |
158 | AU.setPreservesCFG(); |
159 | MachineFunctionPass::getAnalysisUsage(AU); | |
160 | } | |
161 | ||
162 | private: | |
1a4d82fc | 163 | bool runOnMachineFunction(MachineFunction &Fn) override; |
223e47cc LB |
164 | void AllocateBasicBlock(); |
165 | void handleThroughOperands(MachineInstr *MI, | |
166 | SmallVectorImpl<unsigned> &VirtDead); | |
167 | int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC); | |
168 | bool isLastUseOfLocalReg(MachineOperand&); | |
169 | ||
170 | void addKillFlag(const LiveReg&); | |
171 | void killVirtReg(LiveRegMap::iterator); | |
172 | void killVirtReg(unsigned VirtReg); | |
173 | void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator); | |
174 | void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg); | |
175 | ||
176 | void usePhysReg(MachineOperand&); | |
177 | void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState); | |
178 | unsigned calcSpillCost(unsigned PhysReg) const; | |
179 | void assignVirtToPhysReg(LiveReg&, unsigned PhysReg); | |
180 | LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) { | |
181 | return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg)); | |
182 | } | |
183 | LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const { | |
184 | return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg)); | |
185 | } | |
186 | LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg); | |
187 | LiveRegMap::iterator allocVirtReg(MachineInstr *MI, LiveRegMap::iterator, | |
188 | unsigned Hint); | |
189 | LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum, | |
190 | unsigned VirtReg, unsigned Hint); | |
191 | LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum, | |
192 | unsigned VirtReg, unsigned Hint); | |
970d7e83 | 193 | void spillAll(MachineBasicBlock::iterator MI); |
223e47cc | 194 | bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg); |
223e47cc LB |
195 | }; |
196 | char RAFast::ID = 0; | |
197 | } | |
198 | ||
199 | /// getStackSpaceFor - This allocates space for the specified virtual register | |
200 | /// to be held on the stack. | |
201 | int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) { | |
202 | // Find the location Reg would belong... | |
203 | int SS = StackSlotForVirtReg[VirtReg]; | |
204 | if (SS != -1) | |
205 | return SS; // Already has space allocated? | |
206 | ||
207 | // Allocate a new stack object for this spill location... | |
208 | int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(), | |
209 | RC->getAlignment()); | |
210 | ||
211 | // Assign the slot. | |
212 | StackSlotForVirtReg[VirtReg] = FrameIdx; | |
213 | return FrameIdx; | |
214 | } | |
215 | ||
216 | /// isLastUseOfLocalReg - Return true if MO is the only remaining reference to | |
217 | /// its virtual register, and it is guaranteed to be a block-local register. | |
218 | /// | |
219 | bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) { | |
220 | // If the register has ever been spilled or reloaded, we conservatively assume | |
221 | // it is a global register used in multiple blocks. | |
222 | if (StackSlotForVirtReg[MO.getReg()] != -1) | |
223 | return false; | |
224 | ||
225 | // Check that the use/def chain has exactly one operand - MO. | |
226 | MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg()); | |
1a4d82fc | 227 | if (&*I != &MO) |
223e47cc LB |
228 | return false; |
229 | return ++I == MRI->reg_nodbg_end(); | |
230 | } | |
231 | ||
232 | /// addKillFlag - Set kill flags on last use of a virtual register. | |
233 | void RAFast::addKillFlag(const LiveReg &LR) { | |
234 | if (!LR.LastUse) return; | |
235 | MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum); | |
236 | if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) { | |
237 | if (MO.getReg() == LR.PhysReg) | |
238 | MO.setIsKill(); | |
239 | else | |
240 | LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true); | |
241 | } | |
242 | } | |
243 | ||
244 | /// killVirtReg - Mark virtreg as no longer available. | |
245 | void RAFast::killVirtReg(LiveRegMap::iterator LRI) { | |
246 | addKillFlag(*LRI); | |
247 | assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg && | |
248 | "Broken RegState mapping"); | |
249 | PhysRegState[LRI->PhysReg] = regFree; | |
250 | // Erase from LiveVirtRegs unless we're spilling in bulk. | |
251 | if (!isBulkSpilling) | |
252 | LiveVirtRegs.erase(LRI); | |
253 | } | |
254 | ||
255 | /// killVirtReg - Mark virtreg as no longer available. | |
256 | void RAFast::killVirtReg(unsigned VirtReg) { | |
257 | assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && | |
258 | "killVirtReg needs a virtual register"); | |
259 | LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg); | |
260 | if (LRI != LiveVirtRegs.end()) | |
261 | killVirtReg(LRI); | |
262 | } | |
263 | ||
264 | /// spillVirtReg - This method spills the value specified by VirtReg into the | |
265 | /// corresponding stack slot if needed. | |
266 | void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) { | |
267 | assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && | |
268 | "Spilling a physical register is illegal!"); | |
269 | LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg); | |
270 | assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register"); | |
271 | spillVirtReg(MI, LRI); | |
272 | } | |
273 | ||
274 | /// spillVirtReg - Do the actual work of spilling. | |
275 | void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, | |
276 | LiveRegMap::iterator LRI) { | |
277 | LiveReg &LR = *LRI; | |
278 | assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping"); | |
279 | ||
280 | if (LR.Dirty) { | |
281 | // If this physreg is used by the instruction, we want to kill it on the | |
282 | // instruction, not on the spill. | |
283 | bool SpillKill = LR.LastUse != MI; | |
284 | LR.Dirty = false; | |
285 | DEBUG(dbgs() << "Spilling " << PrintReg(LRI->VirtReg, TRI) | |
286 | << " in " << PrintReg(LR.PhysReg, TRI)); | |
287 | const TargetRegisterClass *RC = MRI->getRegClass(LRI->VirtReg); | |
288 | int FI = getStackSpaceFor(LRI->VirtReg, RC); | |
289 | DEBUG(dbgs() << " to stack slot #" << FI << "\n"); | |
290 | TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI); | |
291 | ++NumStores; // Update statistics | |
292 | ||
293 | // If this register is used by DBG_VALUE then insert new DBG_VALUE to | |
294 | // identify spilled location as the place to find corresponding variable's | |
295 | // value. | |
1a4d82fc | 296 | SmallVectorImpl<MachineInstr *> &LRIDbgValues = |
223e47cc LB |
297 | LiveDbgValueMap[LRI->VirtReg]; |
298 | for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) { | |
299 | MachineInstr *DBG = LRIDbgValues[li]; | |
85aaf69f SL |
300 | const MDNode *Var = DBG->getDebugVariable(); |
301 | const MDNode *Expr = DBG->getDebugExpression(); | |
1a4d82fc JJ |
302 | bool IsIndirect = DBG->isIndirectDebugValue(); |
303 | uint64_t Offset = IsIndirect ? DBG->getOperand(1).getImm() : 0; | |
223e47cc LB |
304 | DebugLoc DL; |
305 | if (MI == MBB->end()) { | |
306 | // If MI is at basic block end then use last instruction's location. | |
307 | MachineBasicBlock::iterator EI = MI; | |
308 | DL = (--EI)->getDebugLoc(); | |
1a4d82fc | 309 | } else |
223e47cc | 310 | DL = MI->getDebugLoc(); |
1a4d82fc JJ |
311 | MachineInstr *NewDV = |
312 | BuildMI(*MBB, MI, DL, TII->get(TargetOpcode::DBG_VALUE)) | |
85aaf69f SL |
313 | .addFrameIndex(FI) |
314 | .addImm(Offset) | |
315 | .addMetadata(Var) | |
316 | .addMetadata(Expr); | |
1a4d82fc JJ |
317 | assert(NewDV->getParent() == MBB && "dangling parent pointer"); |
318 | (void)NewDV; | |
319 | DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV); | |
223e47cc LB |
320 | } |
321 | // Now this register is spilled there is should not be any DBG_VALUE | |
322 | // pointing to this register because they are all pointing to spilled value | |
323 | // now. | |
324 | LRIDbgValues.clear(); | |
325 | if (SpillKill) | |
1a4d82fc | 326 | LR.LastUse = nullptr; // Don't kill register again |
223e47cc LB |
327 | } |
328 | killVirtReg(LRI); | |
329 | } | |
330 | ||
331 | /// spillAll - Spill all dirty virtregs without killing them. | |
970d7e83 | 332 | void RAFast::spillAll(MachineBasicBlock::iterator MI) { |
223e47cc LB |
333 | if (LiveVirtRegs.empty()) return; |
334 | isBulkSpilling = true; | |
335 | // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order | |
336 | // of spilling here is deterministic, if arbitrary. | |
337 | for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end(); | |
338 | i != e; ++i) | |
339 | spillVirtReg(MI, i); | |
340 | LiveVirtRegs.clear(); | |
341 | isBulkSpilling = false; | |
342 | } | |
343 | ||
344 | /// usePhysReg - Handle the direct use of a physical register. | |
345 | /// Check that the register is not used by a virtreg. | |
346 | /// Kill the physreg, marking it free. | |
347 | /// This may add implicit kills to MO->getParent() and invalidate MO. | |
348 | void RAFast::usePhysReg(MachineOperand &MO) { | |
349 | unsigned PhysReg = MO.getReg(); | |
350 | assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) && | |
351 | "Bad usePhysReg operand"); | |
970d7e83 | 352 | markRegUsedInInstr(PhysReg); |
223e47cc LB |
353 | switch (PhysRegState[PhysReg]) { |
354 | case regDisabled: | |
355 | break; | |
356 | case regReserved: | |
357 | PhysRegState[PhysReg] = regFree; | |
358 | // Fall through | |
359 | case regFree: | |
223e47cc LB |
360 | MO.setIsKill(); |
361 | return; | |
362 | default: | |
363 | // The physreg was allocated to a virtual register. That means the value we | |
364 | // wanted has been clobbered. | |
365 | llvm_unreachable("Instruction uses an allocated register"); | |
366 | } | |
367 | ||
368 | // Maybe a superregister is reserved? | |
369 | for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) { | |
370 | unsigned Alias = *AI; | |
371 | switch (PhysRegState[Alias]) { | |
372 | case regDisabled: | |
373 | break; | |
374 | case regReserved: | |
85aaf69f SL |
375 | // Either PhysReg is a subregister of Alias and we mark the |
376 | // whole register as free, or PhysReg is the superregister of | |
377 | // Alias and we mark all the aliases as disabled before freeing | |
378 | // PhysReg. | |
379 | // In the latter case, since PhysReg was disabled, this means that | |
380 | // its value is defined only by physical sub-registers. This check | |
381 | // is performed by the assert of the default case in this loop. | |
382 | // Note: The value of the superregister may only be partial | |
383 | // defined, that is why regDisabled is a valid state for aliases. | |
384 | assert((TRI->isSuperRegister(PhysReg, Alias) || | |
385 | TRI->isSuperRegister(Alias, PhysReg)) && | |
223e47cc | 386 | "Instruction is not using a subregister of a reserved register"); |
85aaf69f | 387 | // Fall through. |
223e47cc LB |
388 | case regFree: |
389 | if (TRI->isSuperRegister(PhysReg, Alias)) { | |
390 | // Leave the superregister in the working set. | |
85aaf69f | 391 | PhysRegState[Alias] = regFree; |
223e47cc LB |
392 | MO.getParent()->addRegisterKilled(Alias, TRI, true); |
393 | return; | |
394 | } | |
395 | // Some other alias was in the working set - clear it. | |
396 | PhysRegState[Alias] = regDisabled; | |
397 | break; | |
398 | default: | |
399 | llvm_unreachable("Instruction uses an alias of an allocated register"); | |
400 | } | |
401 | } | |
402 | ||
403 | // All aliases are disabled, bring register into working set. | |
404 | PhysRegState[PhysReg] = regFree; | |
223e47cc LB |
405 | MO.setIsKill(); |
406 | } | |
407 | ||
408 | /// definePhysReg - Mark PhysReg as reserved or free after spilling any | |
409 | /// virtregs. This is very similar to defineVirtReg except the physreg is | |
410 | /// reserved instead of allocated. | |
411 | void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg, | |
412 | RegState NewState) { | |
970d7e83 | 413 | markRegUsedInInstr(PhysReg); |
223e47cc LB |
414 | switch (unsigned VirtReg = PhysRegState[PhysReg]) { |
415 | case regDisabled: | |
416 | break; | |
417 | default: | |
418 | spillVirtReg(MI, VirtReg); | |
419 | // Fall through. | |
420 | case regFree: | |
421 | case regReserved: | |
422 | PhysRegState[PhysReg] = NewState; | |
423 | return; | |
424 | } | |
425 | ||
426 | // This is a disabled register, disable all aliases. | |
427 | PhysRegState[PhysReg] = NewState; | |
428 | for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) { | |
429 | unsigned Alias = *AI; | |
430 | switch (unsigned VirtReg = PhysRegState[Alias]) { | |
431 | case regDisabled: | |
432 | break; | |
433 | default: | |
434 | spillVirtReg(MI, VirtReg); | |
435 | // Fall through. | |
436 | case regFree: | |
437 | case regReserved: | |
438 | PhysRegState[Alias] = regDisabled; | |
439 | if (TRI->isSuperRegister(PhysReg, Alias)) | |
440 | return; | |
441 | break; | |
442 | } | |
443 | } | |
444 | } | |
445 | ||
446 | ||
447 | // calcSpillCost - Return the cost of spilling clearing out PhysReg and | |
448 | // aliases so it is free for allocation. | |
449 | // Returns 0 when PhysReg is free or disabled with all aliases disabled - it | |
450 | // can be allocated directly. | |
451 | // Returns spillImpossible when PhysReg or an alias can't be spilled. | |
452 | unsigned RAFast::calcSpillCost(unsigned PhysReg) const { | |
970d7e83 | 453 | if (isRegUsedInInstr(PhysReg)) { |
223e47cc LB |
454 | DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n"); |
455 | return spillImpossible; | |
456 | } | |
457 | switch (unsigned VirtReg = PhysRegState[PhysReg]) { | |
458 | case regDisabled: | |
459 | break; | |
460 | case regFree: | |
461 | return 0; | |
462 | case regReserved: | |
463 | DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding " | |
464 | << PrintReg(PhysReg, TRI) << " is reserved already.\n"); | |
465 | return spillImpossible; | |
466 | default: { | |
467 | LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg); | |
468 | assert(I != LiveVirtRegs.end() && "Missing VirtReg entry"); | |
469 | return I->Dirty ? spillDirty : spillClean; | |
470 | } | |
471 | } | |
472 | ||
473 | // This is a disabled register, add up cost of aliases. | |
474 | DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n"); | |
475 | unsigned Cost = 0; | |
476 | for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) { | |
477 | unsigned Alias = *AI; | |
223e47cc LB |
478 | switch (unsigned VirtReg = PhysRegState[Alias]) { |
479 | case regDisabled: | |
480 | break; | |
481 | case regFree: | |
482 | ++Cost; | |
483 | break; | |
484 | case regReserved: | |
485 | return spillImpossible; | |
486 | default: { | |
487 | LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg); | |
488 | assert(I != LiveVirtRegs.end() && "Missing VirtReg entry"); | |
489 | Cost += I->Dirty ? spillDirty : spillClean; | |
490 | break; | |
491 | } | |
492 | } | |
493 | } | |
494 | return Cost; | |
495 | } | |
496 | ||
497 | ||
498 | /// assignVirtToPhysReg - This method updates local state so that we know | |
499 | /// that PhysReg is the proper container for VirtReg now. The physical | |
500 | /// register must not be used for anything else when this is called. | |
501 | /// | |
502 | void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) { | |
503 | DEBUG(dbgs() << "Assigning " << PrintReg(LR.VirtReg, TRI) << " to " | |
504 | << PrintReg(PhysReg, TRI) << "\n"); | |
505 | PhysRegState[PhysReg] = LR.VirtReg; | |
506 | assert(!LR.PhysReg && "Already assigned a physreg"); | |
507 | LR.PhysReg = PhysReg; | |
508 | } | |
509 | ||
510 | RAFast::LiveRegMap::iterator | |
511 | RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) { | |
512 | LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg); | |
513 | assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared"); | |
514 | assignVirtToPhysReg(*LRI, PhysReg); | |
515 | return LRI; | |
516 | } | |
517 | ||
518 | /// allocVirtReg - Allocate a physical register for VirtReg. | |
519 | RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr *MI, | |
520 | LiveRegMap::iterator LRI, | |
521 | unsigned Hint) { | |
522 | const unsigned VirtReg = LRI->VirtReg; | |
523 | ||
524 | assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && | |
525 | "Can only allocate virtual registers"); | |
526 | ||
527 | const TargetRegisterClass *RC = MRI->getRegClass(VirtReg); | |
528 | ||
529 | // Ignore invalid hints. | |
530 | if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) || | |
970d7e83 | 531 | !RC->contains(Hint) || !MRI->isAllocatable(Hint))) |
223e47cc LB |
532 | Hint = 0; |
533 | ||
534 | // Take hint when possible. | |
535 | if (Hint) { | |
536 | // Ignore the hint if we would have to spill a dirty register. | |
537 | unsigned Cost = calcSpillCost(Hint); | |
538 | if (Cost < spillDirty) { | |
539 | if (Cost) | |
540 | definePhysReg(MI, Hint, regFree); | |
541 | // definePhysReg may kill virtual registers and modify LiveVirtRegs. | |
542 | // That invalidates LRI, so run a new lookup for VirtReg. | |
543 | return assignVirtToPhysReg(VirtReg, Hint); | |
544 | } | |
545 | } | |
546 | ||
970d7e83 | 547 | ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(RC); |
223e47cc LB |
548 | |
549 | // First try to find a completely free register. | |
970d7e83 | 550 | for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){ |
223e47cc | 551 | unsigned PhysReg = *I; |
970d7e83 | 552 | if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) { |
223e47cc LB |
553 | assignVirtToPhysReg(*LRI, PhysReg); |
554 | return LRI; | |
555 | } | |
556 | } | |
557 | ||
558 | DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from " | |
85aaf69f | 559 | << TRI->getRegClassName(RC) << "\n"); |
223e47cc LB |
560 | |
561 | unsigned BestReg = 0, BestCost = spillImpossible; | |
970d7e83 | 562 | for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){ |
223e47cc LB |
563 | unsigned Cost = calcSpillCost(*I); |
564 | DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n"); | |
565 | DEBUG(dbgs() << "\tCost: " << Cost << "\n"); | |
566 | DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n"); | |
567 | // Cost is 0 when all aliases are already disabled. | |
568 | if (Cost == 0) { | |
569 | assignVirtToPhysReg(*LRI, *I); | |
570 | return LRI; | |
571 | } | |
572 | if (Cost < BestCost) | |
573 | BestReg = *I, BestCost = Cost; | |
574 | } | |
575 | ||
576 | if (BestReg) { | |
577 | definePhysReg(MI, BestReg, regFree); | |
578 | // definePhysReg may kill virtual registers and modify LiveVirtRegs. | |
579 | // That invalidates LRI, so run a new lookup for VirtReg. | |
580 | return assignVirtToPhysReg(VirtReg, BestReg); | |
581 | } | |
582 | ||
583 | // Nothing we can do. Report an error and keep going with a bad allocation. | |
1a4d82fc JJ |
584 | if (MI->isInlineAsm()) |
585 | MI->emitError("inline assembly requires more registers than available"); | |
586 | else | |
587 | MI->emitError("ran out of registers during register allocation"); | |
223e47cc LB |
588 | definePhysReg(MI, *AO.begin(), regFree); |
589 | return assignVirtToPhysReg(VirtReg, *AO.begin()); | |
590 | } | |
591 | ||
592 | /// defineVirtReg - Allocate a register for VirtReg and mark it as dirty. | |
593 | RAFast::LiveRegMap::iterator | |
594 | RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum, | |
595 | unsigned VirtReg, unsigned Hint) { | |
596 | assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && | |
597 | "Not a virtual register"); | |
598 | LiveRegMap::iterator LRI; | |
599 | bool New; | |
1a4d82fc | 600 | std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg)); |
223e47cc LB |
601 | if (New) { |
602 | // If there is no hint, peek at the only use of this register. | |
603 | if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) && | |
604 | MRI->hasOneNonDBGUse(VirtReg)) { | |
1a4d82fc | 605 | const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg); |
223e47cc LB |
606 | // It's a copy, use the destination register as a hint. |
607 | if (UseMI.isCopyLike()) | |
608 | Hint = UseMI.getOperand(0).getReg(); | |
609 | } | |
610 | LRI = allocVirtReg(MI, LRI, Hint); | |
611 | } else if (LRI->LastUse) { | |
612 | // Redefining a live register - kill at the last use, unless it is this | |
613 | // instruction defining VirtReg multiple times. | |
614 | if (LRI->LastUse != MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse()) | |
615 | addKillFlag(*LRI); | |
616 | } | |
617 | assert(LRI->PhysReg && "Register not assigned"); | |
618 | LRI->LastUse = MI; | |
619 | LRI->LastOpNum = OpNum; | |
620 | LRI->Dirty = true; | |
970d7e83 | 621 | markRegUsedInInstr(LRI->PhysReg); |
223e47cc LB |
622 | return LRI; |
623 | } | |
624 | ||
625 | /// reloadVirtReg - Make sure VirtReg is available in a physreg and return it. | |
626 | RAFast::LiveRegMap::iterator | |
627 | RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum, | |
628 | unsigned VirtReg, unsigned Hint) { | |
629 | assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && | |
630 | "Not a virtual register"); | |
631 | LiveRegMap::iterator LRI; | |
632 | bool New; | |
1a4d82fc | 633 | std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg)); |
223e47cc LB |
634 | MachineOperand &MO = MI->getOperand(OpNum); |
635 | if (New) { | |
636 | LRI = allocVirtReg(MI, LRI, Hint); | |
637 | const TargetRegisterClass *RC = MRI->getRegClass(VirtReg); | |
638 | int FrameIndex = getStackSpaceFor(VirtReg, RC); | |
639 | DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into " | |
640 | << PrintReg(LRI->PhysReg, TRI) << "\n"); | |
641 | TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, RC, TRI); | |
642 | ++NumLoads; | |
643 | } else if (LRI->Dirty) { | |
644 | if (isLastUseOfLocalReg(MO)) { | |
645 | DEBUG(dbgs() << "Killing last use: " << MO << "\n"); | |
646 | if (MO.isUse()) | |
647 | MO.setIsKill(); | |
648 | else | |
649 | MO.setIsDead(); | |
650 | } else if (MO.isKill()) { | |
651 | DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n"); | |
652 | MO.setIsKill(false); | |
653 | } else if (MO.isDead()) { | |
654 | DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n"); | |
655 | MO.setIsDead(false); | |
656 | } | |
657 | } else if (MO.isKill()) { | |
658 | // We must remove kill flags from uses of reloaded registers because the | |
659 | // register would be killed immediately, and there might be a second use: | |
660 | // %foo = OR %x<kill>, %x | |
661 | // This would cause a second reload of %x into a different register. | |
662 | DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n"); | |
663 | MO.setIsKill(false); | |
664 | } else if (MO.isDead()) { | |
665 | DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n"); | |
666 | MO.setIsDead(false); | |
667 | } | |
668 | assert(LRI->PhysReg && "Register not assigned"); | |
669 | LRI->LastUse = MI; | |
670 | LRI->LastOpNum = OpNum; | |
970d7e83 | 671 | markRegUsedInInstr(LRI->PhysReg); |
223e47cc LB |
672 | return LRI; |
673 | } | |
674 | ||
675 | // setPhysReg - Change operand OpNum in MI the refer the PhysReg, considering | |
676 | // subregs. This may invalidate any operand pointers. | |
677 | // Return true if the operand kills its register. | |
678 | bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) { | |
679 | MachineOperand &MO = MI->getOperand(OpNum); | |
680 | bool Dead = MO.isDead(); | |
681 | if (!MO.getSubReg()) { | |
682 | MO.setReg(PhysReg); | |
683 | return MO.isKill() || Dead; | |
684 | } | |
685 | ||
686 | // Handle subregister index. | |
687 | MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0); | |
688 | MO.setSubReg(0); | |
689 | ||
690 | // A kill flag implies killing the full register. Add corresponding super | |
691 | // register kill. | |
692 | if (MO.isKill()) { | |
693 | MI->addRegisterKilled(PhysReg, TRI, true); | |
694 | return true; | |
695 | } | |
696 | ||
697 | // A <def,read-undef> of a sub-register requires an implicit def of the full | |
698 | // register. | |
699 | if (MO.isDef() && MO.isUndef()) | |
700 | MI->addRegisterDefined(PhysReg, TRI); | |
701 | ||
702 | return Dead; | |
703 | } | |
704 | ||
// Handle special instruction operand like early clobbers and tied ops when
// there are additional physreg defines.
//
// A "through" register is a virtual register that must remain allocated to
// the same physreg across the instruction: early clobbers, operands tied to
// a def, and partial redefines (a subreg def that also reads the register).
// These are allocated here, ahead of the normal use/def scans, so they can
// avoid colliding with preallocated physreg defs.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  // Pass 1: collect the set of virtual registers that live through MI.
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg).second)
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  // Pass 2: any through register currently assigned to a physreg (or an
  // alias of one) that MI also defines must be evicted first.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    markRegUsedInInstr(Reg);
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(MI, *AI, regFree);
    }
  }

  // Pass 3: reload tied uses and partial redefines. Their def operands are
  // intentionally left untouched so the later def scan still sees them.
  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->PhysReg);
    }
  }

  // Pass 4: allocate early-clobber defs; a dying early clobber is recorded
  // in VirtDead for the caller to kill after its def scan.
  DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
    unsigned PhysReg = LRI->PhysReg;
    if (setPhysReg(MI, i, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.clear();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
                 << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    markRegUsedInInstr(PartialDefs[i]);
}
796 | ||
// AllocateBasicBlock - Allocate registers for every instruction in MBB, one
// instruction at a time, spilling all live virtual registers at the end of
// the block. Trivially coalescable copies are collected and erased last.
void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
         E = MBB->livein_end(); I != E; ++I)
    if (MRI->isAllocatable(*I))
      definePhysReg(MII, *I, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
  while (MII != MBB->end()) {
    MachineInstr *MI = MII++;
    const MCInstrDesc &MCID = MI->getDesc();
    // Debug-build sanity dump: print the physreg state and verify that
    // LiveVirtRegs and PhysRegState are exact inverses of each other.
    DEBUG({
      dbgs() << "\n>> " << *MI << "Regs:";
      for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
        if (PhysRegState[Reg] == regDisabled) continue;
        dbgs() << " " << TRI->getName(Reg);
        switch(PhysRegState[Reg]) {
        case regFree:
          break;
        case regReserved:
          dbgs() << "*";
          break;
        default: {
          dbgs() << '=' << PrintReg(PhysRegState[Reg]);
          LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
          assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
          if (I->Dirty)
            dbgs() << "*";
          assert(I->PhysReg == Reg && "Bad inverse map");
          break;
        }
        }
      }
      dbgs() << '\n';
      // Check that LiveVirtRegs is the inverse.
      for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
           e = LiveVirtRegs.end(); i != e; ++i) {
        assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
               "Bad map key");
        assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
               "Bad map value");
        assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
      }
    });

    // Debug values are not allowed to change codegen in any way.
    if (MI->isDebugValue()) {
      // A vreg operand may be live (rewrite to its physreg), spilled
      // (rewrite the DBG_VALUE to reference the stack slot), or unallocatable
      // (drop the register). Rewriting to a frame index creates a new
      // instruction, so restart the operand scan on it.
      bool ScanDbgValue = true;
      while (ScanDbgValue) {
        ScanDbgValue = false;
        for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
          MachineOperand &MO = MI->getOperand(i);
          if (!MO.isReg()) continue;
          unsigned Reg = MO.getReg();
          if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
          LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
          if (LRI != LiveVirtRegs.end())
            setPhysReg(MI, i, LRI->PhysReg);
          else {
            int SS = StackSlotForVirtReg[Reg];
            if (SS == -1) {
              // We can't allocate a physreg for a DebugValue, sorry!
              DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
              MO.setReg(0);
            }
            else {
              // Modify DBG_VALUE now that the value is in a spill slot.
              bool IsIndirect = MI->isIndirectDebugValue();
              uint64_t Offset = IsIndirect ? MI->getOperand(1).getImm() : 0;
              const MDNode *Var = MI->getDebugVariable();
              const MDNode *Expr = MI->getDebugExpression();
              DebugLoc DL = MI->getDebugLoc();
              MachineBasicBlock *MBB = MI->getParent();
              MachineInstr *NewDV = BuildMI(*MBB, MBB->erase(MI), DL,
                                            TII->get(TargetOpcode::DBG_VALUE))
                  .addFrameIndex(SS)
                  .addImm(Offset)
                  .addMetadata(Var)
                  .addMetadata(Expr);
              DEBUG(dbgs() << "Modifying debug info due to spill:"
                           << "\t" << *NewDV);
              // Scan NewDV operands from the beginning.
              MI = NewDV;
              ScanDbgValue = true;
              break;
            }
          }
          // Remember this DBG_VALUE so it can be fixed up if Reg is later
          // spilled or re-assigned.
          LiveDbgValueMap[Reg].push_back(MI);
        }
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
    if (MI->isCopy()) {
      CopyDst = MI->getOperand(0).getReg();
      CopySrc = MI->getOperand(1).getReg();
      CopyDstSub = MI->getOperand(0).getSubReg();
      CopySrcSub = MI->getOperand(1).getSubReg();
    }

    // Track registers used by instruction.
    UsedInInstr.clear();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Make sure MRI knows about registers clobbered by regmasks.
      if (MO.isRegMask()) {
        MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
        continue;
      }
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
                              MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!MRI->isAllocatable(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }

    // The instruction may have virtual register operands that must be allocated
    // the same register at use-time and def-time: early clobbers and tied
    // operands. If there are also physical defs, these registers must avoid
    // both physical defs and uses, making them more constrained than normal
    // operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDst = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    // Record every register unit touched by the use scan as used by the
    // function.
    for (UsedInInstrSet::iterator
         I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
      MRI->setRegUnitUsed(*I);

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.clear();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        markRegUsedInInstr(Reg);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (MI->isCall()) {
      // Spill all virtregs before a call. This serves two purposes: 1. If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots, and 2. we don't have to wade through
      // all the <imp-def> operands on the call instruction.
      DefOpEnd = VirtOpEnd;
      DEBUG(dbgs() << " Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&MCID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!MRI->isAllocatable(Reg)) continue;
        definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing;
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    // Record register units touched by the def scan as used by the function.
    for (UsedInInstrSet::iterator
         I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
      MRI->setRegUnitUsed(*I);

    // A copy whose source and destination ended up in the same physreg (and
    // same subreg indices) is a no-op; queue it for deletion.
    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}
1078 | ||
/// runOnMachineFunction - Register allocate the whole function
///
/// Caches the target hooks, sizes the per-function maps, allocates each
/// basic block in turn, then strips all virtual-register bookkeeping.
/// Always returns true: the function is modified.
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: " << Fn.getName() << '\n');
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  // Reserved registers must be frozen before querying allocation order.
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  // UsedInInstr is keyed by register unit; size it for this target.
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  assert(!MRI->isSSA() && "regalloc requires leaving SSA");

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
  LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Add the clobber lists for all the instructions we skipped earlier.
  // (Calls recorded in SkippedInstrs had their <imp-def> operands skipped
  // during the def scan.)
  for (const MCInstrDesc *Desc : SkippedInstrs)
    if (const uint16_t *Defs = Desc->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  // Release per-function state so nothing leaks into the next function.
  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}
1122 | ||
1123 | FunctionPass *llvm::createFastRegisterAllocator() { | |
1124 | return new RAFast(); | |
1125 | } |