//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//

#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

#define DEBUG_TYPE "post-RA-sched"

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);

AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}

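// GetGroup - Return the group Reg currently belongs to by following the
// GroupNodes chain from the register's node until a self-referential node is
// reached (a find on the union-find structure, without path compression).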
unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

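// UnionGroups - Merge the groups of Reg1 and Reg2. Group 0 is special: it
// holds registers that must not be renamed, so if either register is already
// in group 0 that group becomes the parent of the union.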
unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}

unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}

AggressiveAntiDepBreaker::AggressiveAntiDepBreaker(
    MachineFunction &MFi, const RegisterClassInfo &RCI,
    TargetSubtargetInfo::RegClassVector &CriticalPathRCs)
    : AntiDepBreaker(), MF(MFi), MRI(MF.getRegInfo()),
      TII(MF.getSubtarget().getInstrInfo()),
      TRI(MF.getSubtarget().getRegisterInfo()), RegClassInfo(RCI),
      State(nullptr) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(!State);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = (!BB->empty() && BB->back().isReturn());
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
         SE = BB->succ_end(); SI != SE; ++SI)
    for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
           E = (*SI)->livein_end(); I != E; ++I) {
      for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In non-return blocks this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(BB);
  for (const MCPhysReg *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}

void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = nullptr;
}

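// Observe - Update liveness and grouping information for an instruction that
// will not be scheduled: any register still live at this point is merged into
// group 0 so it is no longer considered for renaming.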
void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI->dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}

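// IsImplicitDefUse - Return true if MO is an implicit operand that has a
// matching implicit use or def of the same register on MI, i.e. the
// register's value effectively passes through the instruction.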
bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
                                                MachineOperand& MO)
{
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = nullptr;
  if (MO.isDef())
    Op = MI->findRegisterUseOperand(Reg, true);
  else
    Op = MI->findRegisterDefOperand(Reg);

  return(Op && Op->isImplicit());
}

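// GetPassthruRegs - Collect the registers (and all of their subregisters)
// whose values pass through MI: defs tied to a use operand and implicit
// def/use pairs. These registers are not renamed independently of their uses.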
void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
                                               std::set<unsigned>& PassthruRegs) {
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true);
           SubRegs.isValid(); ++SubRegs)
        PassthruRegs.insert(*SubRegs);
    }
  }
}

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      unsigned Reg = P->getReg();
      if (RegSet.count(Reg) == 0) {
        Edges.push_back(&*P);
        RegSet.insert(Reg);
      }
    }
  }
}

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = nullptr;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : nullptr;
}

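// HandleLastUse - Reg is referenced at index KillIdx while walking the block
// bottom-up. If Reg is not already live, this reference is its last use:
// record the kill index, clear its references, and move it (and any non-live
// subregisters) into a fresh group so it becomes a rename candidate again.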
void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header) {
        dbgs() << header << TRI->getName(Reg); header = nullptr; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }
  // Repeat for subregisters.
  for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
    unsigned SubregReg = *SubRegs;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header) {
          dbgs() << header << TRI->getName(Reg); header = nullptr; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if (!header && footer) dbgs() << footer);
}

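// PrescanInstruction - Process the defs of MI (bottom-up): simulate last-uses
// for dead defs, pin defs with special allocation requirements to group 0,
// merge live aliases into the def's group, record register references, and
// update the def indices.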
void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                                  std::set<unsigned>& PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->isCall() || MI->hasExtraDefRegAllocReq() ||
        TII->isPredicated(MI)) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      // We need to be careful here not to define already-live super registers.
      // If the super register is already live, then this definition is not
      // a definition of the whole super register (just a partial insertion
      // into it). Earlier subregister definitions (which we've not yet visited
      // because we're iterating bottom-up) need to be linked to the same group
      // as this definition.
      if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI))
        continue;

      DefIndices[*AI] = Count;
    }
  }
}

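// ScanInstruction - Process the uses of MI (bottom-up): update live ranges via
// HandleLastUse, pin uses with special allocation requirements to group 0,
// record register references, and union all operands of a KILL instruction
// into a single group so they are renamed together.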
void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have a special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // FIXME: The issue with predicated instructions is more complex. We are
  // being conservative here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI->isCall() ||
    MI->hasExtraSrcRegAllocReq() ||
    TII->isPredicated(MI);

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // If the register wasn't previously live, this use is its kill in the
    // bottom-up walk. Forget the previous live-range information and start
    // a new live-range for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}

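// GetRenameRegisters - Return the set of registers that Reg could be renamed
// to, computed by intersecting the allocatable sets of the register classes
// of every recorded reference to Reg.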
BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  std::pair<std::multimap<unsigned,
            AggressiveAntiDepState::RegisterReference>::iterator,
            std::multimap<unsigned,
            AggressiveAntiDepState::RegisterReference>::iterator>
    Range = State->GetRegRefs().equal_range(Reg);
  for (std::multimap<unsigned,
         AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
         QE = Range.second; Q != QE; ++Q) {
    const TargetRegisterClass *RC = Q->second.RC;
    if (!RC) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << RC->getName());
  }

  return BV;
}

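// FindSuitableFreeRegisters - Try to find replacement registers for every
// register in the group AntiDepGroupIndex. The group's widest ("superest")
// register determines the candidate register class; candidates are tried in
// round-robin order, and a candidate is accepted only if the corresponding
// (sub)register is free for every member of the group. On success RenameMap
// maps each group register to its replacement.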
bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
    unsigned AntiDepGroupIndex,
    RenameOrderType& RenameOrder,
    std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(dbgs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              dbgs() << " " << TRI->getName(r));
      DEBUG(dbgs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    // FIXME: remove this once PR18663 has been properly fixed. For now,
    // return a conservative answer:
    // assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  if (RenameOrder.count(SuperRC) == 0)
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      BitVector BV = RenameRegisterMap[Reg];
      if (!BV.test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Reg's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}

/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {

  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = nullptr;
  MachineInstr *CriticalPathMI = nullptr;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    if (MI->isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = nullptr;
    if (MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : nullptr;
    } else if (CriticalPathSet.any()) {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI->isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if (ExcludeRegs && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp && "Can't find index for defined register operand");
          if (!AntiDepOp || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned,
                      AggressiveAntiDepState::RegisterReference>::iterator,
                      std::multimap<unsigned,
                      AggressiveAntiDepState::RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned,
                   AggressiveAntiDepState::RegisterReference>::iterator
                   Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
              if (!SU) continue;
              for (DbgValueVector::iterator DVI = DbgValues.begin(),
                     DVE = DbgValues.end(); DVI != DVE; ++DVI)
                if (DVI->second == Q->second.Operand->getParent())
                  UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}