]> git.proxmox.com Git - rustc.git/blame - src/llvm/lib/CodeGen/InterferenceCache.cpp
Imported Upstream version 1.0.0+dfsg1
[rustc.git] / src / llvm / lib / CodeGen / InterferenceCache.cpp
CommitLineData
223e47cc
LB
1//===-- InterferenceCache.cpp - Caching per-block interference ---------*--===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// InterferenceCache remembers per-block interference in LiveIntervalUnions.
11//
12//===----------------------------------------------------------------------===//
13
223e47cc 14#include "InterferenceCache.h"
223e47cc 15#include "llvm/CodeGen/LiveIntervalAnalysis.h"
970d7e83
LB
16#include "llvm/Support/ErrorHandling.h"
17#include "llvm/Target/TargetRegisterInfo.h"
223e47cc
LB
18
19using namespace llvm;
20
1a4d82fc
JJ
21#define DEBUG_TYPE "regalloc"
22
223e47cc
LB
// Static member used for null interference cursors. A default-constructed
// BlockInterference has invalid First/Last SlotIndexes, i.e. "no interference".
InterferenceCache::BlockInterference InterferenceCache::Cursor::NoInterference;
25
1a4d82fc
JJ
// Initializes PhysRegEntries (instead of a SmallVector, PhysRegEntries is a
// buffer of size NumPhysRegs to speed up alloc/clear for targets with large
// reg files). Calloced memory is used for good form, and quiets tools like
// Valgrind too, but zero initialized memory is not required by the algorithm:
// this is because PhysRegEntries works like a SparseSet and its entries are
// only valid when there is a corresponding CacheEntries assignment. There is
// also support for when pass managers are reused for targets with different
// numbers of PhysRegs: in this case PhysRegEntries is freed and reinitialized.
34void InterferenceCache::reinitPhysRegEntries() {
35 if (PhysRegEntriesCount == TRI->getNumRegs()) return;
36 free(PhysRegEntries);
37 PhysRegEntriesCount = TRI->getNumRegs();
38 PhysRegEntries = (unsigned char*)
39 calloc(PhysRegEntriesCount, sizeof(unsigned char));
40}
41
223e47cc
LB
42void InterferenceCache::init(MachineFunction *mf,
43 LiveIntervalUnion *liuarray,
44 SlotIndexes *indexes,
45 LiveIntervals *lis,
46 const TargetRegisterInfo *tri) {
47 MF = mf;
48 LIUArray = liuarray;
49 TRI = tri;
1a4d82fc 50 reinitPhysRegEntries();
223e47cc
LB
51 for (unsigned i = 0; i != CacheEntries; ++i)
52 Entries[i].clear(mf, indexes, lis);
53}
54
55InterferenceCache::Entry *InterferenceCache::get(unsigned PhysReg) {
56 unsigned E = PhysRegEntries[PhysReg];
57 if (E < CacheEntries && Entries[E].getPhysReg() == PhysReg) {
58 if (!Entries[E].valid(LIUArray, TRI))
59 Entries[E].revalidate(LIUArray, TRI);
60 return &Entries[E];
61 }
62 // No valid entry exists, pick the next round-robin entry.
63 E = RoundRobin;
64 if (++RoundRobin == CacheEntries)
65 RoundRobin = 0;
66 for (unsigned i = 0; i != CacheEntries; ++i) {
67 // Skip entries that are in use.
68 if (Entries[E].hasRefs()) {
69 if (++E == CacheEntries)
70 E = 0;
71 continue;
72 }
73 Entries[E].reset(PhysReg, LIUArray, TRI, MF);
74 PhysRegEntries[PhysReg] = E;
75 return &Entries[E];
76 }
77 llvm_unreachable("Ran out of interference cache entries.");
78}
79
80/// revalidate - LIU contents have changed, update tags.
81void InterferenceCache::Entry::revalidate(LiveIntervalUnion *LIUArray,
82 const TargetRegisterInfo *TRI) {
83 // Invalidate all block entries.
84 ++Tag;
85 // Invalidate all iterators.
86 PrevPos = SlotIndex();
87 unsigned i = 0;
88 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i)
89 RegUnits[i].VirtTag = LIUArray[*Units].getTag();
90}
91
// Repurpose this cache entry for a new physical register. Rebuilds the
// per-regunit query state; all cached block info becomes stale.
void InterferenceCache::Entry::reset(unsigned physReg,
                                     LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI,
                                     const MachineFunction *MF) {
  assert(!hasRefs() && "Cannot reset cache entry with references");
  // LIU's changed, invalidate cache.
  ++Tag;
  // Note: PhysReg must be set before the MCRegUnitIterator below uses it.
  PhysReg = physReg;
  Blocks.resize(MF->getNumBlockIDs());

  // Reset iterators.
  PrevPos = SlotIndex();
  RegUnits.clear();
  // One RegUnitInfo per register unit of PhysReg: the virtreg union query
  // plus a pointer to the unit's fixed (physreg) live range.
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) {
    RegUnits.push_back(LIUArray[*Units]);
    RegUnits.back().Fixed = &LIS->getRegUnit(*Units);
  }
}
110
111bool InterferenceCache::Entry::valid(LiveIntervalUnion *LIUArray,
112 const TargetRegisterInfo *TRI) {
113 unsigned i = 0, e = RegUnits.size();
114 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i) {
115 if (i == e)
116 return false;
117 if (LIUArray[*Units].changedSince(RegUnits[i].VirtTag))
118 return false;
119 }
120 return i == e;
121}
122
// Recompute the cached BlockInterference for MBBNum: the first and last
// SlotIndex in the block where PhysReg interferes (virtreg unions, fixed
// regunit ranges, and clobbering register masks). Blocks with no
// interference are precomputed ahead until one with interference is found.
void InterferenceCache::Entry::update(unsigned MBBNum) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);

  // Use advanceTo only when possible.
  if (PrevPos != Start) {
    if (!PrevPos.isValid() || Start < PrevPos) {
      // Moving backwards (or first query): re-seed every iterator with a
      // full find(), since advanceTo can only move forwards.
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.find(Start);
        RUI.FixedI = RUI.Fixed->find(Start);
      }
    } else {
      // Moving forwards: cheaper incremental advanceTo from PrevPos.
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.advanceTo(Start);
        if (RUI.FixedI != RUI.Fixed->end())
          RUI.FixedI = RUI.Fixed->advanceTo(RUI.FixedI, Start);
      }
    }
    PrevPos = Start;
  }

  MachineFunction::const_iterator MFI = MF->getBlockNumbered(MBBNum);
  BlockInterference *BI = &Blocks[MBBNum];
  ArrayRef<SlotIndex> RegMaskSlots;
  ArrayRef<const uint32_t*> RegMaskBits;
  // Loop over consecutive blocks, precomputing ahead while blocks are
  // interference-free; exits when interference is found or blocks run out.
  for (;;) {
    // Mark this block's cache slot fresh and clear its result.
    BI->Tag = Tag;
    BI->First = BI->Last = SlotIndex();

    // Check for first interference from virtregs.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
      if (!I.valid())
        continue;
      SlotIndex StartI = I.start();
      if (StartI >= Stop)
        continue;
      // Keep the earliest interference start across all regunits.
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Same thing for fixed interference.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveInterval::const_iterator I = RegUnits[i].FixedI;
      LiveInterval::const_iterator E = RegUnits[i].Fixed->end();
      if (I == E)
        continue;
      SlotIndex StartI = I->start;
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Also check for register mask interference.
    RegMaskSlots = LIS->getRegMaskSlotsInBlock(MBBNum);
    RegMaskBits = LIS->getRegMaskBitsInBlock(MBBNum);
    // Only masks before the earliest interference found so far matter.
    SlotIndex Limit = BI->First.isValid() ? BI->First : Stop;
    for (unsigned i = 0, e = RegMaskSlots.size();
         i != e && RegMaskSlots[i] < Limit; ++i)
      if (MachineOperand::clobbersPhysReg(RegMaskBits[i], PhysReg)) {
        // Register mask i clobbers PhysReg before the LIU interference.
        BI->First = RegMaskSlots[i];
        break;
      }

    // The iterators now stand at (or past) this block's end.
    PrevPos = Stop;
    if (BI->First.isValid())
      break;

    // No interference in this block? Go ahead and precompute the next block.
    if (++MFI == MF->end())
      return;
    MBBNum = MFI->getNumber();
    BI = &Blocks[MBBNum];
    // Next block already cached with the current tag; nothing more to do.
    if (BI->Tag == Tag)
      return;
    std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);
  }

  // Check for last interference in block.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
    if (!I.valid() || I.start() >= Stop)
      continue;
    I.advanceTo(Stop);
    // If we advanced past the block, step back to the last in-block segment,
    // then restore the iterator afterwards so PrevPos stays consistent.
    bool Backup = !I.valid() || I.start() >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I.stop();
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Fixed interference.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveInterval::iterator &I = RegUnits[i].FixedI;
    LiveRange *LR = RegUnits[i].Fixed;
    if (I == LR->end() || I->start >= Stop)
      continue;
    I = LR->advanceTo(I, Stop);
    bool Backup = I == LR->end() || I->start >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I->end;
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Also check for register mask interference.
  // Scan masks from the end of the block backwards; only those after the
  // latest interference found so far matter.
  SlotIndex Limit = BI->Last.isValid() ? BI->Last : Start;
  for (unsigned i = RegMaskSlots.size();
       i && RegMaskSlots[i-1].getDeadSlot() > Limit; --i)
    if (MachineOperand::clobbersPhysReg(RegMaskBits[i-1], PhysReg)) {
      // Register mask i-1 clobbers PhysReg after the LIU interference.
      // Model the regmask clobber as a dead def.
      BI->Last = RegMaskSlots[i-1].getDeadSlot();
      break;
    }
}