//===-- RegAllocSolver.h - Heuristic PBQP Solver for reg alloc --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Heuristic PBQP solver for register allocation problems. This solver uses a
// graph reduction approach. Nodes of degree 0, 1 and 2 are eliminated with
// optimality-preserving rules (see ReductionRules.h). When no low-degree (<3)
// nodes are present, a heuristic derived from Briggs' graph coloring approach
// is used.
//
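// A minimal usage sketch (illustrative only, not part of this header's
// interface); it assumes the PBQP Vector, Matrix, Graph and Solution
// interfaces used below (addNode, addEdge, getSelection) and the usual
// register allocation convention that option 0 of each node is the spill
// option:
//
//   PBQP::RegAlloc::Graph G;
//   PBQP::Vector Costs(3, 0);               // [ spill, reg1, reg2 ]
//   PBQP::RegAlloc::Graph::NodeId N1 = G.addNode(Costs);
//   PBQP::RegAlloc::Graph::NodeId N2 = G.addNode(Costs);
//   PBQP::Matrix ECosts(3, 3, 0);           // rows: N1 options, cols: N2 options
//   ECosts[1][1] = ECosts[2][2] =
//     std::numeric_limits<PBQP::PBQPNum>::infinity();   // forbid same register
//   G.addEdge(N1, N2, ECosts);
//   PBQP::Solution S = PBQP::RegAlloc::solve(G);
//   unsigned N1Sel = S.getSelection(N1);    // 0 => spill, i => register i
//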
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_PBQP_REGALLOCSOLVER_H
#define LLVM_CODEGEN_PBQP_REGALLOCSOLVER_H

#include "CostAllocator.h"
#include "Graph.h"
#include "ReductionRules.h"
#include "Solution.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <limits>
#include <set>
#include <vector>

namespace PBQP {

  namespace RegAlloc {

    /// \brief Metadata to speed allocatability test.
    ///
    /// Keeps track of the number of infinities in each row and column.
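    ///
    /// A row or column is marked unsafe if it contains an infinite cost, and
    /// WorstRow/WorstCol record the largest number of infinities found in any
    /// single row or column. Row and column zero (here assumed to represent
    /// the spill option) are excluded from the scan.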
    class MatrixMetadata {
    private:
      MatrixMetadata(const MatrixMetadata&);
      void operator=(const MatrixMetadata&);
    public:
      MatrixMetadata(const PBQP::Matrix& M)
        : WorstRow(0), WorstCol(0),
          UnsafeRows(new bool[M.getRows() - 1]()),
          UnsafeCols(new bool[M.getCols() - 1]()) {

        unsigned* ColCounts = new unsigned[M.getCols() - 1]();

        for (unsigned i = 1; i < M.getRows(); ++i) {
          unsigned RowCount = 0;
          for (unsigned j = 1; j < M.getCols(); ++j) {
            if (M[i][j] == std::numeric_limits<PBQP::PBQPNum>::infinity()) {
              ++RowCount;
              ++ColCounts[j - 1];
              UnsafeRows[i - 1] = true;
              UnsafeCols[j - 1] = true;
            }
          }
          WorstRow = std::max(WorstRow, RowCount);
        }
        unsigned WorstColCount =
          *std::max_element(ColCounts, ColCounts + M.getCols() - 1);
        WorstCol = std::max(WorstCol, WorstColCount);
        delete[] ColCounts;
      }

      ~MatrixMetadata() {
        delete[] UnsafeRows;
        delete[] UnsafeCols;
      }

      unsigned getWorstRow() const { return WorstRow; }
      unsigned getWorstCol() const { return WorstCol; }
      const bool* getUnsafeRows() const { return UnsafeRows; }
      const bool* getUnsafeCols() const { return UnsafeCols; }

    private:
      unsigned WorstRow, WorstCol;
      bool* UnsafeRows;
      bool* UnsafeCols;
    };

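    /// \brief Per-node solver state.
    ///
    /// Tracks the node's current reduction state together with the
    /// Briggs-style counters (DeniedOpts, OptUnsafeEdges) that are derived
    /// from the MatrixMetadata of its incident edges.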
    class NodeMetadata {
    public:
      typedef enum { Unprocessed,
                     OptimallyReducible,
                     ConservativelyAllocatable,
                     NotProvablyAllocatable } ReductionState;

      NodeMetadata() : RS(Unprocessed), DeniedOpts(0), OptUnsafeEdges(nullptr){}
      ~NodeMetadata() { delete[] OptUnsafeEdges; }

      void setup(const Vector& Costs) {
        NumOpts = Costs.getLength() - 1;
        OptUnsafeEdges = new unsigned[NumOpts]();
      }

      ReductionState getReductionState() const { return RS; }
      void setReductionState(ReductionState RS) { this->RS = RS; }

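      // Fold the metadata of a newly connected edge matrix into this node's
      // counters. Transpose is true when this node is the edge's second node,
      // i.e. its options correspond to the matrix's columns rather than rows.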
      void handleAddEdge(const MatrixMetadata& MD, bool Transpose) {
        DeniedOpts += Transpose ? MD.getWorstCol() : MD.getWorstRow();
        const bool* UnsafeOpts =
          Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
        for (unsigned i = 0; i < NumOpts; ++i)
          OptUnsafeEdges[i] += UnsafeOpts[i];
      }

      void handleRemoveEdge(const MatrixMetadata& MD, bool Transpose) {
        DeniedOpts -= Transpose ? MD.getWorstCol() : MD.getWorstRow();
        const bool* UnsafeOpts =
          Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
        for (unsigned i = 0; i < NumOpts; ++i)
          OptUnsafeEdges[i] -= UnsafeOpts[i];
      }

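      // A node is conservatively allocatable if its neighbors cannot possibly
      // deny all of its register options: either fewer options can be denied
      // than exist, or at least one option is not marked unsafe by any
      // incident edge.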
      bool isConservativelyAllocatable() const {
        return (DeniedOpts < NumOpts) ||
               (std::find(OptUnsafeEdges, OptUnsafeEdges + NumOpts, 0) !=
                  OptUnsafeEdges + NumOpts);
      }

    private:
      ReductionState RS;
      unsigned NumOpts;
      unsigned DeniedOpts;
      unsigned* OptUnsafeEdges;
    };

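    /// \brief Solver plugged into PBQP::Graph for register allocation.
    ///
    /// Maintains worklists of optimally reducible, conservatively allocatable
    /// and not-provably-allocatable nodes, and drives the reduce /
    /// backpropagate cycle from solve().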
    class RegAllocSolverImpl {
    private:
      typedef PBQP::MDMatrix<MatrixMetadata> RAMatrix;
    public:
      typedef PBQP::Vector RawVector;
      typedef PBQP::Matrix RawMatrix;
      typedef PBQP::Vector Vector;
      typedef RAMatrix     Matrix;
      typedef PBQP::PoolCostAllocator<
                Vector, PBQP::VectorComparator,
                Matrix, PBQP::MatrixComparator> CostAllocator;

      typedef PBQP::GraphBase::NodeId NodeId;
      typedef PBQP::GraphBase::EdgeId EdgeId;

      typedef RegAlloc::NodeMetadata NodeMetadata;

      struct EdgeMetadata { };

      typedef PBQP::Graph<RegAllocSolverImpl> Graph;

      RegAllocSolverImpl(Graph &G) : G(G) {}

      Solution solve() {
        G.setSolver(*this);
        Solution S;
        setup();
        S = backpropagate(G, reduce());
        G.unsetSolver();
        return S;
      }

      void handleAddNode(NodeId NId) {
        G.getNodeMetadata(NId).setup(G.getNodeCosts(NId));
      }
      void handleRemoveNode(NodeId NId) {}
      void handleSetNodeCosts(NodeId NId, const Vector& newCosts) {}

      void handleAddEdge(EdgeId EId) {
        handleReconnectEdge(EId, G.getEdgeNode1Id(EId));
        handleReconnectEdge(EId, G.getEdgeNode2Id(EId));
      }

      void handleRemoveEdge(EdgeId EId) {
        handleDisconnectEdge(EId, G.getEdgeNode1Id(EId));
        handleDisconnectEdge(EId, G.getEdgeNode2Id(EId));
      }

      void handleDisconnectEdge(EdgeId EId, NodeId NId) {
        NodeMetadata& NMd = G.getNodeMetadata(NId);
        const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
        NMd.handleRemoveEdge(MMd, NId == G.getEdgeNode2Id(EId));
        if (G.getNodeDegree(NId) == 3) {
          // This node is becoming optimally reducible.
          moveToOptimallyReducibleNodes(NId);
        } else if (NMd.getReductionState() ==
                     NodeMetadata::NotProvablyAllocatable &&
                   NMd.isConservativelyAllocatable()) {
          // This node just became conservatively allocatable.
          moveToConservativelyAllocatableNodes(NId);
        }
      }

      void handleReconnectEdge(EdgeId EId, NodeId NId) {
        NodeMetadata& NMd = G.getNodeMetadata(NId);
        const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
        NMd.handleAddEdge(MMd, NId == G.getEdgeNode2Id(EId));
      }

      void handleSetEdgeCosts(EdgeId EId, const Matrix& NewCosts) {
        handleRemoveEdge(EId);

        NodeId N1Id = G.getEdgeNode1Id(EId);
        NodeId N2Id = G.getEdgeNode2Id(EId);
        NodeMetadata& N1Md = G.getNodeMetadata(N1Id);
        NodeMetadata& N2Md = G.getNodeMetadata(N2Id);
        const MatrixMetadata& MMd = NewCosts.getMetadata();
        N1Md.handleAddEdge(MMd, N1Id != G.getEdgeNode1Id(EId));
        N2Md.handleAddEdge(MMd, N2Id != G.getEdgeNode1Id(EId));
      }

    private:

      void removeFromCurrentSet(NodeId NId) {
        switch (G.getNodeMetadata(NId).getReductionState()) {
          case NodeMetadata::Unprocessed: break;
          case NodeMetadata::OptimallyReducible:
            assert(OptimallyReducibleNodes.find(NId) !=
                     OptimallyReducibleNodes.end() &&
                   "Node not in optimally reducible set.");
            OptimallyReducibleNodes.erase(NId);
            break;
          case NodeMetadata::ConservativelyAllocatable:
            assert(ConservativelyAllocatableNodes.find(NId) !=
                     ConservativelyAllocatableNodes.end() &&
                   "Node not in conservatively allocatable set.");
            ConservativelyAllocatableNodes.erase(NId);
            break;
          case NodeMetadata::NotProvablyAllocatable:
            assert(NotProvablyAllocatableNodes.find(NId) !=
                     NotProvablyAllocatableNodes.end() &&
                   "Node not in not-provably-allocatable set.");
            NotProvablyAllocatableNodes.erase(NId);
            break;
        }
      }

      void moveToOptimallyReducibleNodes(NodeId NId) {
        removeFromCurrentSet(NId);
        OptimallyReducibleNodes.insert(NId);
        G.getNodeMetadata(NId).setReductionState(
          NodeMetadata::OptimallyReducible);
      }

      void moveToConservativelyAllocatableNodes(NodeId NId) {
        removeFromCurrentSet(NId);
        ConservativelyAllocatableNodes.insert(NId);
        G.getNodeMetadata(NId).setReductionState(
          NodeMetadata::ConservativelyAllocatable);
      }

      void moveToNotProvablyAllocatableNodes(NodeId NId) {
        removeFromCurrentSet(NId);
        NotProvablyAllocatableNodes.insert(NId);
        G.getNodeMetadata(NId).setReductionState(
          NodeMetadata::NotProvablyAllocatable);
      }

      void setup() {
        // Set up worklists.
        for (auto NId : G.nodeIds()) {
          if (G.getNodeDegree(NId) < 3)
            moveToOptimallyReducibleNodes(NId);
          else if (G.getNodeMetadata(NId).isConservativelyAllocatable())
            moveToConservativelyAllocatableNodes(NId);
          else
            moveToNotProvablyAllocatableNodes(NId);
        }
      }

      // Compute a reduction order for the graph by iteratively applying PBQP
      // reduction rules. Locally optimal rules are applied whenever possible (R0,
      // R1, R2). If no locally-optimal rules apply then any conservatively
      // allocatable node is reduced. Finally, if no conservatively allocatable
      // node exists then the node with the lowest spill-cost:degree ratio is
      // selected.
      std::vector<GraphBase::NodeId> reduce() {
        assert(!G.empty() && "Cannot reduce empty graph.");

        typedef GraphBase::NodeId NodeId;
        std::vector<NodeId> NodeStack;

        // Consume worklists.
        while (true) {
          if (!OptimallyReducibleNodes.empty()) {
            NodeSet::iterator NItr = OptimallyReducibleNodes.begin();
            NodeId NId = *NItr;
            OptimallyReducibleNodes.erase(NItr);
            NodeStack.push_back(NId);
            switch (G.getNodeDegree(NId)) {
              case 0:
                break;
              case 1:
                applyR1(G, NId);
                break;
              case 2:
                applyR2(G, NId);
                break;
              default: llvm_unreachable("Not an optimally reducible node.");
            }
          } else if (!ConservativelyAllocatableNodes.empty()) {
            // Conservatively allocatable nodes will never spill. For now just
            // take the first node in the set and push it on the stack. When we
            // start optimizing more heavily for register preferencing, it may
            // be better to push nodes with lower 'expected' or worst-case
            // register costs first (since early nodes are the most
            // constrained).
            NodeSet::iterator NItr = ConservativelyAllocatableNodes.begin();
            NodeId NId = *NItr;
            ConservativelyAllocatableNodes.erase(NItr);
            NodeStack.push_back(NId);
            G.disconnectAllNeighborsFromNode(NId);

          } else if (!NotProvablyAllocatableNodes.empty()) {
            NodeSet::iterator NItr =
              std::min_element(NotProvablyAllocatableNodes.begin(),
                               NotProvablyAllocatableNodes.end(),
                               SpillCostComparator(G));
            NodeId NId = *NItr;
            NotProvablyAllocatableNodes.erase(NItr);
            NodeStack.push_back(NId);
            G.disconnectAllNeighborsFromNode(NId);
          } else
            break;
        }

        return NodeStack;
      }

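      // Orders nodes by spill-cost:degree ratio; reduce() uses this to pick
      // the node with the lowest ratio when no optimally reducible or
      // conservatively allocatable nodes remain.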
      class SpillCostComparator {
      public:
        SpillCostComparator(const Graph& G) : G(G) {}
        bool operator()(NodeId N1Id, NodeId N2Id) {
          PBQPNum N1SC = G.getNodeCosts(N1Id)[0] / G.getNodeDegree(N1Id);
          PBQPNum N2SC = G.getNodeCosts(N2Id)[0] / G.getNodeDegree(N2Id);
          return N1SC < N2SC;
        }
      private:
        const Graph& G;
      };

      Graph& G;
      typedef std::set<NodeId> NodeSet;
      NodeSet OptimallyReducibleNodes;
      NodeSet ConservativelyAllocatableNodes;
      NodeSet NotProvablyAllocatableNodes;
    };

    typedef Graph<RegAllocSolverImpl> Graph;

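    /// \brief Solve the given register allocation graph.
    ///
    /// Returns an empty solution for an empty graph; otherwise runs the
    /// heuristic reduction and backpropagation passes and returns the
    /// resulting selections.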
    inline Solution solve(Graph& G) {
      if (G.empty())
        return Solution();
      RegAllocSolverImpl RegAllocSolver(G);
      return RegAllocSolver.solve();
    }

  } // namespace RegAlloc
} // namespace PBQP

#endif // LLVM_CODEGEN_PBQP_REGALLOCSOLVER_H