/* drivers/gpu/drm/radeon/evergreen.c */
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36
37 #define EVERGREEN_PFP_UCODE_SIZE 1120
38 #define EVERGREEN_PM4_UCODE_SIZE 1376
39
40 static const u32 crtc_offsets[6] =
41 {
42         EVERGREEN_CRTC0_REGISTER_OFFSET,
43         EVERGREEN_CRTC1_REGISTER_OFFSET,
44         EVERGREEN_CRTC2_REGISTER_OFFSET,
45         EVERGREEN_CRTC3_REGISTER_OFFSET,
46         EVERGREEN_CRTC4_REGISTER_OFFSET,
47         EVERGREEN_CRTC5_REGISTER_OFFSET
48 };
49
50 static void evergreen_gpu_init(struct radeon_device *rdev);
51 void evergreen_fini(struct radeon_device *rdev);
52 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
53 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
54                                      int ring, u32 cp_int_cntl);
55
56 static const u32 evergreen_golden_registers[] =
57 {
58         0x3f90, 0xffff0000, 0xff000000,
59         0x9148, 0xffff0000, 0xff000000,
60         0x3f94, 0xffff0000, 0xff000000,
61         0x914c, 0xffff0000, 0xff000000,
62         0x9b7c, 0xffffffff, 0x00000000,
63         0x8a14, 0xffffffff, 0x00000007,
64         0x8b10, 0xffffffff, 0x00000000,
65         0x960c, 0xffffffff, 0x54763210,
66         0x88c4, 0xffffffff, 0x000000c2,
67         0x88d4, 0xffffffff, 0x00000010,
68         0x8974, 0xffffffff, 0x00000000,
69         0xc78, 0x00000080, 0x00000080,
70         0x5eb4, 0xffffffff, 0x00000002,
71         0x5e78, 0xffffffff, 0x001000f0,
72         0x6104, 0x01000300, 0x00000000,
73         0x5bc0, 0x00300000, 0x00000000,
74         0x7030, 0xffffffff, 0x00000011,
75         0x7c30, 0xffffffff, 0x00000011,
76         0x10830, 0xffffffff, 0x00000011,
77         0x11430, 0xffffffff, 0x00000011,
78         0x12030, 0xffffffff, 0x00000011,
79         0x12c30, 0xffffffff, 0x00000011,
80         0xd02c, 0xffffffff, 0x08421000,
81         0x240c, 0xffffffff, 0x00000380,
82         0x8b24, 0xffffffff, 0x00ff0fff,
83         0x28a4c, 0x06000000, 0x06000000,
84         0x10c, 0x00000001, 0x00000001,
85         0x8d00, 0xffffffff, 0x100e4848,
86         0x8d04, 0xffffffff, 0x00164745,
87         0x8c00, 0xffffffff, 0xe4000003,
88         0x8c04, 0xffffffff, 0x40600060,
89         0x8c08, 0xffffffff, 0x001c001c,
90         0x8cf0, 0xffffffff, 0x08e00620,
91         0x8c20, 0xffffffff, 0x00800080,
92         0x8c24, 0xffffffff, 0x00800080,
93         0x8c18, 0xffffffff, 0x20202078,
94         0x8c1c, 0xffffffff, 0x00001010,
95         0x28350, 0xffffffff, 0x00000000,
96         0xa008, 0xffffffff, 0x00010000,
97         0x5cc, 0xffffffff, 0x00000001,
98         0x9508, 0xffffffff, 0x00000002,
99         0x913c, 0x0000000f, 0x0000000a
100 };
101
102 static const u32 evergreen_golden_registers2[] =
103 {
104         0x2f4c, 0xffffffff, 0x00000000,
105         0x54f4, 0xffffffff, 0x00000000,
106         0x54f0, 0xffffffff, 0x00000000,
107         0x5498, 0xffffffff, 0x00000000,
108         0x549c, 0xffffffff, 0x00000000,
109         0x5494, 0xffffffff, 0x00000000,
110         0x53cc, 0xffffffff, 0x00000000,
111         0x53c8, 0xffffffff, 0x00000000,
112         0x53c4, 0xffffffff, 0x00000000,
113         0x53c0, 0xffffffff, 0x00000000,
114         0x53bc, 0xffffffff, 0x00000000,
115         0x53b8, 0xffffffff, 0x00000000,
116         0x53b4, 0xffffffff, 0x00000000,
117         0x53b0, 0xffffffff, 0x00000000
118 };
119
120 static const u32 cypress_mgcg_init[] =
121 {
122         0x802c, 0xffffffff, 0xc0000000,
123         0x5448, 0xffffffff, 0x00000100,
124         0x55e4, 0xffffffff, 0x00000100,
125         0x160c, 0xffffffff, 0x00000100,
126         0x5644, 0xffffffff, 0x00000100,
127         0xc164, 0xffffffff, 0x00000100,
128         0x8a18, 0xffffffff, 0x00000100,
129         0x897c, 0xffffffff, 0x06000100,
130         0x8b28, 0xffffffff, 0x00000100,
131         0x9144, 0xffffffff, 0x00000100,
132         0x9a60, 0xffffffff, 0x00000100,
133         0x9868, 0xffffffff, 0x00000100,
134         0x8d58, 0xffffffff, 0x00000100,
135         0x9510, 0xffffffff, 0x00000100,
136         0x949c, 0xffffffff, 0x00000100,
137         0x9654, 0xffffffff, 0x00000100,
138         0x9030, 0xffffffff, 0x00000100,
139         0x9034, 0xffffffff, 0x00000100,
140         0x9038, 0xffffffff, 0x00000100,
141         0x903c, 0xffffffff, 0x00000100,
142         0x9040, 0xffffffff, 0x00000100,
143         0xa200, 0xffffffff, 0x00000100,
144         0xa204, 0xffffffff, 0x00000100,
145         0xa208, 0xffffffff, 0x00000100,
146         0xa20c, 0xffffffff, 0x00000100,
147         0x971c, 0xffffffff, 0x00000100,
148         0x977c, 0xffffffff, 0x00000100,
149         0x3f80, 0xffffffff, 0x00000100,
150         0xa210, 0xffffffff, 0x00000100,
151         0xa214, 0xffffffff, 0x00000100,
152         0x4d8, 0xffffffff, 0x00000100,
153         0x9784, 0xffffffff, 0x00000100,
154         0x9698, 0xffffffff, 0x00000100,
155         0x4d4, 0xffffffff, 0x00000200,
156         0x30cc, 0xffffffff, 0x00000100,
157         0xd0c0, 0xffffffff, 0xff000100,
158         0x802c, 0xffffffff, 0x40000000,
159         0x915c, 0xffffffff, 0x00010000,
160         0x9160, 0xffffffff, 0x00030002,
161         0x9178, 0xffffffff, 0x00070000,
162         0x917c, 0xffffffff, 0x00030002,
163         0x9180, 0xffffffff, 0x00050004,
164         0x918c, 0xffffffff, 0x00010006,
165         0x9190, 0xffffffff, 0x00090008,
166         0x9194, 0xffffffff, 0x00070000,
167         0x9198, 0xffffffff, 0x00030002,
168         0x919c, 0xffffffff, 0x00050004,
169         0x91a8, 0xffffffff, 0x00010006,
170         0x91ac, 0xffffffff, 0x00090008,
171         0x91b0, 0xffffffff, 0x00070000,
172         0x91b4, 0xffffffff, 0x00030002,
173         0x91b8, 0xffffffff, 0x00050004,
174         0x91c4, 0xffffffff, 0x00010006,
175         0x91c8, 0xffffffff, 0x00090008,
176         0x91cc, 0xffffffff, 0x00070000,
177         0x91d0, 0xffffffff, 0x00030002,
178         0x91d4, 0xffffffff, 0x00050004,
179         0x91e0, 0xffffffff, 0x00010006,
180         0x91e4, 0xffffffff, 0x00090008,
181         0x91e8, 0xffffffff, 0x00000000,
182         0x91ec, 0xffffffff, 0x00070000,
183         0x91f0, 0xffffffff, 0x00030002,
184         0x91f4, 0xffffffff, 0x00050004,
185         0x9200, 0xffffffff, 0x00010006,
186         0x9204, 0xffffffff, 0x00090008,
187         0x9208, 0xffffffff, 0x00070000,
188         0x920c, 0xffffffff, 0x00030002,
189         0x9210, 0xffffffff, 0x00050004,
190         0x921c, 0xffffffff, 0x00010006,
191         0x9220, 0xffffffff, 0x00090008,
192         0x9224, 0xffffffff, 0x00070000,
193         0x9228, 0xffffffff, 0x00030002,
194         0x922c, 0xffffffff, 0x00050004,
195         0x9238, 0xffffffff, 0x00010006,
196         0x923c, 0xffffffff, 0x00090008,
197         0x9240, 0xffffffff, 0x00070000,
198         0x9244, 0xffffffff, 0x00030002,
199         0x9248, 0xffffffff, 0x00050004,
200         0x9254, 0xffffffff, 0x00010006,
201         0x9258, 0xffffffff, 0x00090008,
202         0x925c, 0xffffffff, 0x00070000,
203         0x9260, 0xffffffff, 0x00030002,
204         0x9264, 0xffffffff, 0x00050004,
205         0x9270, 0xffffffff, 0x00010006,
206         0x9274, 0xffffffff, 0x00090008,
207         0x9278, 0xffffffff, 0x00070000,
208         0x927c, 0xffffffff, 0x00030002,
209         0x9280, 0xffffffff, 0x00050004,
210         0x928c, 0xffffffff, 0x00010006,
211         0x9290, 0xffffffff, 0x00090008,
212         0x9294, 0xffffffff, 0x00000000,
213         0x929c, 0xffffffff, 0x00000001,
214         0x802c, 0xffffffff, 0x40010000,
215         0x915c, 0xffffffff, 0x00010000,
216         0x9160, 0xffffffff, 0x00030002,
217         0x9178, 0xffffffff, 0x00070000,
218         0x917c, 0xffffffff, 0x00030002,
219         0x9180, 0xffffffff, 0x00050004,
220         0x918c, 0xffffffff, 0x00010006,
221         0x9190, 0xffffffff, 0x00090008,
222         0x9194, 0xffffffff, 0x00070000,
223         0x9198, 0xffffffff, 0x00030002,
224         0x919c, 0xffffffff, 0x00050004,
225         0x91a8, 0xffffffff, 0x00010006,
226         0x91ac, 0xffffffff, 0x00090008,
227         0x91b0, 0xffffffff, 0x00070000,
228         0x91b4, 0xffffffff, 0x00030002,
229         0x91b8, 0xffffffff, 0x00050004,
230         0x91c4, 0xffffffff, 0x00010006,
231         0x91c8, 0xffffffff, 0x00090008,
232         0x91cc, 0xffffffff, 0x00070000,
233         0x91d0, 0xffffffff, 0x00030002,
234         0x91d4, 0xffffffff, 0x00050004,
235         0x91e0, 0xffffffff, 0x00010006,
236         0x91e4, 0xffffffff, 0x00090008,
237         0x91e8, 0xffffffff, 0x00000000,
238         0x91ec, 0xffffffff, 0x00070000,
239         0x91f0, 0xffffffff, 0x00030002,
240         0x91f4, 0xffffffff, 0x00050004,
241         0x9200, 0xffffffff, 0x00010006,
242         0x9204, 0xffffffff, 0x00090008,
243         0x9208, 0xffffffff, 0x00070000,
244         0x920c, 0xffffffff, 0x00030002,
245         0x9210, 0xffffffff, 0x00050004,
246         0x921c, 0xffffffff, 0x00010006,
247         0x9220, 0xffffffff, 0x00090008,
248         0x9224, 0xffffffff, 0x00070000,
249         0x9228, 0xffffffff, 0x00030002,
250         0x922c, 0xffffffff, 0x00050004,
251         0x9238, 0xffffffff, 0x00010006,
252         0x923c, 0xffffffff, 0x00090008,
253         0x9240, 0xffffffff, 0x00070000,
254         0x9244, 0xffffffff, 0x00030002,
255         0x9248, 0xffffffff, 0x00050004,
256         0x9254, 0xffffffff, 0x00010006,
257         0x9258, 0xffffffff, 0x00090008,
258         0x925c, 0xffffffff, 0x00070000,
259         0x9260, 0xffffffff, 0x00030002,
260         0x9264, 0xffffffff, 0x00050004,
261         0x9270, 0xffffffff, 0x00010006,
262         0x9274, 0xffffffff, 0x00090008,
263         0x9278, 0xffffffff, 0x00070000,
264         0x927c, 0xffffffff, 0x00030002,
265         0x9280, 0xffffffff, 0x00050004,
266         0x928c, 0xffffffff, 0x00010006,
267         0x9290, 0xffffffff, 0x00090008,
268         0x9294, 0xffffffff, 0x00000000,
269         0x929c, 0xffffffff, 0x00000001,
270         0x802c, 0xffffffff, 0xc0000000
271 };
272
273 static const u32 redwood_mgcg_init[] =
274 {
275         0x802c, 0xffffffff, 0xc0000000,
276         0x5448, 0xffffffff, 0x00000100,
277         0x55e4, 0xffffffff, 0x00000100,
278         0x160c, 0xffffffff, 0x00000100,
279         0x5644, 0xffffffff, 0x00000100,
280         0xc164, 0xffffffff, 0x00000100,
281         0x8a18, 0xffffffff, 0x00000100,
282         0x897c, 0xffffffff, 0x06000100,
283         0x8b28, 0xffffffff, 0x00000100,
284         0x9144, 0xffffffff, 0x00000100,
285         0x9a60, 0xffffffff, 0x00000100,
286         0x9868, 0xffffffff, 0x00000100,
287         0x8d58, 0xffffffff, 0x00000100,
288         0x9510, 0xffffffff, 0x00000100,
289         0x949c, 0xffffffff, 0x00000100,
290         0x9654, 0xffffffff, 0x00000100,
291         0x9030, 0xffffffff, 0x00000100,
292         0x9034, 0xffffffff, 0x00000100,
293         0x9038, 0xffffffff, 0x00000100,
294         0x903c, 0xffffffff, 0x00000100,
295         0x9040, 0xffffffff, 0x00000100,
296         0xa200, 0xffffffff, 0x00000100,
297         0xa204, 0xffffffff, 0x00000100,
298         0xa208, 0xffffffff, 0x00000100,
299         0xa20c, 0xffffffff, 0x00000100,
300         0x971c, 0xffffffff, 0x00000100,
301         0x977c, 0xffffffff, 0x00000100,
302         0x3f80, 0xffffffff, 0x00000100,
303         0xa210, 0xffffffff, 0x00000100,
304         0xa214, 0xffffffff, 0x00000100,
305         0x4d8, 0xffffffff, 0x00000100,
306         0x9784, 0xffffffff, 0x00000100,
307         0x9698, 0xffffffff, 0x00000100,
308         0x4d4, 0xffffffff, 0x00000200,
309         0x30cc, 0xffffffff, 0x00000100,
310         0xd0c0, 0xffffffff, 0xff000100,
311         0x802c, 0xffffffff, 0x40000000,
312         0x915c, 0xffffffff, 0x00010000,
313         0x9160, 0xffffffff, 0x00030002,
314         0x9178, 0xffffffff, 0x00070000,
315         0x917c, 0xffffffff, 0x00030002,
316         0x9180, 0xffffffff, 0x00050004,
317         0x918c, 0xffffffff, 0x00010006,
318         0x9190, 0xffffffff, 0x00090008,
319         0x9194, 0xffffffff, 0x00070000,
320         0x9198, 0xffffffff, 0x00030002,
321         0x919c, 0xffffffff, 0x00050004,
322         0x91a8, 0xffffffff, 0x00010006,
323         0x91ac, 0xffffffff, 0x00090008,
324         0x91b0, 0xffffffff, 0x00070000,
325         0x91b4, 0xffffffff, 0x00030002,
326         0x91b8, 0xffffffff, 0x00050004,
327         0x91c4, 0xffffffff, 0x00010006,
328         0x91c8, 0xffffffff, 0x00090008,
329         0x91cc, 0xffffffff, 0x00070000,
330         0x91d0, 0xffffffff, 0x00030002,
331         0x91d4, 0xffffffff, 0x00050004,
332         0x91e0, 0xffffffff, 0x00010006,
333         0x91e4, 0xffffffff, 0x00090008,
334         0x91e8, 0xffffffff, 0x00000000,
335         0x91ec, 0xffffffff, 0x00070000,
336         0x91f0, 0xffffffff, 0x00030002,
337         0x91f4, 0xffffffff, 0x00050004,
338         0x9200, 0xffffffff, 0x00010006,
339         0x9204, 0xffffffff, 0x00090008,
340         0x9294, 0xffffffff, 0x00000000,
341         0x929c, 0xffffffff, 0x00000001,
342         0x802c, 0xffffffff, 0xc0000000
343 };
344
345 static const u32 cedar_golden_registers[] =
346 {
347         0x3f90, 0xffff0000, 0xff000000,
348         0x9148, 0xffff0000, 0xff000000,
349         0x3f94, 0xffff0000, 0xff000000,
350         0x914c, 0xffff0000, 0xff000000,
351         0x9b7c, 0xffffffff, 0x00000000,
352         0x8a14, 0xffffffff, 0x00000007,
353         0x8b10, 0xffffffff, 0x00000000,
354         0x960c, 0xffffffff, 0x54763210,
355         0x88c4, 0xffffffff, 0x000000c2,
356         0x88d4, 0xffffffff, 0x00000000,
357         0x8974, 0xffffffff, 0x00000000,
358         0xc78, 0x00000080, 0x00000080,
359         0x5eb4, 0xffffffff, 0x00000002,
360         0x5e78, 0xffffffff, 0x001000f0,
361         0x6104, 0x01000300, 0x00000000,
362         0x5bc0, 0x00300000, 0x00000000,
363         0x7030, 0xffffffff, 0x00000011,
364         0x7c30, 0xffffffff, 0x00000011,
365         0x10830, 0xffffffff, 0x00000011,
366         0x11430, 0xffffffff, 0x00000011,
367         0xd02c, 0xffffffff, 0x08421000,
368         0x240c, 0xffffffff, 0x00000380,
369         0x8b24, 0xffffffff, 0x00ff0fff,
370         0x28a4c, 0x06000000, 0x06000000,
371         0x10c, 0x00000001, 0x00000001,
372         0x8d00, 0xffffffff, 0x100e4848,
373         0x8d04, 0xffffffff, 0x00164745,
374         0x8c00, 0xffffffff, 0xe4000003,
375         0x8c04, 0xffffffff, 0x40600060,
376         0x8c08, 0xffffffff, 0x001c001c,
377         0x8cf0, 0xffffffff, 0x08e00410,
378         0x8c20, 0xffffffff, 0x00800080,
379         0x8c24, 0xffffffff, 0x00800080,
380         0x8c18, 0xffffffff, 0x20202078,
381         0x8c1c, 0xffffffff, 0x00001010,
382         0x28350, 0xffffffff, 0x00000000,
383         0xa008, 0xffffffff, 0x00010000,
384         0x5cc, 0xffffffff, 0x00000001,
385         0x9508, 0xffffffff, 0x00000002
386 };
387
388 static const u32 cedar_mgcg_init[] =
389 {
390         0x802c, 0xffffffff, 0xc0000000,
391         0x5448, 0xffffffff, 0x00000100,
392         0x55e4, 0xffffffff, 0x00000100,
393         0x160c, 0xffffffff, 0x00000100,
394         0x5644, 0xffffffff, 0x00000100,
395         0xc164, 0xffffffff, 0x00000100,
396         0x8a18, 0xffffffff, 0x00000100,
397         0x897c, 0xffffffff, 0x06000100,
398         0x8b28, 0xffffffff, 0x00000100,
399         0x9144, 0xffffffff, 0x00000100,
400         0x9a60, 0xffffffff, 0x00000100,
401         0x9868, 0xffffffff, 0x00000100,
402         0x8d58, 0xffffffff, 0x00000100,
403         0x9510, 0xffffffff, 0x00000100,
404         0x949c, 0xffffffff, 0x00000100,
405         0x9654, 0xffffffff, 0x00000100,
406         0x9030, 0xffffffff, 0x00000100,
407         0x9034, 0xffffffff, 0x00000100,
408         0x9038, 0xffffffff, 0x00000100,
409         0x903c, 0xffffffff, 0x00000100,
410         0x9040, 0xffffffff, 0x00000100,
411         0xa200, 0xffffffff, 0x00000100,
412         0xa204, 0xffffffff, 0x00000100,
413         0xa208, 0xffffffff, 0x00000100,
414         0xa20c, 0xffffffff, 0x00000100,
415         0x971c, 0xffffffff, 0x00000100,
416         0x977c, 0xffffffff, 0x00000100,
417         0x3f80, 0xffffffff, 0x00000100,
418         0xa210, 0xffffffff, 0x00000100,
419         0xa214, 0xffffffff, 0x00000100,
420         0x4d8, 0xffffffff, 0x00000100,
421         0x9784, 0xffffffff, 0x00000100,
422         0x9698, 0xffffffff, 0x00000100,
423         0x4d4, 0xffffffff, 0x00000200,
424         0x30cc, 0xffffffff, 0x00000100,
425         0xd0c0, 0xffffffff, 0xff000100,
426         0x802c, 0xffffffff, 0x40000000,
427         0x915c, 0xffffffff, 0x00010000,
428         0x9178, 0xffffffff, 0x00050000,
429         0x917c, 0xffffffff, 0x00030002,
430         0x918c, 0xffffffff, 0x00010004,
431         0x9190, 0xffffffff, 0x00070006,
432         0x9194, 0xffffffff, 0x00050000,
433         0x9198, 0xffffffff, 0x00030002,
434         0x91a8, 0xffffffff, 0x00010004,
435         0x91ac, 0xffffffff, 0x00070006,
436         0x91e8, 0xffffffff, 0x00000000,
437         0x9294, 0xffffffff, 0x00000000,
438         0x929c, 0xffffffff, 0x00000001,
439         0x802c, 0xffffffff, 0xc0000000
440 };
441
442 static const u32 juniper_mgcg_init[] =
443 {
444         0x802c, 0xffffffff, 0xc0000000,
445         0x5448, 0xffffffff, 0x00000100,
446         0x55e4, 0xffffffff, 0x00000100,
447         0x160c, 0xffffffff, 0x00000100,
448         0x5644, 0xffffffff, 0x00000100,
449         0xc164, 0xffffffff, 0x00000100,
450         0x8a18, 0xffffffff, 0x00000100,
451         0x897c, 0xffffffff, 0x06000100,
452         0x8b28, 0xffffffff, 0x00000100,
453         0x9144, 0xffffffff, 0x00000100,
454         0x9a60, 0xffffffff, 0x00000100,
455         0x9868, 0xffffffff, 0x00000100,
456         0x8d58, 0xffffffff, 0x00000100,
457         0x9510, 0xffffffff, 0x00000100,
458         0x949c, 0xffffffff, 0x00000100,
459         0x9654, 0xffffffff, 0x00000100,
460         0x9030, 0xffffffff, 0x00000100,
461         0x9034, 0xffffffff, 0x00000100,
462         0x9038, 0xffffffff, 0x00000100,
463         0x903c, 0xffffffff, 0x00000100,
464         0x9040, 0xffffffff, 0x00000100,
465         0xa200, 0xffffffff, 0x00000100,
466         0xa204, 0xffffffff, 0x00000100,
467         0xa208, 0xffffffff, 0x00000100,
468         0xa20c, 0xffffffff, 0x00000100,
469         0x971c, 0xffffffff, 0x00000100,
470         0xd0c0, 0xffffffff, 0xff000100,
471         0x802c, 0xffffffff, 0x40000000,
472         0x915c, 0xffffffff, 0x00010000,
473         0x9160, 0xffffffff, 0x00030002,
474         0x9178, 0xffffffff, 0x00070000,
475         0x917c, 0xffffffff, 0x00030002,
476         0x9180, 0xffffffff, 0x00050004,
477         0x918c, 0xffffffff, 0x00010006,
478         0x9190, 0xffffffff, 0x00090008,
479         0x9194, 0xffffffff, 0x00070000,
480         0x9198, 0xffffffff, 0x00030002,
481         0x919c, 0xffffffff, 0x00050004,
482         0x91a8, 0xffffffff, 0x00010006,
483         0x91ac, 0xffffffff, 0x00090008,
484         0x91b0, 0xffffffff, 0x00070000,
485         0x91b4, 0xffffffff, 0x00030002,
486         0x91b8, 0xffffffff, 0x00050004,
487         0x91c4, 0xffffffff, 0x00010006,
488         0x91c8, 0xffffffff, 0x00090008,
489         0x91cc, 0xffffffff, 0x00070000,
490         0x91d0, 0xffffffff, 0x00030002,
491         0x91d4, 0xffffffff, 0x00050004,
492         0x91e0, 0xffffffff, 0x00010006,
493         0x91e4, 0xffffffff, 0x00090008,
494         0x91e8, 0xffffffff, 0x00000000,
495         0x91ec, 0xffffffff, 0x00070000,
496         0x91f0, 0xffffffff, 0x00030002,
497         0x91f4, 0xffffffff, 0x00050004,
498         0x9200, 0xffffffff, 0x00010006,
499         0x9204, 0xffffffff, 0x00090008,
500         0x9208, 0xffffffff, 0x00070000,
501         0x920c, 0xffffffff, 0x00030002,
502         0x9210, 0xffffffff, 0x00050004,
503         0x921c, 0xffffffff, 0x00010006,
504         0x9220, 0xffffffff, 0x00090008,
505         0x9224, 0xffffffff, 0x00070000,
506         0x9228, 0xffffffff, 0x00030002,
507         0x922c, 0xffffffff, 0x00050004,
508         0x9238, 0xffffffff, 0x00010006,
509         0x923c, 0xffffffff, 0x00090008,
510         0x9240, 0xffffffff, 0x00070000,
511         0x9244, 0xffffffff, 0x00030002,
512         0x9248, 0xffffffff, 0x00050004,
513         0x9254, 0xffffffff, 0x00010006,
514         0x9258, 0xffffffff, 0x00090008,
515         0x925c, 0xffffffff, 0x00070000,
516         0x9260, 0xffffffff, 0x00030002,
517         0x9264, 0xffffffff, 0x00050004,
518         0x9270, 0xffffffff, 0x00010006,
519         0x9274, 0xffffffff, 0x00090008,
520         0x9278, 0xffffffff, 0x00070000,
521         0x927c, 0xffffffff, 0x00030002,
522         0x9280, 0xffffffff, 0x00050004,
523         0x928c, 0xffffffff, 0x00010006,
524         0x9290, 0xffffffff, 0x00090008,
525         0x9294, 0xffffffff, 0x00000000,
526         0x929c, 0xffffffff, 0x00000001,
527         0x802c, 0xffffffff, 0xc0000000,
528         0x977c, 0xffffffff, 0x00000100,
529         0x3f80, 0xffffffff, 0x00000100,
530         0xa210, 0xffffffff, 0x00000100,
531         0xa214, 0xffffffff, 0x00000100,
532         0x4d8, 0xffffffff, 0x00000100,
533         0x9784, 0xffffffff, 0x00000100,
534         0x9698, 0xffffffff, 0x00000100,
535         0x4d4, 0xffffffff, 0x00000200,
536         0x30cc, 0xffffffff, 0x00000100,
537         0x802c, 0xffffffff, 0xc0000000
538 };
539
540 static const u32 supersumo_golden_registers[] =
541 {
542         0x5eb4, 0xffffffff, 0x00000002,
543         0x5cc, 0xffffffff, 0x00000001,
544         0x7030, 0xffffffff, 0x00000011,
545         0x7c30, 0xffffffff, 0x00000011,
546         0x6104, 0x01000300, 0x00000000,
547         0x5bc0, 0x00300000, 0x00000000,
548         0x8c04, 0xffffffff, 0x40600060,
549         0x8c08, 0xffffffff, 0x001c001c,
550         0x8c20, 0xffffffff, 0x00800080,
551         0x8c24, 0xffffffff, 0x00800080,
552         0x8c18, 0xffffffff, 0x20202078,
553         0x8c1c, 0xffffffff, 0x00001010,
554         0x918c, 0xffffffff, 0x00010006,
555         0x91a8, 0xffffffff, 0x00010006,
556         0x91c4, 0xffffffff, 0x00010006,
557         0x91e0, 0xffffffff, 0x00010006,
558         0x9200, 0xffffffff, 0x00010006,
559         0x9150, 0xffffffff, 0x6e944040,
560         0x917c, 0xffffffff, 0x00030002,
561         0x9180, 0xffffffff, 0x00050004,
562         0x9198, 0xffffffff, 0x00030002,
563         0x919c, 0xffffffff, 0x00050004,
564         0x91b4, 0xffffffff, 0x00030002,
565         0x91b8, 0xffffffff, 0x00050004,
566         0x91d0, 0xffffffff, 0x00030002,
567         0x91d4, 0xffffffff, 0x00050004,
568         0x91f0, 0xffffffff, 0x00030002,
569         0x91f4, 0xffffffff, 0x00050004,
570         0x915c, 0xffffffff, 0x00010000,
571         0x9160, 0xffffffff, 0x00030002,
572         0x3f90, 0xffff0000, 0xff000000,
573         0x9178, 0xffffffff, 0x00070000,
574         0x9194, 0xffffffff, 0x00070000,
575         0x91b0, 0xffffffff, 0x00070000,
576         0x91cc, 0xffffffff, 0x00070000,
577         0x91ec, 0xffffffff, 0x00070000,
578         0x9148, 0xffff0000, 0xff000000,
579         0x9190, 0xffffffff, 0x00090008,
580         0x91ac, 0xffffffff, 0x00090008,
581         0x91c8, 0xffffffff, 0x00090008,
582         0x91e4, 0xffffffff, 0x00090008,
583         0x9204, 0xffffffff, 0x00090008,
584         0x3f94, 0xffff0000, 0xff000000,
585         0x914c, 0xffff0000, 0xff000000,
586         0x929c, 0xffffffff, 0x00000001,
587         0x8a18, 0xffffffff, 0x00000100,
588         0x8b28, 0xffffffff, 0x00000100,
589         0x9144, 0xffffffff, 0x00000100,
590         0x5644, 0xffffffff, 0x00000100,
591         0x9b7c, 0xffffffff, 0x00000000,
592         0x8030, 0xffffffff, 0x0000100a,
593         0x8a14, 0xffffffff, 0x00000007,
594         0x8b24, 0xffffffff, 0x00ff0fff,
595         0x8b10, 0xffffffff, 0x00000000,
596         0x28a4c, 0x06000000, 0x06000000,
597         0x4d8, 0xffffffff, 0x00000100,
598         0x913c, 0xffff000f, 0x0100000a,
599         0x960c, 0xffffffff, 0x54763210,
600         0x88c4, 0xffffffff, 0x000000c2,
601         0x88d4, 0xffffffff, 0x00000010,
602         0x8974, 0xffffffff, 0x00000000,
603         0xc78, 0x00000080, 0x00000080,
604         0x5e78, 0xffffffff, 0x001000f0,
605         0xd02c, 0xffffffff, 0x08421000,
606         0xa008, 0xffffffff, 0x00010000,
607         0x8d00, 0xffffffff, 0x100e4848,
608         0x8d04, 0xffffffff, 0x00164745,
609         0x8c00, 0xffffffff, 0xe4000003,
610         0x8cf0, 0x1fffffff, 0x08e00620,
611         0x28350, 0xffffffff, 0x00000000,
612         0x9508, 0xffffffff, 0x00000002
613 };
614
615 static const u32 sumo_golden_registers[] =
616 {
617         0x900c, 0x00ffffff, 0x0017071f,
618         0x8c18, 0xffffffff, 0x10101060,
619         0x8c1c, 0xffffffff, 0x00001010,
620         0x8c30, 0x0000000f, 0x00000005,
621         0x9688, 0x0000000f, 0x00000007
622 };
623
624 static const u32 wrestler_golden_registers[] =
625 {
626         0x5eb4, 0xffffffff, 0x00000002,
627         0x5cc, 0xffffffff, 0x00000001,
628         0x7030, 0xffffffff, 0x00000011,
629         0x7c30, 0xffffffff, 0x00000011,
630         0x6104, 0x01000300, 0x00000000,
631         0x5bc0, 0x00300000, 0x00000000,
632         0x918c, 0xffffffff, 0x00010006,
633         0x91a8, 0xffffffff, 0x00010006,
634         0x9150, 0xffffffff, 0x6e944040,
635         0x917c, 0xffffffff, 0x00030002,
636         0x9198, 0xffffffff, 0x00030002,
637         0x915c, 0xffffffff, 0x00010000,
638         0x3f90, 0xffff0000, 0xff000000,
639         0x9178, 0xffffffff, 0x00070000,
640         0x9194, 0xffffffff, 0x00070000,
641         0x9148, 0xffff0000, 0xff000000,
642         0x9190, 0xffffffff, 0x00090008,
643         0x91ac, 0xffffffff, 0x00090008,
644         0x3f94, 0xffff0000, 0xff000000,
645         0x914c, 0xffff0000, 0xff000000,
646         0x929c, 0xffffffff, 0x00000001,
647         0x8a18, 0xffffffff, 0x00000100,
648         0x8b28, 0xffffffff, 0x00000100,
649         0x9144, 0xffffffff, 0x00000100,
650         0x9b7c, 0xffffffff, 0x00000000,
651         0x8030, 0xffffffff, 0x0000100a,
652         0x8a14, 0xffffffff, 0x00000001,
653         0x8b24, 0xffffffff, 0x00ff0fff,
654         0x8b10, 0xffffffff, 0x00000000,
655         0x28a4c, 0x06000000, 0x06000000,
656         0x4d8, 0xffffffff, 0x00000100,
657         0x913c, 0xffff000f, 0x0100000a,
658         0x960c, 0xffffffff, 0x54763210,
659         0x88c4, 0xffffffff, 0x000000c2,
660         0x88d4, 0xffffffff, 0x00000010,
661         0x8974, 0xffffffff, 0x00000000,
662         0xc78, 0x00000080, 0x00000080,
663         0x5e78, 0xffffffff, 0x001000f0,
664         0xd02c, 0xffffffff, 0x08421000,
665         0xa008, 0xffffffff, 0x00010000,
666         0x8d00, 0xffffffff, 0x100e4848,
667         0x8d04, 0xffffffff, 0x00164745,
668         0x8c00, 0xffffffff, 0xe4000003,
669         0x8cf0, 0x1fffffff, 0x08e00410,
670         0x28350, 0xffffffff, 0x00000000,
671         0x9508, 0xffffffff, 0x00000002,
672         0x900c, 0xffffffff, 0x0017071f,
673         0x8c18, 0xffffffff, 0x10101060,
674         0x8c1c, 0xffffffff, 0x00001010
675 };
676
677 static const u32 barts_golden_registers[] =
678 {
679         0x5eb4, 0xffffffff, 0x00000002,
680         0x5e78, 0x8f311ff1, 0x001000f0,
681         0x3f90, 0xffff0000, 0xff000000,
682         0x9148, 0xffff0000, 0xff000000,
683         0x3f94, 0xffff0000, 0xff000000,
684         0x914c, 0xffff0000, 0xff000000,
685         0xc78, 0x00000080, 0x00000080,
686         0xbd4, 0x70073777, 0x00010001,
687         0xd02c, 0xbfffff1f, 0x08421000,
688         0xd0b8, 0x03773777, 0x02011003,
689         0x5bc0, 0x00200000, 0x50100000,
690         0x98f8, 0x33773777, 0x02011003,
691         0x98fc, 0xffffffff, 0x76543210,
692         0x7030, 0x31000311, 0x00000011,
693         0x2f48, 0x00000007, 0x02011003,
694         0x6b28, 0x00000010, 0x00000012,
695         0x7728, 0x00000010, 0x00000012,
696         0x10328, 0x00000010, 0x00000012,
697         0x10f28, 0x00000010, 0x00000012,
698         0x11b28, 0x00000010, 0x00000012,
699         0x12728, 0x00000010, 0x00000012,
700         0x240c, 0x000007ff, 0x00000380,
701         0x8a14, 0xf000001f, 0x00000007,
702         0x8b24, 0x3fff3fff, 0x00ff0fff,
703         0x8b10, 0x0000ff0f, 0x00000000,
704         0x28a4c, 0x07ffffff, 0x06000000,
705         0x10c, 0x00000001, 0x00010003,
706         0xa02c, 0xffffffff, 0x0000009b,
707         0x913c, 0x0000000f, 0x0100000a,
708         0x8d00, 0xffff7f7f, 0x100e4848,
709         0x8d04, 0x00ffffff, 0x00164745,
710         0x8c00, 0xfffc0003, 0xe4000003,
711         0x8c04, 0xf8ff00ff, 0x40600060,
712         0x8c08, 0x00ff00ff, 0x001c001c,
713         0x8cf0, 0x1fff1fff, 0x08e00620,
714         0x8c20, 0x0fff0fff, 0x00800080,
715         0x8c24, 0x0fff0fff, 0x00800080,
716         0x8c18, 0xffffffff, 0x20202078,
717         0x8c1c, 0x0000ffff, 0x00001010,
718         0x28350, 0x00000f01, 0x00000000,
719         0x9508, 0x3700001f, 0x00000002,
720         0x960c, 0xffffffff, 0x54763210,
721         0x88c4, 0x001f3ae3, 0x000000c2,
722         0x88d4, 0x0000001f, 0x00000010,
723         0x8974, 0xffffffff, 0x00000000
724 };
725
/* "Golden" register settings for Turks.  Consumed by
 * radeon_program_register_sequence(); each row appears to be a
 * { register offset, AND mask, OR value } triple — confirm against
 * radeon_program_register_sequence() in radeon_device.c.
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
776
/* "Golden" register settings for Caicos.  Same layout as the other
 * golden tables in this file: rows consumed by
 * radeon_program_register_sequence(), apparently as
 * { register offset, AND mask, OR value } triples.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
827
/* Apply the per-family "golden" register tables at init time.
 * For the original evergreen parts the shared table(s) are programmed
 * first, followed by the family-specific mgcg (clock gating) table;
 * the sequence order is deliberate, do not reorder the calls.
 * Unknown families are silently skipped.
 */
static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		/* cedar uses its own base table but shares the second one */
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		/* sumo2 gets the supersumo table plus its own overrides */
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}
913
914 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
915                              unsigned *bankh, unsigned *mtaspect,
916                              unsigned *tile_split)
917 {
918         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
919         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
920         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
921         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
922         switch (*bankw) {
923         default:
924         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
925         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
926         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
927         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
928         }
929         switch (*bankh) {
930         default:
931         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
932         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
933         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
934         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
935         }
936         switch (*mtaspect) {
937         default:
938         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
939         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
940         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
941         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
942         }
943 }
944
945 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
946                               u32 cntl_reg, u32 status_reg)
947 {
948         int r, i;
949         struct atom_clock_dividers dividers;
950
951         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
952                                            clock, false, &dividers);
953         if (r)
954                 return r;
955
956         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
957
958         for (i = 0; i < 100; i++) {
959                 if (RREG32(status_reg) & DCLK_STATUS)
960                         break;
961                 mdelay(10);
962         }
963         if (i == 100)
964                 return -ETIMEDOUT;
965
966         return 0;
967 }
968
969 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
970 {
971         int r = 0;
972         u32 cg_scratch = RREG32(CG_SCRATCH1);
973
974         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
975         if (r)
976                 goto done;
977         cg_scratch &= 0xffff0000;
978         cg_scratch |= vclk / 100; /* Mhz */
979
980         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
981         if (r)
982                 goto done;
983         cg_scratch &= 0x0000ffff;
984         cg_scratch |= (dclk / 100) << 16; /* Mhz */
985
986 done:
987         WREG32(CG_SCRATCH1, cg_scratch);
988
989         return r;
990 }
991
/**
 * evergreen_set_uvd_clocks - program the UVD PLL (evergreen+)
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock; 0 (with @dclk) puts the PLL to sleep
 * @dclk: requested UVD decoder clock; 0 (with @vclk) puts the PLL to sleep
 *
 * Switches VCLK/DCLK to the bypass source, reprograms the UPLL dividers
 * computed by radeon_uvd_calc_upll_dividers(), then switches back to the
 * PLL outputs.  The register write sequence is order-sensitive; do not
 * reorder the writes below.  Returns 0 on success, negative error code
 * on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute fb/post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects a VCO range depending on the fb divider;
	 * threshold value presumably comes from the hw team — do not change */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1080
1081 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1082 {
1083         u16 ctl, v;
1084         int err;
1085
1086         err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
1087         if (err)
1088                 return;
1089
1090         v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1091
1092         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1093          * to avoid hangs or perfomance issues
1094          */
1095         if ((v == 0) || (v == 6) || (v == 7)) {
1096                 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1097                 ctl |= (2 << 12);
1098                 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
1099         }
1100 }
1101
1102 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1103 {
1104         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1105                 return true;
1106         else
1107                 return false;
1108 }
1109
1110 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1111 {
1112         u32 pos1, pos2;
1113
1114         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1115         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1116
1117         if (pos1 != pos2)
1118                 return true;
1119         else
1120                 return false;
1121 }
1122
1123 /**
1124  * dce4_wait_for_vblank - vblank wait asic callback.
1125  *
1126  * @rdev: radeon_device pointer
1127  * @crtc: crtc to wait for vblank on
1128  *
1129  * Wait for vblank on the requested crtc (evergreen+).
1130  */
1131 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1132 {
1133         unsigned i = 0;
1134
1135         if (crtc >= rdev->num_crtc)
1136                 return;
1137
1138         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1139                 return;
1140
1141         /* depending on when we hit vblank, we may be close to active; if so,
1142          * wait for another frame.
1143          */
1144         while (dce4_is_in_vblank(rdev, crtc)) {
1145                 if (i++ % 100 == 0) {
1146                         if (!dce4_is_counter_moving(rdev, crtc))
1147                                 break;
1148                 }
1149         }
1150
1151         while (!dce4_is_in_vblank(rdev, crtc)) {
1152                 if (i++ % 100 == 0) {
1153                         if (!dce4_is_counter_moving(rdev, crtc))
1154                                 break;
1155                 }
1156         }
1157 }
1158
1159 /**
1160  * radeon_irq_kms_pflip_irq_get - pre-pageflip callback.
1161  *
1162  * @rdev: radeon_device pointer
1163  * @crtc: crtc to prepare for pageflip on
1164  *
1165  * Pre-pageflip callback (evergreen+).
1166  * Enables the pageflip irq (vblank irq).
1167  */
1168 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1169 {
1170         /* enable the pflip int */
1171         radeon_irq_kms_pflip_irq_get(rdev, crtc);
1172 }
1173
1174 /**
1175  * evergreen_post_page_flip - pos-pageflip callback.
1176  *
1177  * @rdev: radeon_device pointer
1178  * @crtc: crtc to cleanup pageflip on
1179  *
1180  * Post-pageflip callback (evergreen+).
1181  * Disables the pageflip irq (vblank irq).
1182  */
1183 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1184 {
1185         /* disable the pflip int */
1186         radeon_irq_kms_pflip_irq_put(rdev, crtc);
1187 }
1188
1189 /**
1190  * evergreen_page_flip - pageflip callback.
1191  *
1192  * @rdev: radeon_device pointer
1193  * @crtc_id: crtc to cleanup pageflip on
1194  * @crtc_base: new address of the crtc (GPU MC address)
1195  *
1196  * Does the actual pageflip (evergreen+).
1197  * During vblank we take the crtc lock and wait for the update_pending
1198  * bit to go high, when it does, we release the lock, and allow the
1199  * double buffered update to take place.
1200  * Returns the current update pending status.
1201  */
1202 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1203 {
1204         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1205         u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1206         int i;
1207
1208         /* Lock the graphics update lock */
1209         tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1210         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1211
1212         /* update the scanout addresses */
1213         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1214                upper_32_bits(crtc_base));
1215         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1216                (u32)crtc_base);
1217
1218         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1219                upper_32_bits(crtc_base));
1220         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1221                (u32)crtc_base);
1222
1223         /* Wait for update_pending to go high. */
1224         for (i = 0; i < rdev->usec_timeout; i++) {
1225                 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1226                         break;
1227                 udelay(1);
1228         }
1229         DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1230
1231         /* Unlock the lock, so double-buffering can take place inside vblank */
1232         tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1233         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1234
1235         /* Return current update_pending status: */
1236         return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1237 }
1238
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper: raw ADC reading plus a calibration offset from
		 * CG_THERMAL_CTRL; bit 0x100 of toffset appears to be the
		 * sign bit (0x200 - toffset recovers the magnitude) —
		 * matches the two branches below */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		/* other families report a pre-computed value in half-degree
		 * units; 0x400/0x200 look like saturation flags and 0x100
		 * the sign bit of a 9-bit two's complement value */
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			/* sign-extend the 9-bit negative reading */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* half-degrees -> millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1277
1278 int sumo_get_temp(struct radeon_device *rdev)
1279 {
1280         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1281         int actual_temp = temp - 49;
1282
1283         return actual_temp * 1000;
1284 }
1285
1286 /**
1287  * sumo_pm_init_profile - Initialize power profiles callback.
1288  *
1289  * @rdev: radeon_device pointer
1290  *
1291  * Initialize the power states used in profile mode
1292  * (sumo, trinity, SI).
1293  * Used for profile mode only.
1294  */
1295 void sumo_pm_init_profile(struct radeon_device *rdev)
1296 {
1297         int idx;
1298
1299         /* default */
1300         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1301         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1302         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1303         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1304
1305         /* low,mid sh/mh */
1306         if (rdev->flags & RADEON_IS_MOBILITY)
1307                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1308         else
1309                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1310
1311         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1312         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1313         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1314         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1315
1316         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1317         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1318         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1319         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1320
1321         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1322         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1323         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1324         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1325
1326         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1327         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1328         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1329         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1330
1331         /* high sh/mh */
1332         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1333         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1334         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1335         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1336         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1337                 rdev->pm.power_state[idx].num_clock_modes - 1;
1338
1339         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1340         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1341         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1342         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1343                 rdev->pm.power_state[idx].num_clock_modes - 1;
1344 }
1345
1346 /**
1347  * btc_pm_init_profile - Initialize power profiles callback.
1348  *
1349  * @rdev: radeon_device pointer
1350  *
1351  * Initialize the power states used in profile mode
1352  * (BTC, cayman).
1353  * Used for profile mode only.
1354  */
1355 void btc_pm_init_profile(struct radeon_device *rdev)
1356 {
1357         int idx;
1358
1359         /* default */
1360         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1361         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1362         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1363         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1364         /* starting with BTC, there is one state that is used for both
1365          * MH and SH.  Difference is that we always use the high clock index for
1366          * mclk.
1367          */
1368         if (rdev->flags & RADEON_IS_MOBILITY)
1369                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1370         else
1371                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1372         /* low sh */
1373         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1374         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1375         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1376         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1377         /* mid sh */
1378         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1379         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1380         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1381         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1382         /* high sh */
1383         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1384         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1385         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1386         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1387         /* low mh */
1388         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1389         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1390         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1391         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1392         /* mid mh */
1393         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1394         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1395         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1396         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1397         /* high mh */
1398         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1399         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1400         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1401         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1402 }
1403
1404 /**
1405  * evergreen_pm_misc - set additional pm hw parameters callback.
1406  *
1407  * @rdev: radeon_device pointer
1408  *
1409  * Set non-clock parameters associated with a power state
1410  * (voltage, etc.) (evergreen+).
1411  */
1412 void evergreen_pm_misc(struct radeon_device *rdev)
1413 {
1414         int req_ps_idx = rdev->pm.requested_power_state_index;
1415         int req_cm_idx = rdev->pm.requested_clock_mode_index;
1416         struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1417         struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1418
1419         if (voltage->type == VOLTAGE_SW) {
1420                 /* 0xff01 is a flag rather then an actual voltage */
1421                 if (voltage->voltage == 0xff01)
1422                         return;
1423                 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1424                         radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1425                         rdev->pm.current_vddc = voltage->voltage;
1426                         DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1427                 }
1428
1429                 /* starting with BTC, there is one state that is used for both
1430                  * MH and SH.  Difference is that we always use the high clock index for
1431                  * mclk and vddci.
1432                  */
1433                 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1434                     (rdev->family >= CHIP_BARTS) &&
1435                     rdev->pm.active_crtc_count &&
1436                     ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1437                      (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1438                         voltage = &rdev->pm.power_state[req_ps_idx].
1439                                 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1440
1441                 /* 0xff01 is a flag rather then an actual voltage */
1442                 if (voltage->vddci == 0xff01)
1443                         return;
1444                 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1445                         radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1446                         rdev->pm.current_vddci = voltage->vddci;
1447                         DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1448                 }
1449         }
1450 }
1451
1452 /**
1453  * evergreen_pm_prepare - pre-power state change callback.
1454  *
1455  * @rdev: radeon_device pointer
1456  *
1457  * Prepare for a power state change (evergreen+).
1458  */
1459 void evergreen_pm_prepare(struct radeon_device *rdev)
1460 {
1461         struct drm_device *ddev = rdev->ddev;
1462         struct drm_crtc *crtc;
1463         struct radeon_crtc *radeon_crtc;
1464         u32 tmp;
1465
1466         /* disable any active CRTCs */
1467         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1468                 radeon_crtc = to_radeon_crtc(crtc);
1469                 if (radeon_crtc->enabled) {
1470                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1471                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1472                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1473                 }
1474         }
1475 }
1476
1477 /**
1478  * evergreen_pm_finish - post-power state change callback.
1479  *
1480  * @rdev: radeon_device pointer
1481  *
1482  * Clean up after a power state change (evergreen+).
1483  */
1484 void evergreen_pm_finish(struct radeon_device *rdev)
1485 {
1486         struct drm_device *ddev = rdev->ddev;
1487         struct drm_crtc *crtc;
1488         struct radeon_crtc *radeon_crtc;
1489         u32 tmp;
1490
1491         /* enable any active CRTCs */
1492         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1493                 radeon_crtc = to_radeon_crtc(crtc);
1494                 if (radeon_crtc->enabled) {
1495                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1496                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1497                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1498                 }
1499         }
1500 }
1501
1502 /**
1503  * evergreen_hpd_sense - hpd sense callback.
1504  *
1505  * @rdev: radeon_device pointer
1506  * @hpd: hpd (hotplug detect) pin
1507  *
1508  * Checks if a digital monitor is connected (evergreen+).
1509  * Returns true if connected, false if not connected.
1510  */
1511 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1512 {
1513         bool connected = false;
1514
1515         switch (hpd) {
1516         case RADEON_HPD_1:
1517                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1518                         connected = true;
1519                 break;
1520         case RADEON_HPD_2:
1521                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1522                         connected = true;
1523                 break;
1524         case RADEON_HPD_3:
1525                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1526                         connected = true;
1527                 break;
1528         case RADEON_HPD_4:
1529                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1530                         connected = true;
1531                 break;
1532         case RADEON_HPD_5:
1533                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1534                         connected = true;
1535                 break;
1536         case RADEON_HPD_6:
1537                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1538                         connected = true;
1539                         break;
1540         default:
1541                 break;
1542         }
1543
1544         return connected;
1545 }
1546
1547 /**
1548  * evergreen_hpd_set_polarity - hpd set polarity callback.
1549  *
1550  * @rdev: radeon_device pointer
1551  * @hpd: hpd (hotplug detect) pin
1552  *
1553  * Set the polarity of the hpd pin (evergreen+).
1554  */
1555 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1556                                 enum radeon_hpd_id hpd)
1557 {
1558         u32 tmp;
1559         bool connected = evergreen_hpd_sense(rdev, hpd);
1560
1561         switch (hpd) {
1562         case RADEON_HPD_1:
1563                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1564                 if (connected)
1565                         tmp &= ~DC_HPDx_INT_POLARITY;
1566                 else
1567                         tmp |= DC_HPDx_INT_POLARITY;
1568                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1569                 break;
1570         case RADEON_HPD_2:
1571                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1572                 if (connected)
1573                         tmp &= ~DC_HPDx_INT_POLARITY;
1574                 else
1575                         tmp |= DC_HPDx_INT_POLARITY;
1576                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1577                 break;
1578         case RADEON_HPD_3:
1579                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1580                 if (connected)
1581                         tmp &= ~DC_HPDx_INT_POLARITY;
1582                 else
1583                         tmp |= DC_HPDx_INT_POLARITY;
1584                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1585                 break;
1586         case RADEON_HPD_4:
1587                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1588                 if (connected)
1589                         tmp &= ~DC_HPDx_INT_POLARITY;
1590                 else
1591                         tmp |= DC_HPDx_INT_POLARITY;
1592                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1593                 break;
1594         case RADEON_HPD_5:
1595                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1596                 if (connected)
1597                         tmp &= ~DC_HPDx_INT_POLARITY;
1598                 else
1599                         tmp |= DC_HPDx_INT_POLARITY;
1600                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1601                         break;
1602         case RADEON_HPD_6:
1603                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1604                 if (connected)
1605                         tmp &= ~DC_HPDx_INT_POLARITY;
1606                 else
1607                         tmp |= DC_HPDx_INT_POLARITY;
1608                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1609                 break;
1610         default:
1611                 break;
1612         }
1613 }
1614
1615 /**
1616  * evergreen_hpd_init - hpd setup callback.
1617  *
1618  * @rdev: radeon_device pointer
1619  *
1620  * Setup the hpd pins used by the card (evergreen+).
1621  * Enable the pin, set the polarity, and enable the hpd interrupts.
1622  */
1623 void evergreen_hpd_init(struct radeon_device *rdev)
1624 {
1625         struct drm_device *dev = rdev->ddev;
1626         struct drm_connector *connector;
1627         unsigned enabled = 0;
1628         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1629                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1630
1631         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1632                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1633
1634                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1635                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1636                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1637                          * aux dp channel on imac and help (but not completely fix)
1638                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1639                          * also avoid interrupt storms during dpms.
1640                          */
1641                         continue;
1642                 }
1643                 switch (radeon_connector->hpd.hpd) {
1644                 case RADEON_HPD_1:
1645                         WREG32(DC_HPD1_CONTROL, tmp);
1646                         break;
1647                 case RADEON_HPD_2:
1648                         WREG32(DC_HPD2_CONTROL, tmp);
1649                         break;
1650                 case RADEON_HPD_3:
1651                         WREG32(DC_HPD3_CONTROL, tmp);
1652                         break;
1653                 case RADEON_HPD_4:
1654                         WREG32(DC_HPD4_CONTROL, tmp);
1655                         break;
1656                 case RADEON_HPD_5:
1657                         WREG32(DC_HPD5_CONTROL, tmp);
1658                         break;
1659                 case RADEON_HPD_6:
1660                         WREG32(DC_HPD6_CONTROL, tmp);
1661                         break;
1662                 default:
1663                         break;
1664                 }
1665                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1666                 enabled |= 1 << radeon_connector->hpd.hpd;
1667         }
1668         radeon_irq_kms_enable_hpd(rdev, enabled);
1669 }
1670
1671 /**
1672  * evergreen_hpd_fini - hpd tear down callback.
1673  *
1674  * @rdev: radeon_device pointer
1675  *
1676  * Tear down the hpd pins used by the card (evergreen+).
1677  * Disable the hpd interrupts.
1678  */
1679 void evergreen_hpd_fini(struct radeon_device *rdev)
1680 {
1681         struct drm_device *dev = rdev->ddev;
1682         struct drm_connector *connector;
1683         unsigned disabled = 0;
1684
1685         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1686                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1687                 switch (radeon_connector->hpd.hpd) {
1688                 case RADEON_HPD_1:
1689                         WREG32(DC_HPD1_CONTROL, 0);
1690                         break;
1691                 case RADEON_HPD_2:
1692                         WREG32(DC_HPD2_CONTROL, 0);
1693                         break;
1694                 case RADEON_HPD_3:
1695                         WREG32(DC_HPD3_CONTROL, 0);
1696                         break;
1697                 case RADEON_HPD_4:
1698                         WREG32(DC_HPD4_CONTROL, 0);
1699                         break;
1700                 case RADEON_HPD_5:
1701                         WREG32(DC_HPD5_CONTROL, 0);
1702                         break;
1703                 case RADEON_HPD_6:
1704                         WREG32(DC_HPD6_CONTROL, 0);
1705                         break;
1706                 default:
1707                         break;
1708                 }
1709                 disabled |= 1 << radeon_connector->hpd.hpd;
1710         }
1711         radeon_irq_kms_disable_hpd(rdev, disabled);
1712 }
1713
1714 /* watermark setup */
1715
1716 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1717                                         struct radeon_crtc *radeon_crtc,
1718                                         struct drm_display_mode *mode,
1719                                         struct drm_display_mode *other_mode)
1720 {
1721         u32 tmp;
1722         /*
1723          * Line Buffer Setup
1724          * There are 3 line buffers, each one shared by 2 display controllers.
1725          * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1726          * the display controllers.  The paritioning is done via one of four
1727          * preset allocations specified in bits 2:0:
1728          * first display controller
1729          *  0 - first half of lb (3840 * 2)
1730          *  1 - first 3/4 of lb (5760 * 2)
1731          *  2 - whole lb (7680 * 2), other crtc must be disabled
1732          *  3 - first 1/4 of lb (1920 * 2)
1733          * second display controller
1734          *  4 - second half of lb (3840 * 2)
1735          *  5 - second 3/4 of lb (5760 * 2)
1736          *  6 - whole lb (7680 * 2), other crtc must be disabled
1737          *  7 - last 1/4 of lb (1920 * 2)
1738          */
1739         /* this can get tricky if we have two large displays on a paired group
1740          * of crtcs.  Ideally for multiple large displays we'd assign them to
1741          * non-linked crtcs for maximum line buffer allocation.
1742          */
1743         if (radeon_crtc->base.enabled && mode) {
1744                 if (other_mode)
1745                         tmp = 0; /* 1/2 */
1746                 else
1747                         tmp = 2; /* whole */
1748         } else
1749                 tmp = 0;
1750
1751         /* second controller of the pair uses second half of the lb */
1752         if (radeon_crtc->crtc_id % 2)
1753                 tmp += 4;
1754         WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1755
1756         if (radeon_crtc->base.enabled && mode) {
1757                 switch (tmp) {
1758                 case 0:
1759                 case 4:
1760                 default:
1761                         if (ASIC_IS_DCE5(rdev))
1762                                 return 4096 * 2;
1763                         else
1764                                 return 3840 * 2;
1765                 case 1:
1766                 case 5:
1767                         if (ASIC_IS_DCE5(rdev))
1768                                 return 6144 * 2;
1769                         else
1770                                 return 5760 * 2;
1771                 case 2:
1772                 case 6:
1773                         if (ASIC_IS_DCE5(rdev))
1774                                 return 8192 * 2;
1775                         else
1776                                 return 7680 * 2;
1777                 case 3:
1778                 case 7:
1779                         if (ASIC_IS_DCE5(rdev))
1780                                 return 2048 * 2;
1781                         else
1782                                 return 1920 * 2;
1783                 }
1784         }
1785
1786         /* controller not enabled, so no lb used */
1787         return 0;
1788 }
1789
1790 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1791 {
1792         u32 tmp = RREG32(MC_SHARED_CHMAP);
1793
1794         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1795         case 0:
1796         default:
1797                 return 1;
1798         case 1:
1799                 return 2;
1800         case 2:
1801                 return 4;
1802         case 3:
1803                 return 8;
1804         }
1805 }
1806
/* Parameters describing one display head's mode and the memory/engine
 * clocks, gathered for the display watermark calculations below.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1822
1823 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1824 {
1825         /* Calculate DRAM Bandwidth and the part allocated to display. */
1826         fixed20_12 dram_efficiency; /* 0.7 */
1827         fixed20_12 yclk, dram_channels, bandwidth;
1828         fixed20_12 a;
1829
1830         a.full = dfixed_const(1000);
1831         yclk.full = dfixed_const(wm->yclk);
1832         yclk.full = dfixed_div(yclk, a);
1833         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1834         a.full = dfixed_const(10);
1835         dram_efficiency.full = dfixed_const(7);
1836         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1837         bandwidth.full = dfixed_mul(dram_channels, yclk);
1838         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1839
1840         return dfixed_trunc(bandwidth);
1841 }
1842
1843 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1844 {
1845         /* Calculate DRAM Bandwidth and the part allocated to display. */
1846         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1847         fixed20_12 yclk, dram_channels, bandwidth;
1848         fixed20_12 a;
1849
1850         a.full = dfixed_const(1000);
1851         yclk.full = dfixed_const(wm->yclk);
1852         yclk.full = dfixed_div(yclk, a);
1853         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1854         a.full = dfixed_const(10);
1855         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1856         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1857         bandwidth.full = dfixed_mul(dram_channels, yclk);
1858         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1859
1860         return dfixed_trunc(bandwidth);
1861 }
1862
1863 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1864 {
1865         /* Calculate the display Data return Bandwidth */
1866         fixed20_12 return_efficiency; /* 0.8 */
1867         fixed20_12 sclk, bandwidth;
1868         fixed20_12 a;
1869
1870         a.full = dfixed_const(1000);
1871         sclk.full = dfixed_const(wm->sclk);
1872         sclk.full = dfixed_div(sclk, a);
1873         a.full = dfixed_const(10);
1874         return_efficiency.full = dfixed_const(8);
1875         return_efficiency.full = dfixed_div(return_efficiency, a);
1876         a.full = dfixed_const(32);
1877         bandwidth.full = dfixed_mul(a, sclk);
1878         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1879
1880         return dfixed_trunc(bandwidth);
1881 }
1882
1883 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1884 {
1885         /* Calculate the DMIF Request Bandwidth */
1886         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1887         fixed20_12 disp_clk, bandwidth;
1888         fixed20_12 a;
1889
1890         a.full = dfixed_const(1000);
1891         disp_clk.full = dfixed_const(wm->disp_clk);
1892         disp_clk.full = dfixed_div(disp_clk, a);
1893         a.full = dfixed_const(10);
1894         disp_clk_request_efficiency.full = dfixed_const(8);
1895         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1896         a.full = dfixed_const(32);
1897         bandwidth.full = dfixed_mul(a, disp_clk);
1898         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1899
1900         return dfixed_trunc(bandwidth);
1901 }
1902
1903 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1904 {
1905         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1906         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1907         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
1908         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
1909
1910         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1911 }
1912
1913 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
1914 {
1915         /* Calculate the display mode Average Bandwidth
1916          * DisplayMode should contain the source and destination dimensions,
1917          * timing, etc.
1918          */
1919         fixed20_12 bpp;
1920         fixed20_12 line_time;
1921         fixed20_12 src_width;
1922         fixed20_12 bandwidth;
1923         fixed20_12 a;
1924
1925         a.full = dfixed_const(1000);
1926         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1927         line_time.full = dfixed_div(line_time, a);
1928         bpp.full = dfixed_const(wm->bytes_per_pixel);
1929         src_width.full = dfixed_const(wm->src_width);
1930         bandwidth.full = dfixed_mul(src_width, bpp);
1931         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1932         bandwidth.full = dfixed_div(bandwidth, line_time);
1933
1934         return dfixed_trunc(bandwidth);
1935 }
1936
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* NOTE(review): the divisions below run in the initializers, i.e.
	 * before the num_heads == 0 early-out; this assumes
	 * available_bandwidth and disp_clk are non-zero -- TODO confirm */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* up to 4 source lines can be needed per destination line when
	 * scaling down heavily, using many vertical taps, or interlacing */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* line buffer fill rate: the smaller of this head's share of the
	 * available bandwidth and disp_clk * bytes_per_pixel */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time (ns) needed to fill one destination line's worth of data */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* pad the latency when the line cannot be filled within active time */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
1989
1990 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1991 {
1992         if (evergreen_average_bandwidth(wm) <=
1993             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
1994                 return true;
1995         else
1996                 return false;
1997 };
1998
1999 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2000 {
2001         if (evergreen_average_bandwidth(wm) <=
2002             (evergreen_available_bandwidth(wm) / wm->num_heads))
2003                 return true;
2004         else
2005                 return false;
2006 };
2007
2008 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2009 {
2010         u32 lb_partitions = wm->lb_size / wm->src_width;
2011         u32 line_time = wm->active_time + wm->blank_time;
2012         u32 latency_tolerant_lines;
2013         u32 latency_hiding;
2014         fixed20_12 a;
2015
2016         a.full = dfixed_const(1);
2017         if (wm->vsc.full > a.full)
2018                 latency_tolerant_lines = 1;
2019         else {
2020                 if (lb_partitions <= (wm->vtaps + 1))
2021                         latency_tolerant_lines = 1;
2022                 else
2023                         latency_tolerant_lines = 2;
2024         }
2025
2026         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2027
2028         if (evergreen_latency_watermark(wm) <= latency_hiding)
2029                 return true;
2030         else
2031                 return false;
2032 }
2033
/* Compute and program the latency watermarks (A = high clocks, B = low
 * clocks) and the display priority marks for one crtc.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	/* per-pipe register stride for the PIPEx_* arbitration registers;
	 * assumes 16 dwords per pipe -- matches the PIPE0_* usage below */
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;
		/* line time in ns, capped to the 16-bit register field */
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

		/* fill in the watermark parameters from the current mode/clocks */
		wm.yclk = rdev->pm.current_mclk * 10;
		wm.sclk = rdev->pm.current_sclk * 10;
		wm.disp_clk = mode->clock;
		wm.src_width = mode->crtc_hdisplay;
		wm.active_time = mode->crtc_hdisplay * pixel_period;
		wm.blank_time = line_time - wm.active_time;
		wm.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm.interlaced = true;
		wm.vsc = radeon_crtc->vsc;
		wm.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm.vtaps = 2;
		wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm.lb_size = lb_size;
		wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
		wm.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
		/* set for low clocks */
		/* wm.yclk = low clk; wm.sclk = low clk */
		/* NOTE(review): B is currently computed with the same clocks
		 * as A, so the two watermarks come out identical */
		latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
		    !evergreen_check_latency_hiding(&wm) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A = watermark_a * pixel clock * hsc / 1000 / 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B, same formula with watermark_b */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

}
2141
2142 /**
2143  * evergreen_bandwidth_update - update display watermarks callback.
2144  *
2145  * @rdev: radeon_device pointer
2146  *
2147  * Update the display watermarks based on the requested mode(s)
2148  * (evergreen+).
2149  */
2150 void evergreen_bandwidth_update(struct radeon_device *rdev)
2151 {
2152         struct drm_display_mode *mode0 = NULL;
2153         struct drm_display_mode *mode1 = NULL;
2154         u32 num_heads = 0, lb_size;
2155         int i;
2156
2157         radeon_update_display_priority(rdev);
2158
2159         for (i = 0; i < rdev->num_crtc; i++) {
2160                 if (rdev->mode_info.crtcs[i]->base.enabled)
2161                         num_heads++;
2162         }
2163         for (i = 0; i < rdev->num_crtc; i += 2) {
2164                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2165                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2166                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2167                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2168                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2169                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2170         }
2171 }
2172
2173 /**
2174  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2175  *
2176  * @rdev: radeon_device pointer
2177  *
2178  * Wait for the MC (memory controller) to be idle.
2179  * (evergreen+).
2180  * Returns 0 if the MC is idle, -1 if not.
2181  */
2182 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2183 {
2184         unsigned i;
2185         u32 tmp;
2186
2187         for (i = 0; i < rdev->usec_timeout; i++) {
2188                 /* read MC_STATUS */
2189                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2190                 if (!tmp)
2191                         return 0;
2192                 udelay(1);
2193         }
2194         return -1;
2195 }
2196
2197 /*
2198  * GART
2199  */
2200 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2201 {
2202         unsigned i;
2203         u32 tmp;
2204
2205         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2206
2207         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2208         for (i = 0; i < rdev->usec_timeout; i++) {
2209                 /* read MC_STATUS */
2210                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2211                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2212                 if (tmp == 2) {
2213                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2214                         return;
2215                 }
2216                 if (tmp) {
2217                         return;
2218                 }
2219                 udelay(1);
2220         }
2221 }
2222
/* Program the MC/VM registers to enable the PCIE GART for VM context 0.
 * Returns 0 on success or a negative error code if the page table
 * cannot be pinned in VRAM.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* the GART page table lives in a VRAM buffer object; without it
	 * there is nothing to point the MC at.
	 */
	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion (IGP) parts use the FUS_* variants of the MD TLB regs */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these discrete families have a fourth MD L1 TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 covers the GTT aperture; addresses are in 4K pages */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses get redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2281
/* Disable the PCIE GART: turn off both VM contexts, put the L2/L1 TLBs
 * into a passive configuration, and unpin the page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	/* note: no ENABLE_L1_TLB here — the L1 TLBs are left disabled */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2306
/* Full GART teardown: disable it in hardware first, then free the
 * VRAM page table and the generic GART bookkeeping. Order matters.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2313
2314
/* Configure the MC for AGP operation: enable the L2/L1 TLBs in
 * pass-through mode but leave both VM contexts disabled (no page
 * table walks; AGP aperture accesses go straight through).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no VM contexts in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2340
/**
 * evergreen_mc_stop - blank the displays and blackout the memory controller
 *
 * @rdev: radeon_device pointer
 * @save: register state saved here, restored later by evergreen_mc_resume()
 *
 * Disables scanout on all active CRTCs and puts the MC into blackout so
 * VRAM can be safely reprogrammed while nothing is reading from it.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	/* save state that evergreen_mc_resume() restores */
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

	/* disable VGA render */
	WREG32(VGA_RENDER_CONTROL, 0);
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6+: blank the data output if not already blanked */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					/* NOTE(review): no UPDATE_LOCK release here, unlike
					 * the pre-DCE6 branch below — confirm intentional */
				}
			} else {
				/* pre-DCE6: stop the CRTC from issuing read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* flag cleared again: the lock loop below and
			 * evergreen_mc_resume() will skip this crtc */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	/* enter MC blackout mode if not already active (mode field == 1) */
	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2424
/**
 * evergreen_mc_resume - undo evergreen_mc_stop()
 *
 * @rdev: radeon_device pointer
 * @save: state captured by evergreen_mc_stop()
 *
 * Re-points the CRTCs at the (possibly relocated) VRAM base, releases the
 * double-buffer locks, lifts the MC blackout, re-enables scanout on the
 * CRTCs recorded as enabled in @save, and restores the VGA registers.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force update mode field to 0 */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the pending surface update to take effect */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6+: unblank data output */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable CRTC memory read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	/* Unlock vga access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}
2507
/**
 * evergreen_mc_program - program the memory controller apertures
 *
 * @rdev: radeon_device pointer
 *
 * Stops the displays/MC, programs the system aperture, FB location and
 * AGP aperture registers, then resumes the displays.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	/* nothing may touch VRAM while the apertures are moved */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must span both VRAM and the AGP
		 * aperture, whichever order they are laid out in */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start/end encoded in 16MB units in one register */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP effectively disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2584
2585 /*
2586  * CP.
2587  */
/**
 * evergreen_ring_ib_execute - schedule an indirect buffer on the ring
 *
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to execute
 *
 * Emits the packets that record the expected read pointer (for GPU hang
 * diagnosis) and then an INDIRECT_BUFFER packet pointing at @ib.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* +3 for the SET_CONFIG_REG packet below, +4 for the IB packet */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg - 
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* +5 for the MEM_WRITE packet below, +4 for the IB packet */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2621
2622
2623 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2624 {
2625         const __be32 *fw_data;
2626         int i;
2627
2628         if (!rdev->me_fw || !rdev->pfp_fw)
2629                 return -EINVAL;
2630
2631         r700_cp_stop(rdev);
2632         WREG32(CP_RB_CNTL,
2633 #ifdef __BIG_ENDIAN
2634                BUF_SWAP_32BIT |
2635 #endif
2636                RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2637
2638         fw_data = (const __be32 *)rdev->pfp_fw->data;
2639         WREG32(CP_PFP_UCODE_ADDR, 0);
2640         for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2641                 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2642         WREG32(CP_PFP_UCODE_ADDR, 0);
2643
2644         fw_data = (const __be32 *)rdev->me_fw->data;
2645         WREG32(CP_ME_RAM_WADDR, 0);
2646         for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2647                 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2648
2649         WREG32(CP_PFP_UCODE_ADDR, 0);
2650         WREG32(CP_ME_RAM_WADDR, 0);
2651         WREG32(CP_ME_RAM_RADDR, 0);
2652         return 0;
2653 }
2654
/* Initialize the CP micro engine and emit the default clear-state
 * context. Returns 0 on success or the ring-lock error code.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	/* ME_INITIALIZE packet: 5 payload dwords */
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* release the micro engine halt */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state plus 19 dwords of packets emitted below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2720
/* Reset the CP and bring up the GFX ring buffer: program ring size,
 * read/write pointers and writeback addresses, then start the CP and
 * run a ring test. Returns 0 on success or the ring-test error code.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	rb_bufsz = drm_order(ring->ring_size / 8);
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: CP must not update the rptr copy in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	/* drop RB_RPTR_WR_ENA again now that the pointers are initialized */
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	ring->rptr = RREG32(CP_RB_RPTR);

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
2789
2790 /*
2791  * Core functions
2792  */
2793 static void evergreen_gpu_init(struct radeon_device *rdev)
2794 {
2795         u32 gb_addr_config;
2796         u32 mc_shared_chmap, mc_arb_ramcfg;
2797         u32 sx_debug_1;
2798         u32 smx_dc_ctl0;
2799         u32 sq_config;
2800         u32 sq_lds_resource_mgmt;
2801         u32 sq_gpr_resource_mgmt_1;
2802         u32 sq_gpr_resource_mgmt_2;
2803         u32 sq_gpr_resource_mgmt_3;
2804         u32 sq_thread_resource_mgmt;
2805         u32 sq_thread_resource_mgmt_2;
2806         u32 sq_stack_resource_mgmt_1;
2807         u32 sq_stack_resource_mgmt_2;
2808         u32 sq_stack_resource_mgmt_3;
2809         u32 vgt_cache_invalidation;
2810         u32 hdp_host_path_cntl, tmp;
2811         u32 disabled_rb_mask;
2812         int i, j, num_shader_engines, ps_thread_count;
2813
2814         switch (rdev->family) {
2815         case CHIP_CYPRESS:
2816         case CHIP_HEMLOCK:
2817                 rdev->config.evergreen.num_ses = 2;
2818                 rdev->config.evergreen.max_pipes = 4;
2819                 rdev->config.evergreen.max_tile_pipes = 8;
2820                 rdev->config.evergreen.max_simds = 10;
2821                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2822                 rdev->config.evergreen.max_gprs = 256;
2823                 rdev->config.evergreen.max_threads = 248;
2824                 rdev->config.evergreen.max_gs_threads = 32;
2825                 rdev->config.evergreen.max_stack_entries = 512;
2826                 rdev->config.evergreen.sx_num_of_sets = 4;
2827                 rdev->config.evergreen.sx_max_export_size = 256;
2828                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2829                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2830                 rdev->config.evergreen.max_hw_contexts = 8;
2831                 rdev->config.evergreen.sq_num_cf_insts = 2;
2832
2833                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2834                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2835                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2836                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
2837                 break;
2838         case CHIP_JUNIPER:
2839                 rdev->config.evergreen.num_ses = 1;
2840                 rdev->config.evergreen.max_pipes = 4;
2841                 rdev->config.evergreen.max_tile_pipes = 4;
2842                 rdev->config.evergreen.max_simds = 10;
2843                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2844                 rdev->config.evergreen.max_gprs = 256;
2845                 rdev->config.evergreen.max_threads = 248;
2846                 rdev->config.evergreen.max_gs_threads = 32;
2847                 rdev->config.evergreen.max_stack_entries = 512;
2848                 rdev->config.evergreen.sx_num_of_sets = 4;
2849                 rdev->config.evergreen.sx_max_export_size = 256;
2850                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2851                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2852                 rdev->config.evergreen.max_hw_contexts = 8;
2853                 rdev->config.evergreen.sq_num_cf_insts = 2;
2854
2855                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2856                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2857                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2858                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
2859                 break;
2860         case CHIP_REDWOOD:
2861                 rdev->config.evergreen.num_ses = 1;
2862                 rdev->config.evergreen.max_pipes = 4;
2863                 rdev->config.evergreen.max_tile_pipes = 4;
2864                 rdev->config.evergreen.max_simds = 5;
2865                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2866                 rdev->config.evergreen.max_gprs = 256;
2867                 rdev->config.evergreen.max_threads = 248;
2868                 rdev->config.evergreen.max_gs_threads = 32;
2869                 rdev->config.evergreen.max_stack_entries = 256;
2870                 rdev->config.evergreen.sx_num_of_sets = 4;
2871                 rdev->config.evergreen.sx_max_export_size = 256;
2872                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2873                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2874                 rdev->config.evergreen.max_hw_contexts = 8;
2875                 rdev->config.evergreen.sq_num_cf_insts = 2;
2876
2877                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2878                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2879                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2880                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
2881                 break;
2882         case CHIP_CEDAR:
2883         default:
2884                 rdev->config.evergreen.num_ses = 1;
2885                 rdev->config.evergreen.max_pipes = 2;
2886                 rdev->config.evergreen.max_tile_pipes = 2;
2887                 rdev->config.evergreen.max_simds = 2;
2888                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2889                 rdev->config.evergreen.max_gprs = 256;
2890                 rdev->config.evergreen.max_threads = 192;
2891                 rdev->config.evergreen.max_gs_threads = 16;
2892                 rdev->config.evergreen.max_stack_entries = 256;
2893                 rdev->config.evergreen.sx_num_of_sets = 4;
2894                 rdev->config.evergreen.sx_max_export_size = 128;
2895                 rdev->config.evergreen.sx_max_export_pos_size = 32;
2896                 rdev->config.evergreen.sx_max_export_smx_size = 96;
2897                 rdev->config.evergreen.max_hw_contexts = 4;
2898                 rdev->config.evergreen.sq_num_cf_insts = 1;
2899
2900                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2901                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2902                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2903                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2904                 break;
2905         case CHIP_PALM:
2906                 rdev->config.evergreen.num_ses = 1;
2907                 rdev->config.evergreen.max_pipes = 2;
2908                 rdev->config.evergreen.max_tile_pipes = 2;
2909                 rdev->config.evergreen.max_simds = 2;
2910                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2911                 rdev->config.evergreen.max_gprs = 256;
2912                 rdev->config.evergreen.max_threads = 192;
2913                 rdev->config.evergreen.max_gs_threads = 16;
2914                 rdev->config.evergreen.max_stack_entries = 256;
2915                 rdev->config.evergreen.sx_num_of_sets = 4;
2916                 rdev->config.evergreen.sx_max_export_size = 128;
2917                 rdev->config.evergreen.sx_max_export_pos_size = 32;
2918                 rdev->config.evergreen.sx_max_export_smx_size = 96;
2919                 rdev->config.evergreen.max_hw_contexts = 4;
2920                 rdev->config.evergreen.sq_num_cf_insts = 1;
2921
2922                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2923                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2924                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2925                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2926                 break;
2927         case CHIP_SUMO:
2928                 rdev->config.evergreen.num_ses = 1;
2929                 rdev->config.evergreen.max_pipes = 4;
2930                 rdev->config.evergreen.max_tile_pipes = 4;
2931                 if (rdev->pdev->device == 0x9648)
2932                         rdev->config.evergreen.max_simds = 3;
2933                 else if ((rdev->pdev->device == 0x9647) ||
2934                          (rdev->pdev->device == 0x964a))
2935                         rdev->config.evergreen.max_simds = 4;
2936                 else
2937                         rdev->config.evergreen.max_simds = 5;
2938                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2939                 rdev->config.evergreen.max_gprs = 256;
2940                 rdev->config.evergreen.max_threads = 248;
2941                 rdev->config.evergreen.max_gs_threads = 32;
2942                 rdev->config.evergreen.max_stack_entries = 256;
2943                 rdev->config.evergreen.sx_num_of_sets = 4;
2944                 rdev->config.evergreen.sx_max_export_size = 256;
2945                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2946                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2947                 rdev->config.evergreen.max_hw_contexts = 8;
2948                 rdev->config.evergreen.sq_num_cf_insts = 2;
2949
2950                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2951                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2952                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2953                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
2954                 break;
2955         case CHIP_SUMO2:
2956                 rdev->config.evergreen.num_ses = 1;
2957                 rdev->config.evergreen.max_pipes = 4;
2958                 rdev->config.evergreen.max_tile_pipes = 4;
2959                 rdev->config.evergreen.max_simds = 2;
2960                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2961                 rdev->config.evergreen.max_gprs = 256;
2962                 rdev->config.evergreen.max_threads = 248;
2963                 rdev->config.evergreen.max_gs_threads = 32;
2964                 rdev->config.evergreen.max_stack_entries = 512;
2965                 rdev->config.evergreen.sx_num_of_sets = 4;
2966                 rdev->config.evergreen.sx_max_export_size = 256;
2967                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2968                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2969                 rdev->config.evergreen.max_hw_contexts = 8;
2970                 rdev->config.evergreen.sq_num_cf_insts = 2;
2971
2972                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2973                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2974                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2975                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
2976                 break;
2977         case CHIP_BARTS:
2978                 rdev->config.evergreen.num_ses = 2;
2979                 rdev->config.evergreen.max_pipes = 4;
2980                 rdev->config.evergreen.max_tile_pipes = 8;
2981                 rdev->config.evergreen.max_simds = 7;
2982                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2983                 rdev->config.evergreen.max_gprs = 256;
2984                 rdev->config.evergreen.max_threads = 248;
2985                 rdev->config.evergreen.max_gs_threads = 32;
2986                 rdev->config.evergreen.max_stack_entries = 512;
2987                 rdev->config.evergreen.sx_num_of_sets = 4;
2988                 rdev->config.evergreen.sx_max_export_size = 256;
2989                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2990                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2991                 rdev->config.evergreen.max_hw_contexts = 8;
2992                 rdev->config.evergreen.sq_num_cf_insts = 2;
2993
2994                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2995                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2996                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2997                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
2998                 break;
2999         case CHIP_TURKS:
3000                 rdev->config.evergreen.num_ses = 1;
3001                 rdev->config.evergreen.max_pipes = 4;
3002                 rdev->config.evergreen.max_tile_pipes = 4;
3003                 rdev->config.evergreen.max_simds = 6;
3004                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3005                 rdev->config.evergreen.max_gprs = 256;
3006                 rdev->config.evergreen.max_threads = 248;
3007                 rdev->config.evergreen.max_gs_threads = 32;
3008                 rdev->config.evergreen.max_stack_entries = 256;
3009                 rdev->config.evergreen.sx_num_of_sets = 4;
3010                 rdev->config.evergreen.sx_max_export_size = 256;
3011                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3012                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3013                 rdev->config.evergreen.max_hw_contexts = 8;
3014                 rdev->config.evergreen.sq_num_cf_insts = 2;
3015
3016                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3017                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3018                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3019                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3020                 break;
3021         case CHIP_CAICOS:
3022                 rdev->config.evergreen.num_ses = 1;
3023                 rdev->config.evergreen.max_pipes = 2;
3024                 rdev->config.evergreen.max_tile_pipes = 2;
3025                 rdev->config.evergreen.max_simds = 2;
3026                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3027                 rdev->config.evergreen.max_gprs = 256;
3028                 rdev->config.evergreen.max_threads = 192;
3029                 rdev->config.evergreen.max_gs_threads = 16;
3030                 rdev->config.evergreen.max_stack_entries = 256;
3031                 rdev->config.evergreen.sx_num_of_sets = 4;
3032                 rdev->config.evergreen.sx_max_export_size = 128;
3033                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3034                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3035                 rdev->config.evergreen.max_hw_contexts = 4;
3036                 rdev->config.evergreen.sq_num_cf_insts = 1;
3037
3038                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3039                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3040                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3041                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3042                 break;
3043         }
3044
3045         /* Initialize HDP */
3046         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3047                 WREG32((0x2c14 + j), 0x00000000);
3048                 WREG32((0x2c18 + j), 0x00000000);
3049                 WREG32((0x2c1c + j), 0x00000000);
3050                 WREG32((0x2c20 + j), 0x00000000);
3051                 WREG32((0x2c24 + j), 0x00000000);
3052         }
3053
3054         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3055
3056         evergreen_fix_pci_max_read_req_size(rdev);
3057
3058         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3059         if ((rdev->family == CHIP_PALM) ||
3060             (rdev->family == CHIP_SUMO) ||
3061             (rdev->family == CHIP_SUMO2))
3062                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3063         else
3064                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3065
3066         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3067          * not have bank info, so create a custom tiling dword.
3068          * bits 3:0   num_pipes
3069          * bits 7:4   num_banks
3070          * bits 11:8  group_size
3071          * bits 15:12 row_size
3072          */
3073         rdev->config.evergreen.tile_config = 0;
3074         switch (rdev->config.evergreen.max_tile_pipes) {
3075         case 1:
3076         default:
3077                 rdev->config.evergreen.tile_config |= (0 << 0);
3078                 break;
3079         case 2:
3080                 rdev->config.evergreen.tile_config |= (1 << 0);
3081                 break;
3082         case 4:
3083                 rdev->config.evergreen.tile_config |= (2 << 0);
3084                 break;
3085         case 8:
3086                 rdev->config.evergreen.tile_config |= (3 << 0);
3087                 break;
3088         }
3089         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3090         if (rdev->flags & RADEON_IS_IGP)
3091                 rdev->config.evergreen.tile_config |= 1 << 4;
3092         else {
3093                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3094                 case 0: /* four banks */
3095                         rdev->config.evergreen.tile_config |= 0 << 4;
3096                         break;
3097                 case 1: /* eight banks */
3098                         rdev->config.evergreen.tile_config |= 1 << 4;
3099                         break;
3100                 case 2: /* sixteen banks */
3101                 default:
3102                         rdev->config.evergreen.tile_config |= 2 << 4;
3103                         break;
3104                 }
3105         }
3106         rdev->config.evergreen.tile_config |= 0 << 8;
3107         rdev->config.evergreen.tile_config |=
3108                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3109
3110         num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3111
3112         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3113                 u32 efuse_straps_4;
3114                 u32 efuse_straps_3;
3115
3116                 WREG32(RCU_IND_INDEX, 0x204);
3117                 efuse_straps_4 = RREG32(RCU_IND_DATA);
3118                 WREG32(RCU_IND_INDEX, 0x203);
3119                 efuse_straps_3 = RREG32(RCU_IND_DATA);
3120                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3121                       ((efuse_straps_3 & 0xf0000000) >> 28));
3122         } else {
3123                 tmp = 0;
3124                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3125                         u32 rb_disable_bitmap;
3126
3127                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3128                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3129                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3130                         tmp <<= 4;
3131                         tmp |= rb_disable_bitmap;
3132                 }
3133         }
3134         /* enabled rb are just the one not disabled :) */
3135         disabled_rb_mask = tmp;
3136         tmp = 0;
3137         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3138                 tmp |= (1 << i);
3139         /* if all the backends are disabled, fix it up here */
3140         if ((disabled_rb_mask & tmp) == tmp) {
3141                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3142                         disabled_rb_mask &= ~(1 << i);
3143         }
3144
3145         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3146         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3147
3148         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3149         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3150         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3151         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3152         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3153         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3154         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3155
3156         if ((rdev->config.evergreen.max_backends == 1) &&
3157             (rdev->flags & RADEON_IS_IGP)) {
3158                 if ((disabled_rb_mask & 3) == 1) {
3159                         /* RB0 disabled, RB1 enabled */
3160                         tmp = 0x11111111;
3161                 } else {
3162                         /* RB1 disabled, RB0 enabled */
3163                         tmp = 0x00000000;
3164                 }
3165         } else {
3166                 tmp = gb_addr_config & NUM_PIPES_MASK;
3167                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3168                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3169         }
3170         WREG32(GB_BACKEND_MAP, tmp);
3171
3172         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3173         WREG32(CGTS_TCC_DISABLE, 0);
3174         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3175         WREG32(CGTS_USER_TCC_DISABLE, 0);
3176
3177         /* set HW defaults for 3D engine */
3178         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3179                                      ROQ_IB2_START(0x2b)));
3180
3181         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3182
3183         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3184                              SYNC_GRADIENT |
3185                              SYNC_WALKER |
3186                              SYNC_ALIGNER));
3187
3188         sx_debug_1 = RREG32(SX_DEBUG_1);
3189         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3190         WREG32(SX_DEBUG_1, sx_debug_1);
3191
3192
3193         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3194         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3195         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3196         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3197
3198         if (rdev->family <= CHIP_SUMO2)
3199                 WREG32(SMX_SAR_CTL0, 0x00010000);
3200
3201         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3202                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3203                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3204
3205         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3206                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3207                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3208
3209         WREG32(VGT_NUM_INSTANCES, 1);
3210         WREG32(SPI_CONFIG_CNTL, 0);
3211         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3212         WREG32(CP_PERFMON_CNTL, 0);
3213
3214         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3215                                   FETCH_FIFO_HIWATER(0x4) |
3216                                   DONE_FIFO_HIWATER(0xe0) |
3217                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3218
3219         sq_config = RREG32(SQ_CONFIG);
3220         sq_config &= ~(PS_PRIO(3) |
3221                        VS_PRIO(3) |
3222                        GS_PRIO(3) |
3223                        ES_PRIO(3));
3224         sq_config |= (VC_ENABLE |
3225                       EXPORT_SRC_C |
3226                       PS_PRIO(0) |
3227                       VS_PRIO(1) |
3228                       GS_PRIO(2) |
3229                       ES_PRIO(3));
3230
3231         switch (rdev->family) {
3232         case CHIP_CEDAR:
3233         case CHIP_PALM:
3234         case CHIP_SUMO:
3235         case CHIP_SUMO2:
3236         case CHIP_CAICOS:
3237                 /* no vertex cache */
3238                 sq_config &= ~VC_ENABLE;
3239                 break;
3240         default:
3241                 break;
3242         }
3243
3244         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3245
3246         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3247         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3248         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3249         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3250         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3251         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3252         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3253
3254         switch (rdev->family) {
3255         case CHIP_CEDAR:
3256         case CHIP_PALM:
3257         case CHIP_SUMO:
3258         case CHIP_SUMO2:
3259                 ps_thread_count = 96;
3260                 break;
3261         default:
3262                 ps_thread_count = 128;
3263                 break;
3264         }
3265
3266         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3267         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3268         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3269         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3270         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3271         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3272
3273         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3274         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3275         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3276         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3277         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3278         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3279
3280         WREG32(SQ_CONFIG, sq_config);
3281         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3282         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3283         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3284         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3285         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3286         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3287         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3288         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3289         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3290         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3291
3292         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3293                                           FORCE_EOV_MAX_REZ_CNT(255)));
3294
3295         switch (rdev->family) {
3296         case CHIP_CEDAR:
3297         case CHIP_PALM:
3298         case CHIP_SUMO:
3299         case CHIP_SUMO2:
3300         case CHIP_CAICOS:
3301                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3302                 break;
3303         default:
3304                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3305                 break;
3306         }
3307         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3308         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3309
3310         WREG32(VGT_GS_VERTEX_REUSE, 16);
3311         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3312         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3313
3314         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3315         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3316
3317         WREG32(CB_PERF_CTR0_SEL_0, 0);
3318         WREG32(CB_PERF_CTR0_SEL_1, 0);
3319         WREG32(CB_PERF_CTR1_SEL_0, 0);
3320         WREG32(CB_PERF_CTR1_SEL_1, 0);
3321         WREG32(CB_PERF_CTR2_SEL_0, 0);
3322         WREG32(CB_PERF_CTR2_SEL_1, 0);
3323         WREG32(CB_PERF_CTR3_SEL_0, 0);
3324         WREG32(CB_PERF_CTR3_SEL_1, 0);
3325
3326         /* clear render buffer base addresses */
3327         WREG32(CB_COLOR0_BASE, 0);
3328         WREG32(CB_COLOR1_BASE, 0);
3329         WREG32(CB_COLOR2_BASE, 0);
3330         WREG32(CB_COLOR3_BASE, 0);
3331         WREG32(CB_COLOR4_BASE, 0);
3332         WREG32(CB_COLOR5_BASE, 0);
3333         WREG32(CB_COLOR6_BASE, 0);
3334         WREG32(CB_COLOR7_BASE, 0);
3335         WREG32(CB_COLOR8_BASE, 0);
3336         WREG32(CB_COLOR9_BASE, 0);
3337         WREG32(CB_COLOR10_BASE, 0);
3338         WREG32(CB_COLOR11_BASE, 0);
3339
3340         /* set the shader const cache sizes to 0 */
3341         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3342                 WREG32(i, 0);
3343         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3344                 WREG32(i, 0);
3345
3346         tmp = RREG32(HDP_MISC_CNTL);
3347         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3348         WREG32(HDP_MISC_CNTL, tmp);
3349
3350         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3351         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3352
3353         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3354
3355         udelay(50);
3356
3357 }
3358
3359 int evergreen_mc_init(struct radeon_device *rdev)
3360 {
3361         u32 tmp;
3362         int chansize, numchan;
3363
3364         /* Get VRAM informations */
3365         rdev->mc.vram_is_ddr = true;
3366         if ((rdev->family == CHIP_PALM) ||
3367             (rdev->family == CHIP_SUMO) ||
3368             (rdev->family == CHIP_SUMO2))
3369                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3370         else
3371                 tmp = RREG32(MC_ARB_RAMCFG);
3372         if (tmp & CHANSIZE_OVERRIDE) {
3373                 chansize = 16;
3374         } else if (tmp & CHANSIZE_MASK) {
3375                 chansize = 64;
3376         } else {
3377                 chansize = 32;
3378         }
3379         tmp = RREG32(MC_SHARED_CHMAP);
3380         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3381         case 0:
3382         default:
3383                 numchan = 1;
3384                 break;
3385         case 1:
3386                 numchan = 2;
3387                 break;
3388         case 2:
3389                 numchan = 4;
3390                 break;
3391         case 3:
3392                 numchan = 8;
3393                 break;
3394         }
3395         rdev->mc.vram_width = numchan * chansize;
3396         /* Could aper size report 0 ? */
3397         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3398         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3399         /* Setup GPU memory space */
3400         if ((rdev->family == CHIP_PALM) ||
3401             (rdev->family == CHIP_SUMO) ||
3402             (rdev->family == CHIP_SUMO2)) {
3403                 /* size in bytes on fusion */
3404                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3405                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3406         } else {
3407                 /* size in MB on evergreen/cayman/tn */
3408                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
3409                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
3410         }
3411         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3412         r700_vram_gtt_location(rdev, &rdev->mc);
3413         radeon_update_bandwidth_info(rdev);
3414
3415         return 0;
3416 }
3417
3418 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3419 {
3420         dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3421                 RREG32(GRBM_STATUS));
3422         dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3423                 RREG32(GRBM_STATUS_SE0));
3424         dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3425                 RREG32(GRBM_STATUS_SE1));
3426         dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3427                 RREG32(SRBM_STATUS));
3428         dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3429                 RREG32(SRBM_STATUS2));
3430         dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3431                 RREG32(CP_STALLED_STAT1));
3432         dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3433                 RREG32(CP_STALLED_STAT2));
3434         dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3435                 RREG32(CP_BUSY_STAT));
3436         dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3437                 RREG32(CP_STAT));
3438         dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3439                 RREG32(DMA_STATUS_REG));
3440         if (rdev->family >= CHIP_CAYMAN) {
3441                 dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3442                          RREG32(DMA_STATUS_REG + 0x800));
3443         }
3444 }
3445
3446 bool evergreen_is_display_hung(struct radeon_device *rdev)
3447 {
3448         u32 crtc_hung = 0;
3449         u32 crtc_status[6];
3450         u32 i, j, tmp;
3451
3452         for (i = 0; i < rdev->num_crtc; i++) {
3453                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3454                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3455                         crtc_hung |= (1 << i);
3456                 }
3457         }
3458
3459         for (j = 0; j < 10; j++) {
3460                 for (i = 0; i < rdev->num_crtc; i++) {
3461                         if (crtc_hung & (1 << i)) {
3462                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3463                                 if (tmp != crtc_status[i])
3464                                         crtc_hung &= ~(1 << i);
3465                         }
3466                 }
3467                 if (crtc_hung == 0)
3468                         return false;
3469                 udelay(100);
3470         }
3471
3472         return true;
3473 }
3474
3475 static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3476 {
3477         u32 reset_mask = 0;
3478         u32 tmp;
3479
3480         /* GRBM_STATUS */
3481         tmp = RREG32(GRBM_STATUS);
3482         if (tmp & (PA_BUSY | SC_BUSY |
3483                    SH_BUSY | SX_BUSY |
3484                    TA_BUSY | VGT_BUSY |
3485                    DB_BUSY | CB_BUSY |
3486                    SPI_BUSY | VGT_BUSY_NO_DMA))
3487                 reset_mask |= RADEON_RESET_GFX;
3488
3489         if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3490                    CP_BUSY | CP_COHERENCY_BUSY))
3491                 reset_mask |= RADEON_RESET_CP;
3492
3493         if (tmp & GRBM_EE_BUSY)
3494                 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3495
3496         /* DMA_STATUS_REG */
3497         tmp = RREG32(DMA_STATUS_REG);
3498         if (!(tmp & DMA_IDLE))
3499                 reset_mask |= RADEON_RESET_DMA;
3500
3501         /* SRBM_STATUS2 */
3502         tmp = RREG32(SRBM_STATUS2);
3503         if (tmp & DMA_BUSY)
3504                 reset_mask |= RADEON_RESET_DMA;
3505
3506         /* SRBM_STATUS */
3507         tmp = RREG32(SRBM_STATUS);
3508         if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3509                 reset_mask |= RADEON_RESET_RLC;
3510
3511         if (tmp & IH_BUSY)
3512                 reset_mask |= RADEON_RESET_IH;
3513
3514         if (tmp & SEM_BUSY)
3515                 reset_mask |= RADEON_RESET_SEM;
3516
3517         if (tmp & GRBM_RQ_PENDING)
3518                 reset_mask |= RADEON_RESET_GRBM;
3519
3520         if (tmp & VMC_BUSY)
3521                 reset_mask |= RADEON_RESET_VMC;
3522
3523         if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3524                    MCC_BUSY | MCD_BUSY))
3525                 reset_mask |= RADEON_RESET_MC;
3526
3527         if (evergreen_is_display_hung(rdev))
3528                 reset_mask |= RADEON_RESET_DISPLAY;
3529
3530         /* VM_L2_STATUS */
3531         tmp = RREG32(VM_L2_STATUS);
3532         if (tmp & L2_BUSY)
3533                 reset_mask |= RADEON_RESET_VMC;
3534
3535         /* Skip MC reset as it's mostly likely not hung, just busy */
3536         if (reset_mask & RADEON_RESET_MC) {
3537                 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3538                 reset_mask &= ~RADEON_RESET_MC;
3539         }
3540
3541         return reset_mask;
3542 }
3543
/**
 * evergreen_gpu_soft_reset - soft reset the engines selected in reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* block bits to reset
 *
 * Halts the CP (and the async DMA engine if requested), stops the memory
 * controller, pulses the GRBM/SRBM soft-reset bits corresponding to the
 * requested blocks, then restores the MC state.  The write/read-back/delay
 * ordering below is deliberate: the read after each WREG32 posts the write
 * before the udelay starts counting.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	/* nothing to reset */
	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* save display state and quiesce the memory controller before
	 * yanking engines out from under it
	 */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the RADEON_RESET_* request bits into GRBM/SRBM
	 * soft-reset register bits
	 */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC soft reset is only done on discrete parts; IGPs share the
	 * memory controller with the CPU
	 */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET); /* post the write */

		udelay(50); /* hold reset asserted */

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET); /* post the write */
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET); /* post the write */

		udelay(50); /* hold reset asserted */

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET); /* post the write */
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3657
3658 int evergreen_asic_reset(struct radeon_device *rdev)
3659 {
3660         u32 reset_mask;
3661
3662         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3663
3664         if (reset_mask)
3665                 r600_set_bios_scratch_engine_hung(rdev, true);
3666
3667         evergreen_gpu_soft_reset(rdev, reset_mask);
3668
3669         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3670
3671         if (!reset_mask)
3672                 r600_set_bios_scratch_engine_hung(rdev, false);
3673
3674         return 0;
3675 }
3676
3677 /**
3678  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3679  *
3680  * @rdev: radeon_device pointer
3681  * @ring: radeon_ring structure holding ring information
3682  *
3683  * Check if the GFX engine is locked up.
3684  * Returns true if the engine appears to be locked up, false if not.
3685  */
3686 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3687 {
3688         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3689
3690         if (!(reset_mask & (RADEON_RESET_GFX |
3691                             RADEON_RESET_COMPUTE |
3692                             RADEON_RESET_CP))) {
3693                 radeon_ring_lockup_update(ring);
3694                 return false;
3695         }
3696         /* force CP activities */
3697         radeon_ring_force_activity(rdev, ring);
3698         return radeon_ring_test_lockup(rdev, ring);
3699 }
3700
3701 /**
3702  * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3703  *
3704  * @rdev: radeon_device pointer
3705  * @ring: radeon_ring structure holding ring information
3706  *
3707  * Check if the async DMA engine is locked up.
3708  * Returns true if the engine appears to be locked up, false if not.
3709  */
3710 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3711 {
3712         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3713
3714         if (!(reset_mask & RADEON_RESET_DMA)) {
3715                 radeon_ring_lockup_update(ring);
3716                 return false;
3717         }
3718         /* force ring activities */
3719         radeon_ring_force_activity(rdev, ring);
3720         return radeon_ring_test_lockup(rdev, ring);
3721 }
3722
3723 /* Interrupts */
3724
3725 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
3726 {
3727         if (crtc >= rdev->num_crtc)
3728                 return 0;
3729         else
3730                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
3731 }
3732
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Writes "disabled" values to every interrupt enable register this driver
 * programs: CP (per-ring on cayman+), DMA trap, GRBM, per-crtc INT_MASK and
 * GRPH_INT_CONTROL, DAC autodetect, and the six HPD controls.  HPD polarity
 * bits are preserved while the enable bits are cleared.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman+ has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* mask vblank/vline interrupts on every populated crtc */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* mask pageflip interrupts on every populated crtc */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE6 */
	if (!ASIC_IS_DCE6(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* keep the HPD polarity bits, drop the enable/ack bits */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
3790
/**
 * evergreen_irq_set - program the hardware interrupt enables
 *
 * @rdev: radeon_device pointer
 *
 * Builds enable masks from the software interrupt state in rdev->irq
 * (ring fence ints, vblank/pflip, hotplug, hdmi/afmt) and writes them
 * to the hardware in one pass at the end.
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* start from the current HPD/AFMT register state with the
	 * enable bits cleared; they are OR'd back in below as needed
	 */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		/* second async DMA engine only exists on cayman+ */
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	/* vblank: enabled either for a drm vblank client or a pending pflip */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	/* hotplug detect enables */
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	/* hdmi audio format-change enables */
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* commit all the accumulated masks to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
3996
3997 static void evergreen_irq_ack(struct radeon_device *rdev)
3998 {
3999         u32 tmp;
4000
4001         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4002         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4003         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4004         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4005         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4006         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4007         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4008         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4009         if (rdev->num_crtc >= 4) {
4010                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4011                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4012         }
4013         if (rdev->num_crtc >= 6) {
4014                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4015                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4016         }
4017
4018         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4019         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4020         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4021         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4022         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4023         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4024
4025         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4026                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4027         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4028                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4029         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4030                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4031         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4032                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4033         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4034                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4035         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4036                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4037
4038         if (rdev->num_crtc >= 4) {
4039                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4040                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4041                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4042                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4043                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4044                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4045                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4046                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4047                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4048                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4049                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4050                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4051         }
4052
4053         if (rdev->num_crtc >= 6) {
4054                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4055                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4056                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4057                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4058                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4059                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4060                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4061                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4062                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4063                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4064                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4065                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4066         }
4067
4068         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4069                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4070                 tmp |= DC_HPDx_INT_ACK;
4071                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4072         }
4073         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4074                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4075                 tmp |= DC_HPDx_INT_ACK;
4076                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4077         }
4078         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4079                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4080                 tmp |= DC_HPDx_INT_ACK;
4081                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4082         }
4083         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4084                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4085                 tmp |= DC_HPDx_INT_ACK;
4086                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4087         }
4088         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4089                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4090                 tmp |= DC_HPDx_INT_ACK;
4091                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4092         }
4093         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4094                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4095                 tmp |= DC_HPDx_INT_ACK;
4096                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4097         }
4098         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4099                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4100                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4101                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4102         }
4103         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4104                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4105                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4106                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4107         }
4108         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4109                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4110                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4111                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4112         }
4113         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4114                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4115                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4116                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4117         }
4118         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4119                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4120                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4121                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4122         }
4123         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4124                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4125                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4126                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4127         }
4128 }
4129
/* Disable interrupt delivery, let any in-flight interrupts land, then
 * ack them and force every source's enable register to the off state.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4138
/* Quiesce interrupt sources and stop the RLC prior to suspend. */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4144
4145 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4146 {
4147         u32 wptr, tmp;
4148
4149         if (rdev->wb.enabled)
4150                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4151         else
4152                 wptr = RREG32(IH_RB_WPTR);
4153
4154         if (wptr & RB_OVERFLOW) {
4155                 /* When a ring buffer overflow happen start parsing interrupt
4156                  * from the last not overwritten vector (wptr + 16). Hopefully
4157                  * this should allow us to catchup.
4158                  */
4159                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4160                         wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4161                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4162                 tmp = RREG32(IH_RB_CNTL);
4163                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4164                 WREG32(IH_RB_CNTL, tmp);
4165         }
4166         return (wptr & rdev->ih.ptr_mask);
4167 }
4168
4169 int evergreen_irq_process(struct radeon_device *rdev)
4170 {
4171         u32 wptr;
4172         u32 rptr;
4173         u32 src_id, src_data;
4174         u32 ring_index;
4175         bool queue_hotplug = false;
4176         bool queue_hdmi = false;
4177
4178         if (!rdev->ih.enabled || rdev->shutdown)
4179                 return IRQ_NONE;
4180
4181         wptr = evergreen_get_ih_wptr(rdev);
4182
4183 restart_ih:
4184         /* is somebody else already processing irqs? */
4185         if (atomic_xchg(&rdev->ih.lock, 1))
4186                 return IRQ_NONE;
4187
4188         rptr = rdev->ih.rptr;
4189         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4190
4191         /* Order reading of wptr vs. reading of IH ring data */
4192         rmb();
4193
4194         /* display interrupts */
4195         evergreen_irq_ack(rdev);
4196
4197         while (rptr != wptr) {
4198                 /* wptr/rptr are in bytes! */
4199                 ring_index = rptr / 4;
4200                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4201                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4202
4203                 switch (src_id) {
4204                 case 1: /* D1 vblank/vline */
4205                         switch (src_data) {
4206                         case 0: /* D1 vblank */
4207                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4208                                         if (rdev->irq.crtc_vblank_int[0]) {
4209                                                 drm_handle_vblank(rdev->ddev, 0);
4210                                                 rdev->pm.vblank_sync = true;
4211                                                 wake_up(&rdev->irq.vblank_queue);
4212                                         }
4213                                         if (atomic_read(&rdev->irq.pflip[0]))
4214                                                 radeon_crtc_handle_flip(rdev, 0);
4215                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4216                                         DRM_DEBUG("IH: D1 vblank\n");
4217                                 }
4218                                 break;
4219                         case 1: /* D1 vline */
4220                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4221                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4222                                         DRM_DEBUG("IH: D1 vline\n");
4223                                 }
4224                                 break;
4225                         default:
4226                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4227                                 break;
4228                         }
4229                         break;
4230                 case 2: /* D2 vblank/vline */
4231                         switch (src_data) {
4232                         case 0: /* D2 vblank */
4233                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4234                                         if (rdev->irq.crtc_vblank_int[1]) {
4235                                                 drm_handle_vblank(rdev->ddev, 1);
4236                                                 rdev->pm.vblank_sync = true;
4237                                                 wake_up(&rdev->irq.vblank_queue);
4238                                         }
4239                                         if (atomic_read(&rdev->irq.pflip[1]))
4240                                                 radeon_crtc_handle_flip(rdev, 1);
4241                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4242                                         DRM_DEBUG("IH: D2 vblank\n");
4243                                 }
4244                                 break;
4245                         case 1: /* D2 vline */
4246                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4247                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4248                                         DRM_DEBUG("IH: D2 vline\n");
4249                                 }
4250                                 break;
4251                         default:
4252                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4253                                 break;
4254                         }
4255                         break;
4256                 case 3: /* D3 vblank/vline */
4257                         switch (src_data) {
4258                         case 0: /* D3 vblank */
4259                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4260                                         if (rdev->irq.crtc_vblank_int[2]) {
4261                                                 drm_handle_vblank(rdev->ddev, 2);
4262                                                 rdev->pm.vblank_sync = true;
4263                                                 wake_up(&rdev->irq.vblank_queue);
4264                                         }
4265                                         if (atomic_read(&rdev->irq.pflip[2]))
4266                                                 radeon_crtc_handle_flip(rdev, 2);
4267                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4268                                         DRM_DEBUG("IH: D3 vblank\n");
4269                                 }
4270                                 break;
4271                         case 1: /* D3 vline */
4272                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4273                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4274                                         DRM_DEBUG("IH: D3 vline\n");
4275                                 }
4276                                 break;
4277                         default:
4278                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4279                                 break;
4280                         }
4281                         break;
4282                 case 4: /* D4 vblank/vline */
4283                         switch (src_data) {
4284                         case 0: /* D4 vblank */
4285                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4286                                         if (rdev->irq.crtc_vblank_int[3]) {
4287                                                 drm_handle_vblank(rdev->ddev, 3);
4288                                                 rdev->pm.vblank_sync = true;
4289                                                 wake_up(&rdev->irq.vblank_queue);
4290                                         }
4291                                         if (atomic_read(&rdev->irq.pflip[3]))
4292                                                 radeon_crtc_handle_flip(rdev, 3);
4293                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4294                                         DRM_DEBUG("IH: D4 vblank\n");
4295                                 }
4296                                 break;
4297                         case 1: /* D4 vline */
4298                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4299                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4300                                         DRM_DEBUG("IH: D4 vline\n");
4301                                 }
4302                                 break;
4303                         default:
4304                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4305                                 break;
4306                         }
4307                         break;
4308                 case 5: /* D5 vblank/vline */
4309                         switch (src_data) {
4310                         case 0: /* D5 vblank */
4311                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4312                                         if (rdev->irq.crtc_vblank_int[4]) {
4313                                                 drm_handle_vblank(rdev->ddev, 4);
4314                                                 rdev->pm.vblank_sync = true;
4315                                                 wake_up(&rdev->irq.vblank_queue);
4316                                         }
4317                                         if (atomic_read(&rdev->irq.pflip[4]))
4318                                                 radeon_crtc_handle_flip(rdev, 4);
4319                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4320                                         DRM_DEBUG("IH: D5 vblank\n");
4321                                 }
4322                                 break;
4323                         case 1: /* D5 vline */
4324                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4325                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4326                                         DRM_DEBUG("IH: D5 vline\n");
4327                                 }
4328                                 break;
4329                         default:
4330                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4331                                 break;
4332                         }
4333                         break;
4334                 case 6: /* D6 vblank/vline */
4335                         switch (src_data) {
4336                         case 0: /* D6 vblank */
4337                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4338                                         if (rdev->irq.crtc_vblank_int[5]) {
4339                                                 drm_handle_vblank(rdev->ddev, 5);
4340                                                 rdev->pm.vblank_sync = true;
4341                                                 wake_up(&rdev->irq.vblank_queue);
4342                                         }
4343                                         if (atomic_read(&rdev->irq.pflip[5]))
4344                                                 radeon_crtc_handle_flip(rdev, 5);
4345                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4346                                         DRM_DEBUG("IH: D6 vblank\n");
4347                                 }
4348                                 break;
4349                         case 1: /* D6 vline */
4350                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4351                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4352                                         DRM_DEBUG("IH: D6 vline\n");
4353                                 }
4354                                 break;
4355                         default:
4356                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4357                                 break;
4358                         }
4359                         break;
4360                 case 42: /* HPD hotplug */
4361                         switch (src_data) {
4362                         case 0:
4363                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4364                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4365                                         queue_hotplug = true;
4366                                         DRM_DEBUG("IH: HPD1\n");
4367                                 }
4368                                 break;
4369                         case 1:
4370                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4371                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4372                                         queue_hotplug = true;
4373                                         DRM_DEBUG("IH: HPD2\n");
4374                                 }
4375                                 break;
4376                         case 2:
4377                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4378                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4379                                         queue_hotplug = true;
4380                                         DRM_DEBUG("IH: HPD3\n");
4381                                 }
4382                                 break;
4383                         case 3:
4384                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4385                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4386                                         queue_hotplug = true;
4387                                         DRM_DEBUG("IH: HPD4\n");
4388                                 }
4389                                 break;
4390                         case 4:
4391                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4392                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4393                                         queue_hotplug = true;
4394                                         DRM_DEBUG("IH: HPD5\n");
4395                                 }
4396                                 break;
4397                         case 5:
4398                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4399                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4400                                         queue_hotplug = true;
4401                                         DRM_DEBUG("IH: HPD6\n");
4402                                 }
4403                                 break;
4404                         default:
4405                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4406                                 break;
4407                         }
4408                         break;
4409                 case 44: /* hdmi */
4410                         switch (src_data) {
4411                         case 0:
4412                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4413                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4414                                         queue_hdmi = true;
4415                                         DRM_DEBUG("IH: HDMI0\n");
4416                                 }
4417                                 break;
4418                         case 1:
4419                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4420                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4421                                         queue_hdmi = true;
4422                                         DRM_DEBUG("IH: HDMI1\n");
4423                                 }
4424                                 break;
4425                         case 2:
4426                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4427                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4428                                         queue_hdmi = true;
4429                                         DRM_DEBUG("IH: HDMI2\n");
4430                                 }
4431                                 break;
4432                         case 3:
4433                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4434                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4435                                         queue_hdmi = true;
4436                                         DRM_DEBUG("IH: HDMI3\n");
4437                                 }
4438                                 break;
4439                         case 4:
4440                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4441                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4442                                         queue_hdmi = true;
4443                                         DRM_DEBUG("IH: HDMI4\n");
4444                                 }
4445                                 break;
4446                         case 5:
4447                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4448                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4449                                         queue_hdmi = true;
4450                                         DRM_DEBUG("IH: HDMI5\n");
4451                                 }
4452                                 break;
4453                         default:
4454                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4455                                 break;
4456                         }
4457                 case 124: /* UVD */
4458                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4459                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4460                         break;
4461                 case 146:
4462                 case 147:
4463                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4464                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4465                                 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
4466                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4467                                 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
4468                         /* reset addr and status */
4469                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4470                         break;
4471                 case 176: /* CP_INT in ring buffer */
4472                 case 177: /* CP_INT in IB1 */
4473                 case 178: /* CP_INT in IB2 */
4474                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4475                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4476                         break;
4477                 case 181: /* CP EOP event */
4478                         DRM_DEBUG("IH: CP EOP\n");
4479                         if (rdev->family >= CHIP_CAYMAN) {
4480                                 switch (src_data) {
4481                                 case 0:
4482                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4483                                         break;
4484                                 case 1:
4485                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4486                                         break;
4487                                 case 2:
4488                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4489                                         break;
4490                                 }
4491                         } else
4492                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4493                         break;
4494                 case 224: /* DMA trap event */
4495                         DRM_DEBUG("IH: DMA trap\n");
4496                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4497                         break;
4498                 case 233: /* GUI IDLE */
4499                         DRM_DEBUG("IH: GUI idle\n");
4500                         break;
4501                 case 244: /* DMA trap event */
4502                         if (rdev->family >= CHIP_CAYMAN) {
4503                                 DRM_DEBUG("IH: DMA1 trap\n");
4504                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4505                         }
4506                         break;
4507                 default:
4508                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4509                         break;
4510                 }
4511
4512                 /* wptr/rptr are in bytes! */
4513                 rptr += 16;
4514                 rptr &= rdev->ih.ptr_mask;
4515         }
4516         if (queue_hotplug)
4517                 schedule_work(&rdev->hotplug_work);
4518         if (queue_hdmi)
4519                 schedule_work(&rdev->audio_work);
4520         rdev->ih.rptr = rptr;
4521         WREG32(IH_RB_RPTR, rdev->ih.rptr);
4522         atomic_set(&rdev->ih.lock, 0);
4523
4524         /* make sure wptr hasn't changed while processing */
4525         wptr = evergreen_get_ih_wptr(rdev);
4526         if (wptr != rptr)
4527                 goto restart_ih;
4528
4529         return IRQ_HANDLED;
4530 }
4531
4532 /**
4533  * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
4534  *
4535  * @rdev: radeon_device pointer
4536  * @fence: radeon fence object
4537  *
4538  * Add a DMA fence packet to the ring to write
4539  * the fence seq number and DMA trap packet to generate
4540  * an interrupt if needed (evergreen-SI).
4541  */
4542 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
4543                                    struct radeon_fence *fence)
4544 {
4545         struct radeon_ring *ring = &rdev->ring[fence->ring];
4546         u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
4547         /* write the fence */
4548         radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
4549         radeon_ring_write(ring, addr & 0xfffffffc);
4550         radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
4551         radeon_ring_write(ring, fence->seq);
4552         /* generate an interrupt */
4553         radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
4554         /* flush HDP */
4555         radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
4556         radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
4557         radeon_ring_write(ring, 1);
4558 }
4559
4560 /**
4561  * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
4562  *
4563  * @rdev: radeon_device pointer
4564  * @ib: IB object to schedule
4565  *
4566  * Schedule an IB in the DMA ring (evergreen).
4567  */
4568 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
4569                                    struct radeon_ib *ib)
4570 {
4571         struct radeon_ring *ring = &rdev->ring[ib->ring];
4572
4573         if (rdev->wb.enabled) {
4574                 u32 next_rptr = ring->wptr + 4;
4575                 while ((next_rptr & 7) != 5)
4576                         next_rptr++;
4577                 next_rptr += 3;
4578                 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
4579                 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
4580                 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
4581                 radeon_ring_write(ring, next_rptr);
4582         }
4583
4584         /* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
4585          * Pad as necessary with NOPs.
4586          */
4587         while ((ring->wptr & 7) != 5)
4588                 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
4589         radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
4590         radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
4591         radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
4592
4593 }
4594
4595 /**
4596  * evergreen_copy_dma - copy pages using the DMA engine
4597  *
4598  * @rdev: radeon_device pointer
4599  * @src_offset: src GPU address
4600  * @dst_offset: dst GPU address
4601  * @num_gpu_pages: number of GPU pages to xfer
4602  * @fence: radeon fence object
4603  *
4604  * Copy GPU paging using the DMA engine (evergreen-cayman).
4605  * Used by the radeon ttm implementation to move pages if
4606  * registered as the asic copy callback.
4607  */
4608 int evergreen_copy_dma(struct radeon_device *rdev,
4609                        uint64_t src_offset, uint64_t dst_offset,
4610                        unsigned num_gpu_pages,
4611                        struct radeon_fence **fence)
4612 {
4613         struct radeon_semaphore *sem = NULL;
4614         int ring_index = rdev->asic->copy.dma_ring_index;
4615         struct radeon_ring *ring = &rdev->ring[ring_index];
4616         u32 size_in_dw, cur_size_in_dw;
4617         int i, num_loops;
4618         int r = 0;
4619
4620         r = radeon_semaphore_create(rdev, &sem);
4621         if (r) {
4622                 DRM_ERROR("radeon: moving bo (%d).\n", r);
4623                 return r;
4624         }
4625
4626         size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
4627         num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
4628         r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
4629         if (r) {
4630                 DRM_ERROR("radeon: moving bo (%d).\n", r);
4631                 radeon_semaphore_free(rdev, &sem, NULL);
4632                 return r;
4633         }
4634
4635         if (radeon_fence_need_sync(*fence, ring->idx)) {
4636                 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
4637                                             ring->idx);
4638                 radeon_fence_note_sync(*fence, ring->idx);
4639         } else {
4640                 radeon_semaphore_free(rdev, &sem, NULL);
4641         }
4642
4643         for (i = 0; i < num_loops; i++) {
4644                 cur_size_in_dw = size_in_dw;
4645                 if (cur_size_in_dw > 0xFFFFF)
4646                         cur_size_in_dw = 0xFFFFF;
4647                 size_in_dw -= cur_size_in_dw;
4648                 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
4649                 radeon_ring_write(ring, dst_offset & 0xfffffffc);
4650                 radeon_ring_write(ring, src_offset & 0xfffffffc);
4651                 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
4652                 radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
4653                 src_offset += cur_size_in_dw * 4;
4654                 dst_offset += cur_size_in_dw * 4;
4655         }
4656
4657         r = radeon_fence_emit(rdev, fence, ring->idx);
4658         if (r) {
4659                 radeon_ring_unlock_undo(rdev, ring);
4660                 return r;
4661         }
4662
4663         radeon_ring_unlock_commit(rdev, ring);
4664         radeon_semaphore_free(rdev, &sem, *fence);
4665
4666         return r;
4667 }
4668
/* Bring the asic up: load microcode, program the MC/GART, init the
 * blitter, writeback and fence infrastructure, enable interrupts and
 * start the CP, DMA and (optionally) UVD rings.  The order of these
 * steps matters; returns 0 on success or a negative error code.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);

	/* load microcode; DCE5 (NI) parts additionally need MC firmware */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	/* program the MC before enabling AGP/GART address translation */
	evergreen_mc_program(rdev);
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* blitter failure is not fatal: fall back to memcpy for copies */
	r = evergreen_blit_init(rdev);
	if (r) {
		r600_blit_fini(rdev);
		rdev->asic->copy.copy = NULL;
		dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD failure is not fatal: mark its ring unusable and carry on */
	r = rv770_uvd_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* init the GFX ring */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     0, 0xfffff, RADEON_CP_PACKET2);
	if (r)
		return r;

	/* init the DMA ring */
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_RB_RPTR, DMA_RB_WPTR,
			     2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD was disabled above; skip it then */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size,
				     R600_WB_UVD_RPTR_OFFSET,
				     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
				     0, 0xfffff, RADEON_CP_PACKET2);
		if (!r)
			r = r600_uvd_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
4809
4810 int evergreen_resume(struct radeon_device *rdev)
4811 {
4812         int r;
4813
4814         /* reset the asic, the gfx blocks are often in a bad state
4815          * after the driver is unloaded or after a resume
4816          */
4817         if (radeon_asic_reset(rdev))
4818                 dev_warn(rdev->dev, "GPU reset failed !\n");
4819         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
4820          * posting will perform necessary task to bring back GPU into good
4821          * shape.
4822          */
4823         /* post card */
4824         atom_asic_init(rdev->mode_info.atom_context);
4825
4826         /* init golden registers */
4827         evergreen_init_golden_registers(rdev);
4828
4829         rdev->accel_working = true;
4830         r = evergreen_startup(rdev);
4831         if (r) {
4832                 DRM_ERROR("evergreen startup failed on resume\n");
4833                 rdev->accel_working = false;
4834                 return r;
4835         }
4836
4837         return r;
4838
4839 }
4840
/**
 * evergreen_suspend - asic suspend callback
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the asic for suspend: stop audio, UVD, the CP and DMA
 * engines, then disable interrupts, writeback and finally the GART.
 * The teardown order mirrors the bring-up order in reverse.
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	radeon_uvd_suspend(rdev);
	/* stop the engines before disabling their interrupts/writeback */
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	r600_uvd_rbc_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
4854
/* Plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call ASIC-specific functions. This should
 * also allow us to remove a bunch of callback functions, such
 * as vram_info.
 */
/*
 * evergreen_init - one-time driver init for evergreen parts
 *
 * Reads the BIOS, posts the card if needed, initializes clocks, the
 * memory controller, rings, interrupts and GART, then brings the hw up
 * through evergreen_startup().  Returns 0 on success or a negative
 * error code.  A startup failure disables acceleration (accel_working
 * = false) but still falls through to return 0.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; on failure drop back to non-AGP operation
	 * rather than failing init */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	/* GFX ring: 1 MB */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	/* async DMA ring: 64 KB */
	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional: only set up its ring if the block initialized */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	/* interrupt handler ring: 64 KB */
	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* tear down what startup brought up; note we do NOT return
		 * the error here - init still succeeds without acceleration */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
4970
/*
 * evergreen_fini - driver unload teardown for evergreen parts
 *
 * Tears down everything evergreen_init()/evergreen_startup() set up:
 * audio, blitter, CP, DMA, interrupts, writeback, IB pool, GART, UVD,
 * VRAM scratch, GEM, fences, AGP, the memory manager, atombios state
 * and finally the cached BIOS image.  Call order mirrors the teardown
 * path in evergreen_init(); do not reorder without care.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r600_blit_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;	/* clear stale pointer after free */
}
4992
4993 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
4994 {
4995         u32 link_width_cntl, speed_cntl, mask;
4996         int ret;
4997
4998         if (radeon_pcie_gen2 == 0)
4999                 return;
5000
5001         if (rdev->flags & RADEON_IS_IGP)
5002                 return;
5003
5004         if (!(rdev->flags & RADEON_IS_PCIE))
5005                 return;
5006
5007         /* x2 cards have a special sequence */
5008         if (ASIC_IS_X2(rdev))
5009                 return;
5010
5011         ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
5012         if (ret != 0)
5013                 return;
5014
5015         if (!(mask & DRM_PCIE_SPEED_50))
5016                 return;
5017
5018         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5019         if (speed_cntl & LC_CURRENT_DATA_RATE) {
5020                 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5021                 return;
5022         }
5023
5024         DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5025
5026         if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5027             (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5028
5029                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5030                 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5031                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5032
5033                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5034                 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5035                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5036
5037                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5038                 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5039                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5040
5041                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5042                 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5043                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5044
5045                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5046                 speed_cntl |= LC_GEN2_EN_STRAP;
5047                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5048
5049         } else {
5050                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5051                 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5052                 if (1)
5053                         link_width_cntl |= LC_UPCONFIGURE_DIS;
5054                 else
5055                         link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5056                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5057         }
5058 }