/*
 * drivers/gpu/drm/radeon/evergreen.c
 * (source: firefly-linux-kernel-4.4.55.git; merge of remote-tracking branch
 * 'lsk/v3.10/topic/gator' into linux-linaro-lsk)
 */
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36
37 #define EVERGREEN_PFP_UCODE_SIZE 1120
38 #define EVERGREEN_PM4_UCODE_SIZE 1376
39
40 static const u32 crtc_offsets[6] =
41 {
42         EVERGREEN_CRTC0_REGISTER_OFFSET,
43         EVERGREEN_CRTC1_REGISTER_OFFSET,
44         EVERGREEN_CRTC2_REGISTER_OFFSET,
45         EVERGREEN_CRTC3_REGISTER_OFFSET,
46         EVERGREEN_CRTC4_REGISTER_OFFSET,
47         EVERGREEN_CRTC5_REGISTER_OFFSET
48 };
49
50 static void evergreen_gpu_init(struct radeon_device *rdev);
51 void evergreen_fini(struct radeon_device *rdev);
52 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
53 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
54                                      int ring, u32 cp_int_cntl);
55
56 static const u32 evergreen_golden_registers[] =
57 {
58         0x3f90, 0xffff0000, 0xff000000,
59         0x9148, 0xffff0000, 0xff000000,
60         0x3f94, 0xffff0000, 0xff000000,
61         0x914c, 0xffff0000, 0xff000000,
62         0x9b7c, 0xffffffff, 0x00000000,
63         0x8a14, 0xffffffff, 0x00000007,
64         0x8b10, 0xffffffff, 0x00000000,
65         0x960c, 0xffffffff, 0x54763210,
66         0x88c4, 0xffffffff, 0x000000c2,
67         0x88d4, 0xffffffff, 0x00000010,
68         0x8974, 0xffffffff, 0x00000000,
69         0xc78, 0x00000080, 0x00000080,
70         0x5eb4, 0xffffffff, 0x00000002,
71         0x5e78, 0xffffffff, 0x001000f0,
72         0x6104, 0x01000300, 0x00000000,
73         0x5bc0, 0x00300000, 0x00000000,
74         0x7030, 0xffffffff, 0x00000011,
75         0x7c30, 0xffffffff, 0x00000011,
76         0x10830, 0xffffffff, 0x00000011,
77         0x11430, 0xffffffff, 0x00000011,
78         0x12030, 0xffffffff, 0x00000011,
79         0x12c30, 0xffffffff, 0x00000011,
80         0xd02c, 0xffffffff, 0x08421000,
81         0x240c, 0xffffffff, 0x00000380,
82         0x8b24, 0xffffffff, 0x00ff0fff,
83         0x28a4c, 0x06000000, 0x06000000,
84         0x10c, 0x00000001, 0x00000001,
85         0x8d00, 0xffffffff, 0x100e4848,
86         0x8d04, 0xffffffff, 0x00164745,
87         0x8c00, 0xffffffff, 0xe4000003,
88         0x8c04, 0xffffffff, 0x40600060,
89         0x8c08, 0xffffffff, 0x001c001c,
90         0x8cf0, 0xffffffff, 0x08e00620,
91         0x8c20, 0xffffffff, 0x00800080,
92         0x8c24, 0xffffffff, 0x00800080,
93         0x8c18, 0xffffffff, 0x20202078,
94         0x8c1c, 0xffffffff, 0x00001010,
95         0x28350, 0xffffffff, 0x00000000,
96         0xa008, 0xffffffff, 0x00010000,
97         0x5c4, 0xffffffff, 0x00000001,
98         0x9508, 0xffffffff, 0x00000002,
99         0x913c, 0x0000000f, 0x0000000a
100 };
101
102 static const u32 evergreen_golden_registers2[] =
103 {
104         0x2f4c, 0xffffffff, 0x00000000,
105         0x54f4, 0xffffffff, 0x00000000,
106         0x54f0, 0xffffffff, 0x00000000,
107         0x5498, 0xffffffff, 0x00000000,
108         0x549c, 0xffffffff, 0x00000000,
109         0x5494, 0xffffffff, 0x00000000,
110         0x53cc, 0xffffffff, 0x00000000,
111         0x53c8, 0xffffffff, 0x00000000,
112         0x53c4, 0xffffffff, 0x00000000,
113         0x53c0, 0xffffffff, 0x00000000,
114         0x53bc, 0xffffffff, 0x00000000,
115         0x53b8, 0xffffffff, 0x00000000,
116         0x53b4, 0xffffffff, 0x00000000,
117         0x53b0, 0xffffffff, 0x00000000
118 };
119
120 static const u32 cypress_mgcg_init[] =
121 {
122         0x802c, 0xffffffff, 0xc0000000,
123         0x5448, 0xffffffff, 0x00000100,
124         0x55e4, 0xffffffff, 0x00000100,
125         0x160c, 0xffffffff, 0x00000100,
126         0x5644, 0xffffffff, 0x00000100,
127         0xc164, 0xffffffff, 0x00000100,
128         0x8a18, 0xffffffff, 0x00000100,
129         0x897c, 0xffffffff, 0x06000100,
130         0x8b28, 0xffffffff, 0x00000100,
131         0x9144, 0xffffffff, 0x00000100,
132         0x9a60, 0xffffffff, 0x00000100,
133         0x9868, 0xffffffff, 0x00000100,
134         0x8d58, 0xffffffff, 0x00000100,
135         0x9510, 0xffffffff, 0x00000100,
136         0x949c, 0xffffffff, 0x00000100,
137         0x9654, 0xffffffff, 0x00000100,
138         0x9030, 0xffffffff, 0x00000100,
139         0x9034, 0xffffffff, 0x00000100,
140         0x9038, 0xffffffff, 0x00000100,
141         0x903c, 0xffffffff, 0x00000100,
142         0x9040, 0xffffffff, 0x00000100,
143         0xa200, 0xffffffff, 0x00000100,
144         0xa204, 0xffffffff, 0x00000100,
145         0xa208, 0xffffffff, 0x00000100,
146         0xa20c, 0xffffffff, 0x00000100,
147         0x971c, 0xffffffff, 0x00000100,
148         0x977c, 0xffffffff, 0x00000100,
149         0x3f80, 0xffffffff, 0x00000100,
150         0xa210, 0xffffffff, 0x00000100,
151         0xa214, 0xffffffff, 0x00000100,
152         0x4d8, 0xffffffff, 0x00000100,
153         0x9784, 0xffffffff, 0x00000100,
154         0x9698, 0xffffffff, 0x00000100,
155         0x4d4, 0xffffffff, 0x00000200,
156         0x30cc, 0xffffffff, 0x00000100,
157         0xd0c0, 0xffffffff, 0xff000100,
158         0x802c, 0xffffffff, 0x40000000,
159         0x915c, 0xffffffff, 0x00010000,
160         0x9160, 0xffffffff, 0x00030002,
161         0x9178, 0xffffffff, 0x00070000,
162         0x917c, 0xffffffff, 0x00030002,
163         0x9180, 0xffffffff, 0x00050004,
164         0x918c, 0xffffffff, 0x00010006,
165         0x9190, 0xffffffff, 0x00090008,
166         0x9194, 0xffffffff, 0x00070000,
167         0x9198, 0xffffffff, 0x00030002,
168         0x919c, 0xffffffff, 0x00050004,
169         0x91a8, 0xffffffff, 0x00010006,
170         0x91ac, 0xffffffff, 0x00090008,
171         0x91b0, 0xffffffff, 0x00070000,
172         0x91b4, 0xffffffff, 0x00030002,
173         0x91b8, 0xffffffff, 0x00050004,
174         0x91c4, 0xffffffff, 0x00010006,
175         0x91c8, 0xffffffff, 0x00090008,
176         0x91cc, 0xffffffff, 0x00070000,
177         0x91d0, 0xffffffff, 0x00030002,
178         0x91d4, 0xffffffff, 0x00050004,
179         0x91e0, 0xffffffff, 0x00010006,
180         0x91e4, 0xffffffff, 0x00090008,
181         0x91e8, 0xffffffff, 0x00000000,
182         0x91ec, 0xffffffff, 0x00070000,
183         0x91f0, 0xffffffff, 0x00030002,
184         0x91f4, 0xffffffff, 0x00050004,
185         0x9200, 0xffffffff, 0x00010006,
186         0x9204, 0xffffffff, 0x00090008,
187         0x9208, 0xffffffff, 0x00070000,
188         0x920c, 0xffffffff, 0x00030002,
189         0x9210, 0xffffffff, 0x00050004,
190         0x921c, 0xffffffff, 0x00010006,
191         0x9220, 0xffffffff, 0x00090008,
192         0x9224, 0xffffffff, 0x00070000,
193         0x9228, 0xffffffff, 0x00030002,
194         0x922c, 0xffffffff, 0x00050004,
195         0x9238, 0xffffffff, 0x00010006,
196         0x923c, 0xffffffff, 0x00090008,
197         0x9240, 0xffffffff, 0x00070000,
198         0x9244, 0xffffffff, 0x00030002,
199         0x9248, 0xffffffff, 0x00050004,
200         0x9254, 0xffffffff, 0x00010006,
201         0x9258, 0xffffffff, 0x00090008,
202         0x925c, 0xffffffff, 0x00070000,
203         0x9260, 0xffffffff, 0x00030002,
204         0x9264, 0xffffffff, 0x00050004,
205         0x9270, 0xffffffff, 0x00010006,
206         0x9274, 0xffffffff, 0x00090008,
207         0x9278, 0xffffffff, 0x00070000,
208         0x927c, 0xffffffff, 0x00030002,
209         0x9280, 0xffffffff, 0x00050004,
210         0x928c, 0xffffffff, 0x00010006,
211         0x9290, 0xffffffff, 0x00090008,
212         0x9294, 0xffffffff, 0x00000000,
213         0x929c, 0xffffffff, 0x00000001,
214         0x802c, 0xffffffff, 0x40010000,
215         0x915c, 0xffffffff, 0x00010000,
216         0x9160, 0xffffffff, 0x00030002,
217         0x9178, 0xffffffff, 0x00070000,
218         0x917c, 0xffffffff, 0x00030002,
219         0x9180, 0xffffffff, 0x00050004,
220         0x918c, 0xffffffff, 0x00010006,
221         0x9190, 0xffffffff, 0x00090008,
222         0x9194, 0xffffffff, 0x00070000,
223         0x9198, 0xffffffff, 0x00030002,
224         0x919c, 0xffffffff, 0x00050004,
225         0x91a8, 0xffffffff, 0x00010006,
226         0x91ac, 0xffffffff, 0x00090008,
227         0x91b0, 0xffffffff, 0x00070000,
228         0x91b4, 0xffffffff, 0x00030002,
229         0x91b8, 0xffffffff, 0x00050004,
230         0x91c4, 0xffffffff, 0x00010006,
231         0x91c8, 0xffffffff, 0x00090008,
232         0x91cc, 0xffffffff, 0x00070000,
233         0x91d0, 0xffffffff, 0x00030002,
234         0x91d4, 0xffffffff, 0x00050004,
235         0x91e0, 0xffffffff, 0x00010006,
236         0x91e4, 0xffffffff, 0x00090008,
237         0x91e8, 0xffffffff, 0x00000000,
238         0x91ec, 0xffffffff, 0x00070000,
239         0x91f0, 0xffffffff, 0x00030002,
240         0x91f4, 0xffffffff, 0x00050004,
241         0x9200, 0xffffffff, 0x00010006,
242         0x9204, 0xffffffff, 0x00090008,
243         0x9208, 0xffffffff, 0x00070000,
244         0x920c, 0xffffffff, 0x00030002,
245         0x9210, 0xffffffff, 0x00050004,
246         0x921c, 0xffffffff, 0x00010006,
247         0x9220, 0xffffffff, 0x00090008,
248         0x9224, 0xffffffff, 0x00070000,
249         0x9228, 0xffffffff, 0x00030002,
250         0x922c, 0xffffffff, 0x00050004,
251         0x9238, 0xffffffff, 0x00010006,
252         0x923c, 0xffffffff, 0x00090008,
253         0x9240, 0xffffffff, 0x00070000,
254         0x9244, 0xffffffff, 0x00030002,
255         0x9248, 0xffffffff, 0x00050004,
256         0x9254, 0xffffffff, 0x00010006,
257         0x9258, 0xffffffff, 0x00090008,
258         0x925c, 0xffffffff, 0x00070000,
259         0x9260, 0xffffffff, 0x00030002,
260         0x9264, 0xffffffff, 0x00050004,
261         0x9270, 0xffffffff, 0x00010006,
262         0x9274, 0xffffffff, 0x00090008,
263         0x9278, 0xffffffff, 0x00070000,
264         0x927c, 0xffffffff, 0x00030002,
265         0x9280, 0xffffffff, 0x00050004,
266         0x928c, 0xffffffff, 0x00010006,
267         0x9290, 0xffffffff, 0x00090008,
268         0x9294, 0xffffffff, 0x00000000,
269         0x929c, 0xffffffff, 0x00000001,
270         0x802c, 0xffffffff, 0xc0000000
271 };
272
273 static const u32 redwood_mgcg_init[] =
274 {
275         0x802c, 0xffffffff, 0xc0000000,
276         0x5448, 0xffffffff, 0x00000100,
277         0x55e4, 0xffffffff, 0x00000100,
278         0x160c, 0xffffffff, 0x00000100,
279         0x5644, 0xffffffff, 0x00000100,
280         0xc164, 0xffffffff, 0x00000100,
281         0x8a18, 0xffffffff, 0x00000100,
282         0x897c, 0xffffffff, 0x06000100,
283         0x8b28, 0xffffffff, 0x00000100,
284         0x9144, 0xffffffff, 0x00000100,
285         0x9a60, 0xffffffff, 0x00000100,
286         0x9868, 0xffffffff, 0x00000100,
287         0x8d58, 0xffffffff, 0x00000100,
288         0x9510, 0xffffffff, 0x00000100,
289         0x949c, 0xffffffff, 0x00000100,
290         0x9654, 0xffffffff, 0x00000100,
291         0x9030, 0xffffffff, 0x00000100,
292         0x9034, 0xffffffff, 0x00000100,
293         0x9038, 0xffffffff, 0x00000100,
294         0x903c, 0xffffffff, 0x00000100,
295         0x9040, 0xffffffff, 0x00000100,
296         0xa200, 0xffffffff, 0x00000100,
297         0xa204, 0xffffffff, 0x00000100,
298         0xa208, 0xffffffff, 0x00000100,
299         0xa20c, 0xffffffff, 0x00000100,
300         0x971c, 0xffffffff, 0x00000100,
301         0x977c, 0xffffffff, 0x00000100,
302         0x3f80, 0xffffffff, 0x00000100,
303         0xa210, 0xffffffff, 0x00000100,
304         0xa214, 0xffffffff, 0x00000100,
305         0x4d8, 0xffffffff, 0x00000100,
306         0x9784, 0xffffffff, 0x00000100,
307         0x9698, 0xffffffff, 0x00000100,
308         0x4d4, 0xffffffff, 0x00000200,
309         0x30cc, 0xffffffff, 0x00000100,
310         0xd0c0, 0xffffffff, 0xff000100,
311         0x802c, 0xffffffff, 0x40000000,
312         0x915c, 0xffffffff, 0x00010000,
313         0x9160, 0xffffffff, 0x00030002,
314         0x9178, 0xffffffff, 0x00070000,
315         0x917c, 0xffffffff, 0x00030002,
316         0x9180, 0xffffffff, 0x00050004,
317         0x918c, 0xffffffff, 0x00010006,
318         0x9190, 0xffffffff, 0x00090008,
319         0x9194, 0xffffffff, 0x00070000,
320         0x9198, 0xffffffff, 0x00030002,
321         0x919c, 0xffffffff, 0x00050004,
322         0x91a8, 0xffffffff, 0x00010006,
323         0x91ac, 0xffffffff, 0x00090008,
324         0x91b0, 0xffffffff, 0x00070000,
325         0x91b4, 0xffffffff, 0x00030002,
326         0x91b8, 0xffffffff, 0x00050004,
327         0x91c4, 0xffffffff, 0x00010006,
328         0x91c8, 0xffffffff, 0x00090008,
329         0x91cc, 0xffffffff, 0x00070000,
330         0x91d0, 0xffffffff, 0x00030002,
331         0x91d4, 0xffffffff, 0x00050004,
332         0x91e0, 0xffffffff, 0x00010006,
333         0x91e4, 0xffffffff, 0x00090008,
334         0x91e8, 0xffffffff, 0x00000000,
335         0x91ec, 0xffffffff, 0x00070000,
336         0x91f0, 0xffffffff, 0x00030002,
337         0x91f4, 0xffffffff, 0x00050004,
338         0x9200, 0xffffffff, 0x00010006,
339         0x9204, 0xffffffff, 0x00090008,
340         0x9294, 0xffffffff, 0x00000000,
341         0x929c, 0xffffffff, 0x00000001,
342         0x802c, 0xffffffff, 0xc0000000
343 };
344
345 static const u32 cedar_golden_registers[] =
346 {
347         0x3f90, 0xffff0000, 0xff000000,
348         0x9148, 0xffff0000, 0xff000000,
349         0x3f94, 0xffff0000, 0xff000000,
350         0x914c, 0xffff0000, 0xff000000,
351         0x9b7c, 0xffffffff, 0x00000000,
352         0x8a14, 0xffffffff, 0x00000007,
353         0x8b10, 0xffffffff, 0x00000000,
354         0x960c, 0xffffffff, 0x54763210,
355         0x88c4, 0xffffffff, 0x000000c2,
356         0x88d4, 0xffffffff, 0x00000000,
357         0x8974, 0xffffffff, 0x00000000,
358         0xc78, 0x00000080, 0x00000080,
359         0x5eb4, 0xffffffff, 0x00000002,
360         0x5e78, 0xffffffff, 0x001000f0,
361         0x6104, 0x01000300, 0x00000000,
362         0x5bc0, 0x00300000, 0x00000000,
363         0x7030, 0xffffffff, 0x00000011,
364         0x7c30, 0xffffffff, 0x00000011,
365         0x10830, 0xffffffff, 0x00000011,
366         0x11430, 0xffffffff, 0x00000011,
367         0xd02c, 0xffffffff, 0x08421000,
368         0x240c, 0xffffffff, 0x00000380,
369         0x8b24, 0xffffffff, 0x00ff0fff,
370         0x28a4c, 0x06000000, 0x06000000,
371         0x10c, 0x00000001, 0x00000001,
372         0x8d00, 0xffffffff, 0x100e4848,
373         0x8d04, 0xffffffff, 0x00164745,
374         0x8c00, 0xffffffff, 0xe4000003,
375         0x8c04, 0xffffffff, 0x40600060,
376         0x8c08, 0xffffffff, 0x001c001c,
377         0x8cf0, 0xffffffff, 0x08e00410,
378         0x8c20, 0xffffffff, 0x00800080,
379         0x8c24, 0xffffffff, 0x00800080,
380         0x8c18, 0xffffffff, 0x20202078,
381         0x8c1c, 0xffffffff, 0x00001010,
382         0x28350, 0xffffffff, 0x00000000,
383         0xa008, 0xffffffff, 0x00010000,
384         0x5c4, 0xffffffff, 0x00000001,
385         0x9508, 0xffffffff, 0x00000002
386 };
387
388 static const u32 cedar_mgcg_init[] =
389 {
390         0x802c, 0xffffffff, 0xc0000000,
391         0x5448, 0xffffffff, 0x00000100,
392         0x55e4, 0xffffffff, 0x00000100,
393         0x160c, 0xffffffff, 0x00000100,
394         0x5644, 0xffffffff, 0x00000100,
395         0xc164, 0xffffffff, 0x00000100,
396         0x8a18, 0xffffffff, 0x00000100,
397         0x897c, 0xffffffff, 0x06000100,
398         0x8b28, 0xffffffff, 0x00000100,
399         0x9144, 0xffffffff, 0x00000100,
400         0x9a60, 0xffffffff, 0x00000100,
401         0x9868, 0xffffffff, 0x00000100,
402         0x8d58, 0xffffffff, 0x00000100,
403         0x9510, 0xffffffff, 0x00000100,
404         0x949c, 0xffffffff, 0x00000100,
405         0x9654, 0xffffffff, 0x00000100,
406         0x9030, 0xffffffff, 0x00000100,
407         0x9034, 0xffffffff, 0x00000100,
408         0x9038, 0xffffffff, 0x00000100,
409         0x903c, 0xffffffff, 0x00000100,
410         0x9040, 0xffffffff, 0x00000100,
411         0xa200, 0xffffffff, 0x00000100,
412         0xa204, 0xffffffff, 0x00000100,
413         0xa208, 0xffffffff, 0x00000100,
414         0xa20c, 0xffffffff, 0x00000100,
415         0x971c, 0xffffffff, 0x00000100,
416         0x977c, 0xffffffff, 0x00000100,
417         0x3f80, 0xffffffff, 0x00000100,
418         0xa210, 0xffffffff, 0x00000100,
419         0xa214, 0xffffffff, 0x00000100,
420         0x4d8, 0xffffffff, 0x00000100,
421         0x9784, 0xffffffff, 0x00000100,
422         0x9698, 0xffffffff, 0x00000100,
423         0x4d4, 0xffffffff, 0x00000200,
424         0x30cc, 0xffffffff, 0x00000100,
425         0xd0c0, 0xffffffff, 0xff000100,
426         0x802c, 0xffffffff, 0x40000000,
427         0x915c, 0xffffffff, 0x00010000,
428         0x9178, 0xffffffff, 0x00050000,
429         0x917c, 0xffffffff, 0x00030002,
430         0x918c, 0xffffffff, 0x00010004,
431         0x9190, 0xffffffff, 0x00070006,
432         0x9194, 0xffffffff, 0x00050000,
433         0x9198, 0xffffffff, 0x00030002,
434         0x91a8, 0xffffffff, 0x00010004,
435         0x91ac, 0xffffffff, 0x00070006,
436         0x91e8, 0xffffffff, 0x00000000,
437         0x9294, 0xffffffff, 0x00000000,
438         0x929c, 0xffffffff, 0x00000001,
439         0x802c, 0xffffffff, 0xc0000000
440 };
441
442 static const u32 juniper_mgcg_init[] =
443 {
444         0x802c, 0xffffffff, 0xc0000000,
445         0x5448, 0xffffffff, 0x00000100,
446         0x55e4, 0xffffffff, 0x00000100,
447         0x160c, 0xffffffff, 0x00000100,
448         0x5644, 0xffffffff, 0x00000100,
449         0xc164, 0xffffffff, 0x00000100,
450         0x8a18, 0xffffffff, 0x00000100,
451         0x897c, 0xffffffff, 0x06000100,
452         0x8b28, 0xffffffff, 0x00000100,
453         0x9144, 0xffffffff, 0x00000100,
454         0x9a60, 0xffffffff, 0x00000100,
455         0x9868, 0xffffffff, 0x00000100,
456         0x8d58, 0xffffffff, 0x00000100,
457         0x9510, 0xffffffff, 0x00000100,
458         0x949c, 0xffffffff, 0x00000100,
459         0x9654, 0xffffffff, 0x00000100,
460         0x9030, 0xffffffff, 0x00000100,
461         0x9034, 0xffffffff, 0x00000100,
462         0x9038, 0xffffffff, 0x00000100,
463         0x903c, 0xffffffff, 0x00000100,
464         0x9040, 0xffffffff, 0x00000100,
465         0xa200, 0xffffffff, 0x00000100,
466         0xa204, 0xffffffff, 0x00000100,
467         0xa208, 0xffffffff, 0x00000100,
468         0xa20c, 0xffffffff, 0x00000100,
469         0x971c, 0xffffffff, 0x00000100,
470         0xd0c0, 0xffffffff, 0xff000100,
471         0x802c, 0xffffffff, 0x40000000,
472         0x915c, 0xffffffff, 0x00010000,
473         0x9160, 0xffffffff, 0x00030002,
474         0x9178, 0xffffffff, 0x00070000,
475         0x917c, 0xffffffff, 0x00030002,
476         0x9180, 0xffffffff, 0x00050004,
477         0x918c, 0xffffffff, 0x00010006,
478         0x9190, 0xffffffff, 0x00090008,
479         0x9194, 0xffffffff, 0x00070000,
480         0x9198, 0xffffffff, 0x00030002,
481         0x919c, 0xffffffff, 0x00050004,
482         0x91a8, 0xffffffff, 0x00010006,
483         0x91ac, 0xffffffff, 0x00090008,
484         0x91b0, 0xffffffff, 0x00070000,
485         0x91b4, 0xffffffff, 0x00030002,
486         0x91b8, 0xffffffff, 0x00050004,
487         0x91c4, 0xffffffff, 0x00010006,
488         0x91c8, 0xffffffff, 0x00090008,
489         0x91cc, 0xffffffff, 0x00070000,
490         0x91d0, 0xffffffff, 0x00030002,
491         0x91d4, 0xffffffff, 0x00050004,
492         0x91e0, 0xffffffff, 0x00010006,
493         0x91e4, 0xffffffff, 0x00090008,
494         0x91e8, 0xffffffff, 0x00000000,
495         0x91ec, 0xffffffff, 0x00070000,
496         0x91f0, 0xffffffff, 0x00030002,
497         0x91f4, 0xffffffff, 0x00050004,
498         0x9200, 0xffffffff, 0x00010006,
499         0x9204, 0xffffffff, 0x00090008,
500         0x9208, 0xffffffff, 0x00070000,
501         0x920c, 0xffffffff, 0x00030002,
502         0x9210, 0xffffffff, 0x00050004,
503         0x921c, 0xffffffff, 0x00010006,
504         0x9220, 0xffffffff, 0x00090008,
505         0x9224, 0xffffffff, 0x00070000,
506         0x9228, 0xffffffff, 0x00030002,
507         0x922c, 0xffffffff, 0x00050004,
508         0x9238, 0xffffffff, 0x00010006,
509         0x923c, 0xffffffff, 0x00090008,
510         0x9240, 0xffffffff, 0x00070000,
511         0x9244, 0xffffffff, 0x00030002,
512         0x9248, 0xffffffff, 0x00050004,
513         0x9254, 0xffffffff, 0x00010006,
514         0x9258, 0xffffffff, 0x00090008,
515         0x925c, 0xffffffff, 0x00070000,
516         0x9260, 0xffffffff, 0x00030002,
517         0x9264, 0xffffffff, 0x00050004,
518         0x9270, 0xffffffff, 0x00010006,
519         0x9274, 0xffffffff, 0x00090008,
520         0x9278, 0xffffffff, 0x00070000,
521         0x927c, 0xffffffff, 0x00030002,
522         0x9280, 0xffffffff, 0x00050004,
523         0x928c, 0xffffffff, 0x00010006,
524         0x9290, 0xffffffff, 0x00090008,
525         0x9294, 0xffffffff, 0x00000000,
526         0x929c, 0xffffffff, 0x00000001,
527         0x802c, 0xffffffff, 0xc0000000,
528         0x977c, 0xffffffff, 0x00000100,
529         0x3f80, 0xffffffff, 0x00000100,
530         0xa210, 0xffffffff, 0x00000100,
531         0xa214, 0xffffffff, 0x00000100,
532         0x4d8, 0xffffffff, 0x00000100,
533         0x9784, 0xffffffff, 0x00000100,
534         0x9698, 0xffffffff, 0x00000100,
535         0x4d4, 0xffffffff, 0x00000200,
536         0x30cc, 0xffffffff, 0x00000100,
537         0x802c, 0xffffffff, 0xc0000000
538 };
539
540 static const u32 supersumo_golden_registers[] =
541 {
542         0x5eb4, 0xffffffff, 0x00000002,
543         0x5c4, 0xffffffff, 0x00000001,
544         0x7030, 0xffffffff, 0x00000011,
545         0x7c30, 0xffffffff, 0x00000011,
546         0x6104, 0x01000300, 0x00000000,
547         0x5bc0, 0x00300000, 0x00000000,
548         0x8c04, 0xffffffff, 0x40600060,
549         0x8c08, 0xffffffff, 0x001c001c,
550         0x8c20, 0xffffffff, 0x00800080,
551         0x8c24, 0xffffffff, 0x00800080,
552         0x8c18, 0xffffffff, 0x20202078,
553         0x8c1c, 0xffffffff, 0x00001010,
554         0x918c, 0xffffffff, 0x00010006,
555         0x91a8, 0xffffffff, 0x00010006,
556         0x91c4, 0xffffffff, 0x00010006,
557         0x91e0, 0xffffffff, 0x00010006,
558         0x9200, 0xffffffff, 0x00010006,
559         0x9150, 0xffffffff, 0x6e944040,
560         0x917c, 0xffffffff, 0x00030002,
561         0x9180, 0xffffffff, 0x00050004,
562         0x9198, 0xffffffff, 0x00030002,
563         0x919c, 0xffffffff, 0x00050004,
564         0x91b4, 0xffffffff, 0x00030002,
565         0x91b8, 0xffffffff, 0x00050004,
566         0x91d0, 0xffffffff, 0x00030002,
567         0x91d4, 0xffffffff, 0x00050004,
568         0x91f0, 0xffffffff, 0x00030002,
569         0x91f4, 0xffffffff, 0x00050004,
570         0x915c, 0xffffffff, 0x00010000,
571         0x9160, 0xffffffff, 0x00030002,
572         0x3f90, 0xffff0000, 0xff000000,
573         0x9178, 0xffffffff, 0x00070000,
574         0x9194, 0xffffffff, 0x00070000,
575         0x91b0, 0xffffffff, 0x00070000,
576         0x91cc, 0xffffffff, 0x00070000,
577         0x91ec, 0xffffffff, 0x00070000,
578         0x9148, 0xffff0000, 0xff000000,
579         0x9190, 0xffffffff, 0x00090008,
580         0x91ac, 0xffffffff, 0x00090008,
581         0x91c8, 0xffffffff, 0x00090008,
582         0x91e4, 0xffffffff, 0x00090008,
583         0x9204, 0xffffffff, 0x00090008,
584         0x3f94, 0xffff0000, 0xff000000,
585         0x914c, 0xffff0000, 0xff000000,
586         0x929c, 0xffffffff, 0x00000001,
587         0x8a18, 0xffffffff, 0x00000100,
588         0x8b28, 0xffffffff, 0x00000100,
589         0x9144, 0xffffffff, 0x00000100,
590         0x5644, 0xffffffff, 0x00000100,
591         0x9b7c, 0xffffffff, 0x00000000,
592         0x8030, 0xffffffff, 0x0000100a,
593         0x8a14, 0xffffffff, 0x00000007,
594         0x8b24, 0xffffffff, 0x00ff0fff,
595         0x8b10, 0xffffffff, 0x00000000,
596         0x28a4c, 0x06000000, 0x06000000,
597         0x4d8, 0xffffffff, 0x00000100,
598         0x913c, 0xffff000f, 0x0100000a,
599         0x960c, 0xffffffff, 0x54763210,
600         0x88c4, 0xffffffff, 0x000000c2,
601         0x88d4, 0xffffffff, 0x00000010,
602         0x8974, 0xffffffff, 0x00000000,
603         0xc78, 0x00000080, 0x00000080,
604         0x5e78, 0xffffffff, 0x001000f0,
605         0xd02c, 0xffffffff, 0x08421000,
606         0xa008, 0xffffffff, 0x00010000,
607         0x8d00, 0xffffffff, 0x100e4848,
608         0x8d04, 0xffffffff, 0x00164745,
609         0x8c00, 0xffffffff, 0xe4000003,
610         0x8cf0, 0x1fffffff, 0x08e00620,
611         0x28350, 0xffffffff, 0x00000000,
612         0x9508, 0xffffffff, 0x00000002
613 };
614
615 static const u32 sumo_golden_registers[] =
616 {
617         0x900c, 0x00ffffff, 0x0017071f,
618         0x8c18, 0xffffffff, 0x10101060,
619         0x8c1c, 0xffffffff, 0x00001010,
620         0x8c30, 0x0000000f, 0x00000005,
621         0x9688, 0x0000000f, 0x00000007
622 };
623
624 static const u32 wrestler_golden_registers[] =
625 {
626         0x5eb4, 0xffffffff, 0x00000002,
627         0x5c4, 0xffffffff, 0x00000001,
628         0x7030, 0xffffffff, 0x00000011,
629         0x7c30, 0xffffffff, 0x00000011,
630         0x6104, 0x01000300, 0x00000000,
631         0x5bc0, 0x00300000, 0x00000000,
632         0x918c, 0xffffffff, 0x00010006,
633         0x91a8, 0xffffffff, 0x00010006,
634         0x9150, 0xffffffff, 0x6e944040,
635         0x917c, 0xffffffff, 0x00030002,
636         0x9198, 0xffffffff, 0x00030002,
637         0x915c, 0xffffffff, 0x00010000,
638         0x3f90, 0xffff0000, 0xff000000,
639         0x9178, 0xffffffff, 0x00070000,
640         0x9194, 0xffffffff, 0x00070000,
641         0x9148, 0xffff0000, 0xff000000,
642         0x9190, 0xffffffff, 0x00090008,
643         0x91ac, 0xffffffff, 0x00090008,
644         0x3f94, 0xffff0000, 0xff000000,
645         0x914c, 0xffff0000, 0xff000000,
646         0x929c, 0xffffffff, 0x00000001,
647         0x8a18, 0xffffffff, 0x00000100,
648         0x8b28, 0xffffffff, 0x00000100,
649         0x9144, 0xffffffff, 0x00000100,
650         0x9b7c, 0xffffffff, 0x00000000,
651         0x8030, 0xffffffff, 0x0000100a,
652         0x8a14, 0xffffffff, 0x00000001,
653         0x8b24, 0xffffffff, 0x00ff0fff,
654         0x8b10, 0xffffffff, 0x00000000,
655         0x28a4c, 0x06000000, 0x06000000,
656         0x4d8, 0xffffffff, 0x00000100,
657         0x913c, 0xffff000f, 0x0100000a,
658         0x960c, 0xffffffff, 0x54763210,
659         0x88c4, 0xffffffff, 0x000000c2,
660         0x88d4, 0xffffffff, 0x00000010,
661         0x8974, 0xffffffff, 0x00000000,
662         0xc78, 0x00000080, 0x00000080,
663         0x5e78, 0xffffffff, 0x001000f0,
664         0xd02c, 0xffffffff, 0x08421000,
665         0xa008, 0xffffffff, 0x00010000,
666         0x8d00, 0xffffffff, 0x100e4848,
667         0x8d04, 0xffffffff, 0x00164745,
668         0x8c00, 0xffffffff, 0xe4000003,
669         0x8cf0, 0x1fffffff, 0x08e00410,
670         0x28350, 0xffffffff, 0x00000000,
671         0x9508, 0xffffffff, 0x00000002,
672         0x900c, 0xffffffff, 0x0017071f,
673         0x8c18, 0xffffffff, 0x10101060,
674         0x8c1c, 0xffffffff, 0x00001010
675 };
676
677 static const u32 barts_golden_registers[] =
678 {
679         0x5eb4, 0xffffffff, 0x00000002,
680         0x5e78, 0x8f311ff1, 0x001000f0,
681         0x3f90, 0xffff0000, 0xff000000,
682         0x9148, 0xffff0000, 0xff000000,
683         0x3f94, 0xffff0000, 0xff000000,
684         0x914c, 0xffff0000, 0xff000000,
685         0xc78, 0x00000080, 0x00000080,
686         0xbd4, 0x70073777, 0x00010001,
687         0xd02c, 0xbfffff1f, 0x08421000,
688         0xd0b8, 0x03773777, 0x02011003,
689         0x5bc0, 0x00200000, 0x50100000,
690         0x98f8, 0x33773777, 0x02011003,
691         0x98fc, 0xffffffff, 0x76543210,
692         0x7030, 0x31000311, 0x00000011,
693         0x2f48, 0x00000007, 0x02011003,
694         0x6b28, 0x00000010, 0x00000012,
695         0x7728, 0x00000010, 0x00000012,
696         0x10328, 0x00000010, 0x00000012,
697         0x10f28, 0x00000010, 0x00000012,
698         0x11b28, 0x00000010, 0x00000012,
699         0x12728, 0x00000010, 0x00000012,
700         0x240c, 0x000007ff, 0x00000380,
701         0x8a14, 0xf000001f, 0x00000007,
702         0x8b24, 0x3fff3fff, 0x00ff0fff,
703         0x8b10, 0x0000ff0f, 0x00000000,
704         0x28a4c, 0x07ffffff, 0x06000000,
705         0x10c, 0x00000001, 0x00010003,
706         0xa02c, 0xffffffff, 0x0000009b,
707         0x913c, 0x0000000f, 0x0100000a,
708         0x8d00, 0xffff7f7f, 0x100e4848,
709         0x8d04, 0x00ffffff, 0x00164745,
710         0x8c00, 0xfffc0003, 0xe4000003,
711         0x8c04, 0xf8ff00ff, 0x40600060,
712         0x8c08, 0x00ff00ff, 0x001c001c,
713         0x8cf0, 0x1fff1fff, 0x08e00620,
714         0x8c20, 0x0fff0fff, 0x00800080,
715         0x8c24, 0x0fff0fff, 0x00800080,
716         0x8c18, 0xffffffff, 0x20202078,
717         0x8c1c, 0x0000ffff, 0x00001010,
718         0x28350, 0x00000f01, 0x00000000,
719         0x9508, 0x3700001f, 0x00000002,
720         0x960c, 0xffffffff, 0x54763210,
721         0x88c4, 0x001f3ae3, 0x000000c2,
722         0x88d4, 0x0000001f, 0x00000010,
723         0x8974, 0xffffffff, 0x00000000
724 };
725
/* "Golden" register settings for Turks.  Each entry appears to be an
 * (offset, mask, value) triple consumed by
 * radeon_program_register_sequence() at init time -- confirm against
 * that helper's definition.
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
776
/* "Golden" register settings for Caicos.  Same layout as the other
 * *_golden_registers tables in this file (apparently (offset, mask,
 * value) triples fed to radeon_program_register_sequence()).
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
827
828 static void evergreen_init_golden_registers(struct radeon_device *rdev)
829 {
830         switch (rdev->family) {
831         case CHIP_CYPRESS:
832         case CHIP_HEMLOCK:
833                 radeon_program_register_sequence(rdev,
834                                                  evergreen_golden_registers,
835                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
836                 radeon_program_register_sequence(rdev,
837                                                  evergreen_golden_registers2,
838                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
839                 radeon_program_register_sequence(rdev,
840                                                  cypress_mgcg_init,
841                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
842                 break;
843         case CHIP_JUNIPER:
844                 radeon_program_register_sequence(rdev,
845                                                  evergreen_golden_registers,
846                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
847                 radeon_program_register_sequence(rdev,
848                                                  evergreen_golden_registers2,
849                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
850                 radeon_program_register_sequence(rdev,
851                                                  juniper_mgcg_init,
852                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
853                 break;
854         case CHIP_REDWOOD:
855                 radeon_program_register_sequence(rdev,
856                                                  evergreen_golden_registers,
857                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
858                 radeon_program_register_sequence(rdev,
859                                                  evergreen_golden_registers2,
860                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
861                 radeon_program_register_sequence(rdev,
862                                                  redwood_mgcg_init,
863                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
864                 break;
865         case CHIP_CEDAR:
866                 radeon_program_register_sequence(rdev,
867                                                  cedar_golden_registers,
868                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
869                 radeon_program_register_sequence(rdev,
870                                                  evergreen_golden_registers2,
871                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
872                 radeon_program_register_sequence(rdev,
873                                                  cedar_mgcg_init,
874                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
875                 break;
876         case CHIP_PALM:
877                 radeon_program_register_sequence(rdev,
878                                                  wrestler_golden_registers,
879                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
880                 break;
881         case CHIP_SUMO:
882                 radeon_program_register_sequence(rdev,
883                                                  supersumo_golden_registers,
884                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
885                 break;
886         case CHIP_SUMO2:
887                 radeon_program_register_sequence(rdev,
888                                                  supersumo_golden_registers,
889                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
890                 radeon_program_register_sequence(rdev,
891                                                  sumo_golden_registers,
892                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
893                 break;
894         case CHIP_BARTS:
895                 radeon_program_register_sequence(rdev,
896                                                  barts_golden_registers,
897                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
898                 break;
899         case CHIP_TURKS:
900                 radeon_program_register_sequence(rdev,
901                                                  turks_golden_registers,
902                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
903                 break;
904         case CHIP_CAICOS:
905                 radeon_program_register_sequence(rdev,
906                                                  caicos_golden_registers,
907                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
908                 break;
909         default:
910                 break;
911         }
912 }
913
914 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
915                              unsigned *bankh, unsigned *mtaspect,
916                              unsigned *tile_split)
917 {
918         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
919         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
920         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
921         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
922         switch (*bankw) {
923         default:
924         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
925         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
926         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
927         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
928         }
929         switch (*bankh) {
930         default:
931         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
932         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
933         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
934         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
935         }
936         switch (*mtaspect) {
937         default:
938         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
939         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
940         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
941         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
942         }
943 }
944
945 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
946                               u32 cntl_reg, u32 status_reg)
947 {
948         int r, i;
949         struct atom_clock_dividers dividers;
950
951         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
952                                            clock, false, &dividers);
953         if (r)
954                 return r;
955
956         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
957
958         for (i = 0; i < 100; i++) {
959                 if (RREG32(status_reg) & DCLK_STATUS)
960                         break;
961                 mdelay(10);
962         }
963         if (i == 100)
964                 return -ETIMEDOUT;
965
966         return 0;
967 }
968
969 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
970 {
971         int r = 0;
972         u32 cg_scratch = RREG32(CG_SCRATCH1);
973
974         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
975         if (r)
976                 goto done;
977         cg_scratch &= 0xffff0000;
978         cg_scratch |= vclk / 100; /* Mhz */
979
980         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
981         if (r)
982                 goto done;
983         cg_scratch &= 0x0000ffff;
984         cg_scratch |= (dclk / 100) << 16; /* Mhz */
985
986 done:
987         WREG32(CG_SCRATCH1, cg_scratch);
988
989         return r;
990 }
991
/**
 * evergreen_set_uvd_clocks - program the UVD PLL for the requested clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (presumably 10 kHz units — TODO confirm against callers)
 * @dclk: requested UVD decode clock (same units as @vclk)
 *
 * Bypasses the UPLL, computes dividers, reprograms the PLL and switches
 * the clock sources back once the PLL has settled.  If either clock is 0
 * the PLL is simply left in bypass and put to sleep.  The exact ordering
 * of register writes and delays below follows the hardware bring-up
 * sequence and must not be reordered.
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* ISPARE9 selection depends on the feedback divider range;
	 * NOTE(review): threshold presumably from the hardware spec — confirm */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1080
1081 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1082 {
1083         u16 ctl, v;
1084         int err;
1085
1086         err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
1087         if (err)
1088                 return;
1089
1090         v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1091
1092         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1093          * to avoid hangs or perfomance issues
1094          */
1095         if ((v == 0) || (v == 6) || (v == 7)) {
1096                 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1097                 ctl |= (2 << 12);
1098                 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
1099         }
1100 }
1101
1102 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1103 {
1104         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1105                 return true;
1106         else
1107                 return false;
1108 }
1109
1110 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1111 {
1112         u32 pos1, pos2;
1113
1114         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1115         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1116
1117         if (pos1 != pos2)
1118                 return true;
1119         else
1120                 return false;
1121 }
1122
1123 /**
1124  * dce4_wait_for_vblank - vblank wait asic callback.
1125  *
1126  * @rdev: radeon_device pointer
1127  * @crtc: crtc to wait for vblank on
1128  *
1129  * Wait for vblank on the requested crtc (evergreen+).
1130  */
1131 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1132 {
1133         unsigned i = 0;
1134
1135         if (crtc >= rdev->num_crtc)
1136                 return;
1137
1138         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1139                 return;
1140
1141         /* depending on when we hit vblank, we may be close to active; if so,
1142          * wait for another frame.
1143          */
1144         while (dce4_is_in_vblank(rdev, crtc)) {
1145                 if (i++ % 100 == 0) {
1146                         if (!dce4_is_counter_moving(rdev, crtc))
1147                                 break;
1148                 }
1149         }
1150
1151         while (!dce4_is_in_vblank(rdev, crtc)) {
1152                 if (i++ % 100 == 0) {
1153                         if (!dce4_is_counter_moving(rdev, crtc))
1154                                 break;
1155                 }
1156         }
1157 }
1158
1159 /**
1160  * radeon_irq_kms_pflip_irq_get - pre-pageflip callback.
1161  *
1162  * @rdev: radeon_device pointer
1163  * @crtc: crtc to prepare for pageflip on
1164  *
1165  * Pre-pageflip callback (evergreen+).
1166  * Enables the pageflip irq (vblank irq).
1167  */
1168 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1169 {
1170         /* enable the pflip int */
1171         radeon_irq_kms_pflip_irq_get(rdev, crtc);
1172 }
1173
1174 /**
1175  * evergreen_post_page_flip - pos-pageflip callback.
1176  *
1177  * @rdev: radeon_device pointer
1178  * @crtc: crtc to cleanup pageflip on
1179  *
1180  * Post-pageflip callback (evergreen+).
1181  * Disables the pageflip irq (vblank irq).
1182  */
1183 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1184 {
1185         /* disable the pflip int */
1186         radeon_irq_kms_pflip_irq_put(rdev, crtc);
1187 }
1188
1189 /**
1190  * evergreen_page_flip - pageflip callback.
1191  *
1192  * @rdev: radeon_device pointer
1193  * @crtc_id: crtc to cleanup pageflip on
1194  * @crtc_base: new address of the crtc (GPU MC address)
1195  *
1196  * Does the actual pageflip (evergreen+).
1197  * During vblank we take the crtc lock and wait for the update_pending
1198  * bit to go high, when it does, we release the lock, and allow the
1199  * double buffered update to take place.
1200  * Returns the current update pending status.
1201  */
1202 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1203 {
1204         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1205         u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1206         int i;
1207
1208         /* Lock the graphics update lock */
1209         tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1210         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1211
1212         /* update the scanout addresses */
1213         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1214                upper_32_bits(crtc_base));
1215         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1216                (u32)crtc_base);
1217
1218         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1219                upper_32_bits(crtc_base));
1220         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1221                (u32)crtc_base);
1222
1223         /* Wait for update_pending to go high. */
1224         for (i = 0; i < rdev->usec_timeout; i++) {
1225                 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1226                         break;
1227                 udelay(1);
1228         }
1229         DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1230
1231         /* Unlock the lock, so double-buffering can take place inside vblank */
1232         tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1233         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1234
1235         /* Return current update_pending status: */
1236         return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1237 }
1238
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper applies a fused calibration offset to the raw
		 * thermal sensor reading
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* toffset appears to be a 9-bit two's-complement value:
		 * bit 8 set means the offset is negative
		 */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* NOTE(review): bits 10/9 look like saturation flags
		 * (clamp to -256 / 255) and bit 8 a sign bit for the low
		 * 9 bits — confirm against the Evergreen register spec
		 */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			/* sign-extend the 9-bit negative reading */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* reading is in half-degrees C; convert to millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1277
1278 int sumo_get_temp(struct radeon_device *rdev)
1279 {
1280         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1281         int actual_temp = temp - 49;
1282
1283         return actual_temp * 1000;
1284 }
1285
1286 /**
1287  * sumo_pm_init_profile - Initialize power profiles callback.
1288  *
1289  * @rdev: radeon_device pointer
1290  *
1291  * Initialize the power states used in profile mode
1292  * (sumo, trinity, SI).
1293  * Used for profile mode only.
1294  */
1295 void sumo_pm_init_profile(struct radeon_device *rdev)
1296 {
1297         int idx;
1298
1299         /* default */
1300         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1301         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1302         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1303         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1304
1305         /* low,mid sh/mh */
1306         if (rdev->flags & RADEON_IS_MOBILITY)
1307                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1308         else
1309                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1310
1311         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1312         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1313         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1314         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1315
1316         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1317         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1318         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1319         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1320
1321         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1322         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1323         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1324         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1325
1326         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1327         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1328         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1329         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1330
1331         /* high sh/mh */
1332         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1333         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1334         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1335         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1336         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1337                 rdev->pm.power_state[idx].num_clock_modes - 1;
1338
1339         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1340         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1341         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1342         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1343                 rdev->pm.power_state[idx].num_clock_modes - 1;
1344 }
1345
1346 /**
1347  * btc_pm_init_profile - Initialize power profiles callback.
1348  *
1349  * @rdev: radeon_device pointer
1350  *
1351  * Initialize the power states used in profile mode
1352  * (BTC, cayman).
1353  * Used for profile mode only.
1354  */
1355 void btc_pm_init_profile(struct radeon_device *rdev)
1356 {
1357         int idx;
1358
1359         /* default */
1360         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1361         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1362         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1363         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1364         /* starting with BTC, there is one state that is used for both
1365          * MH and SH.  Difference is that we always use the high clock index for
1366          * mclk.
1367          */
1368         if (rdev->flags & RADEON_IS_MOBILITY)
1369                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1370         else
1371                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1372         /* low sh */
1373         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1374         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1375         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1376         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1377         /* mid sh */
1378         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1379         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1380         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1381         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1382         /* high sh */
1383         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1384         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1385         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1386         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1387         /* low mh */
1388         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1389         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1390         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1391         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1392         /* mid mh */
1393         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1394         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1395         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1396         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1397         /* high mh */
1398         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1399         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1400         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1401         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1402 }
1403
1404 /**
1405  * evergreen_pm_misc - set additional pm hw parameters callback.
1406  *
1407  * @rdev: radeon_device pointer
1408  *
1409  * Set non-clock parameters associated with a power state
1410  * (voltage, etc.) (evergreen+).
1411  */
1412 void evergreen_pm_misc(struct radeon_device *rdev)
1413 {
1414         int req_ps_idx = rdev->pm.requested_power_state_index;
1415         int req_cm_idx = rdev->pm.requested_clock_mode_index;
1416         struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1417         struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1418
1419         if (voltage->type == VOLTAGE_SW) {
1420                 /* 0xff01 is a flag rather then an actual voltage */
1421                 if (voltage->voltage == 0xff01)
1422                         return;
1423                 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1424                         radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1425                         rdev->pm.current_vddc = voltage->voltage;
1426                         DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1427                 }
1428
1429                 /* starting with BTC, there is one state that is used for both
1430                  * MH and SH.  Difference is that we always use the high clock index for
1431                  * mclk and vddci.
1432                  */
1433                 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1434                     (rdev->family >= CHIP_BARTS) &&
1435                     rdev->pm.active_crtc_count &&
1436                     ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1437                      (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1438                         voltage = &rdev->pm.power_state[req_ps_idx].
1439                                 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1440
1441                 /* 0xff01 is a flag rather then an actual voltage */
1442                 if (voltage->vddci == 0xff01)
1443                         return;
1444                 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1445                         radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1446                         rdev->pm.current_vddci = voltage->vddci;
1447                         DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1448                 }
1449         }
1450 }
1451
1452 /**
1453  * evergreen_pm_prepare - pre-power state change callback.
1454  *
1455  * @rdev: radeon_device pointer
1456  *
1457  * Prepare for a power state change (evergreen+).
1458  */
1459 void evergreen_pm_prepare(struct radeon_device *rdev)
1460 {
1461         struct drm_device *ddev = rdev->ddev;
1462         struct drm_crtc *crtc;
1463         struct radeon_crtc *radeon_crtc;
1464         u32 tmp;
1465
1466         /* disable any active CRTCs */
1467         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1468                 radeon_crtc = to_radeon_crtc(crtc);
1469                 if (radeon_crtc->enabled) {
1470                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1471                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1472                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1473                 }
1474         }
1475 }
1476
1477 /**
1478  * evergreen_pm_finish - post-power state change callback.
1479  *
1480  * @rdev: radeon_device pointer
1481  *
1482  * Clean up after a power state change (evergreen+).
1483  */
1484 void evergreen_pm_finish(struct radeon_device *rdev)
1485 {
1486         struct drm_device *ddev = rdev->ddev;
1487         struct drm_crtc *crtc;
1488         struct radeon_crtc *radeon_crtc;
1489         u32 tmp;
1490
1491         /* enable any active CRTCs */
1492         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1493                 radeon_crtc = to_radeon_crtc(crtc);
1494                 if (radeon_crtc->enabled) {
1495                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1496                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1497                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1498                 }
1499         }
1500 }
1501
1502 /**
1503  * evergreen_hpd_sense - hpd sense callback.
1504  *
1505  * @rdev: radeon_device pointer
1506  * @hpd: hpd (hotplug detect) pin
1507  *
1508  * Checks if a digital monitor is connected (evergreen+).
1509  * Returns true if connected, false if not connected.
1510  */
1511 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1512 {
1513         bool connected = false;
1514
1515         switch (hpd) {
1516         case RADEON_HPD_1:
1517                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1518                         connected = true;
1519                 break;
1520         case RADEON_HPD_2:
1521                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1522                         connected = true;
1523                 break;
1524         case RADEON_HPD_3:
1525                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1526                         connected = true;
1527                 break;
1528         case RADEON_HPD_4:
1529                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1530                         connected = true;
1531                 break;
1532         case RADEON_HPD_5:
1533                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1534                         connected = true;
1535                 break;
1536         case RADEON_HPD_6:
1537                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1538                         connected = true;
1539                         break;
1540         default:
1541                 break;
1542         }
1543
1544         return connected;
1545 }
1546
1547 /**
1548  * evergreen_hpd_set_polarity - hpd set polarity callback.
1549  *
1550  * @rdev: radeon_device pointer
1551  * @hpd: hpd (hotplug detect) pin
1552  *
1553  * Set the polarity of the hpd pin (evergreen+).
1554  */
1555 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1556                                 enum radeon_hpd_id hpd)
1557 {
1558         u32 tmp;
1559         bool connected = evergreen_hpd_sense(rdev, hpd);
1560
1561         switch (hpd) {
1562         case RADEON_HPD_1:
1563                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1564                 if (connected)
1565                         tmp &= ~DC_HPDx_INT_POLARITY;
1566                 else
1567                         tmp |= DC_HPDx_INT_POLARITY;
1568                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1569                 break;
1570         case RADEON_HPD_2:
1571                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1572                 if (connected)
1573                         tmp &= ~DC_HPDx_INT_POLARITY;
1574                 else
1575                         tmp |= DC_HPDx_INT_POLARITY;
1576                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1577                 break;
1578         case RADEON_HPD_3:
1579                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1580                 if (connected)
1581                         tmp &= ~DC_HPDx_INT_POLARITY;
1582                 else
1583                         tmp |= DC_HPDx_INT_POLARITY;
1584                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1585                 break;
1586         case RADEON_HPD_4:
1587                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1588                 if (connected)
1589                         tmp &= ~DC_HPDx_INT_POLARITY;
1590                 else
1591                         tmp |= DC_HPDx_INT_POLARITY;
1592                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1593                 break;
1594         case RADEON_HPD_5:
1595                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1596                 if (connected)
1597                         tmp &= ~DC_HPDx_INT_POLARITY;
1598                 else
1599                         tmp |= DC_HPDx_INT_POLARITY;
1600                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1601                         break;
1602         case RADEON_HPD_6:
1603                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1604                 if (connected)
1605                         tmp &= ~DC_HPDx_INT_POLARITY;
1606                 else
1607                         tmp |= DC_HPDx_INT_POLARITY;
1608                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1609                 break;
1610         default:
1611                 break;
1612         }
1613 }
1614
1615 /**
1616  * evergreen_hpd_init - hpd setup callback.
1617  *
1618  * @rdev: radeon_device pointer
1619  *
1620  * Setup the hpd pins used by the card (evergreen+).
1621  * Enable the pin, set the polarity, and enable the hpd interrupts.
1622  */
1623 void evergreen_hpd_init(struct radeon_device *rdev)
1624 {
1625         struct drm_device *dev = rdev->ddev;
1626         struct drm_connector *connector;
1627         unsigned enabled = 0;
1628         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1629                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1630
1631         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1632                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1633
1634                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1635                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1636                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1637                          * aux dp channel on imac and help (but not completely fix)
1638                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1639                          * also avoid interrupt storms during dpms.
1640                          */
1641                         continue;
1642                 }
1643                 switch (radeon_connector->hpd.hpd) {
1644                 case RADEON_HPD_1:
1645                         WREG32(DC_HPD1_CONTROL, tmp);
1646                         break;
1647                 case RADEON_HPD_2:
1648                         WREG32(DC_HPD2_CONTROL, tmp);
1649                         break;
1650                 case RADEON_HPD_3:
1651                         WREG32(DC_HPD3_CONTROL, tmp);
1652                         break;
1653                 case RADEON_HPD_4:
1654                         WREG32(DC_HPD4_CONTROL, tmp);
1655                         break;
1656                 case RADEON_HPD_5:
1657                         WREG32(DC_HPD5_CONTROL, tmp);
1658                         break;
1659                 case RADEON_HPD_6:
1660                         WREG32(DC_HPD6_CONTROL, tmp);
1661                         break;
1662                 default:
1663                         break;
1664                 }
1665                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1666                 enabled |= 1 << radeon_connector->hpd.hpd;
1667         }
1668         radeon_irq_kms_enable_hpd(rdev, enabled);
1669 }
1670
1671 /**
1672  * evergreen_hpd_fini - hpd tear down callback.
1673  *
1674  * @rdev: radeon_device pointer
1675  *
1676  * Tear down the hpd pins used by the card (evergreen+).
1677  * Disable the hpd interrupts.
1678  */
1679 void evergreen_hpd_fini(struct radeon_device *rdev)
1680 {
1681         struct drm_device *dev = rdev->ddev;
1682         struct drm_connector *connector;
1683         unsigned disabled = 0;
1684
1685         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1686                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1687                 switch (radeon_connector->hpd.hpd) {
1688                 case RADEON_HPD_1:
1689                         WREG32(DC_HPD1_CONTROL, 0);
1690                         break;
1691                 case RADEON_HPD_2:
1692                         WREG32(DC_HPD2_CONTROL, 0);
1693                         break;
1694                 case RADEON_HPD_3:
1695                         WREG32(DC_HPD3_CONTROL, 0);
1696                         break;
1697                 case RADEON_HPD_4:
1698                         WREG32(DC_HPD4_CONTROL, 0);
1699                         break;
1700                 case RADEON_HPD_5:
1701                         WREG32(DC_HPD5_CONTROL, 0);
1702                         break;
1703                 case RADEON_HPD_6:
1704                         WREG32(DC_HPD6_CONTROL, 0);
1705                         break;
1706                 default:
1707                         break;
1708                 }
1709                 disabled |= 1 << radeon_connector->hpd.hpd;
1710         }
1711         radeon_irq_kms_disable_hpd(rdev, disabled);
1712 }
1713
1714 /* watermark setup */
1715
1716 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1717                                         struct radeon_crtc *radeon_crtc,
1718                                         struct drm_display_mode *mode,
1719                                         struct drm_display_mode *other_mode)
1720 {
1721         u32 tmp, buffer_alloc, i;
1722         u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1723         /*
1724          * Line Buffer Setup
1725          * There are 3 line buffers, each one shared by 2 display controllers.
1726          * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1727          * the display controllers.  The paritioning is done via one of four
1728          * preset allocations specified in bits 2:0:
1729          * first display controller
1730          *  0 - first half of lb (3840 * 2)
1731          *  1 - first 3/4 of lb (5760 * 2)
1732          *  2 - whole lb (7680 * 2), other crtc must be disabled
1733          *  3 - first 1/4 of lb (1920 * 2)
1734          * second display controller
1735          *  4 - second half of lb (3840 * 2)
1736          *  5 - second 3/4 of lb (5760 * 2)
1737          *  6 - whole lb (7680 * 2), other crtc must be disabled
1738          *  7 - last 1/4 of lb (1920 * 2)
1739          */
1740         /* this can get tricky if we have two large displays on a paired group
1741          * of crtcs.  Ideally for multiple large displays we'd assign them to
1742          * non-linked crtcs for maximum line buffer allocation.
1743          */
1744         if (radeon_crtc->base.enabled && mode) {
1745                 if (other_mode) {
1746                         tmp = 0; /* 1/2 */
1747                         buffer_alloc = 1;
1748                 } else {
1749                         tmp = 2; /* whole */
1750                         buffer_alloc = 2;
1751                 }
1752         } else {
1753                 tmp = 0;
1754                 buffer_alloc = 0;
1755         }
1756
1757         /* second controller of the pair uses second half of the lb */
1758         if (radeon_crtc->crtc_id % 2)
1759                 tmp += 4;
1760         WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1761
1762         if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1763                 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1764                        DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1765                 for (i = 0; i < rdev->usec_timeout; i++) {
1766                         if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1767                             DMIF_BUFFERS_ALLOCATED_COMPLETED)
1768                                 break;
1769                         udelay(1);
1770                 }
1771         }
1772
1773         if (radeon_crtc->base.enabled && mode) {
1774                 switch (tmp) {
1775                 case 0:
1776                 case 4:
1777                 default:
1778                         if (ASIC_IS_DCE5(rdev))
1779                                 return 4096 * 2;
1780                         else
1781                                 return 3840 * 2;
1782                 case 1:
1783                 case 5:
1784                         if (ASIC_IS_DCE5(rdev))
1785                                 return 6144 * 2;
1786                         else
1787                                 return 5760 * 2;
1788                 case 2:
1789                 case 6:
1790                         if (ASIC_IS_DCE5(rdev))
1791                                 return 8192 * 2;
1792                         else
1793                                 return 7680 * 2;
1794                 case 3:
1795                 case 7:
1796                         if (ASIC_IS_DCE5(rdev))
1797                                 return 2048 * 2;
1798                         else
1799                                 return 1920 * 2;
1800                 }
1801         }
1802
1803         /* controller not enabled, so no lb used */
1804         return 0;
1805 }
1806
1807 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1808 {
1809         u32 tmp = RREG32(MC_SHARED_CHMAP);
1810
1811         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1812         case 0:
1813         default:
1814                 return 1;
1815         case 1:
1816                 return 2;
1817         case 2:
1818                 return 4;
1819         case 3:
1820                 return 8;
1821         }
1822 }
1823
/* Inputs to the evergreen display watermark calculations below. */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1839
1840 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1841 {
1842         /* Calculate DRAM Bandwidth and the part allocated to display. */
1843         fixed20_12 dram_efficiency; /* 0.7 */
1844         fixed20_12 yclk, dram_channels, bandwidth;
1845         fixed20_12 a;
1846
1847         a.full = dfixed_const(1000);
1848         yclk.full = dfixed_const(wm->yclk);
1849         yclk.full = dfixed_div(yclk, a);
1850         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1851         a.full = dfixed_const(10);
1852         dram_efficiency.full = dfixed_const(7);
1853         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1854         bandwidth.full = dfixed_mul(dram_channels, yclk);
1855         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1856
1857         return dfixed_trunc(bandwidth);
1858 }
1859
1860 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1861 {
1862         /* Calculate DRAM Bandwidth and the part allocated to display. */
1863         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1864         fixed20_12 yclk, dram_channels, bandwidth;
1865         fixed20_12 a;
1866
1867         a.full = dfixed_const(1000);
1868         yclk.full = dfixed_const(wm->yclk);
1869         yclk.full = dfixed_div(yclk, a);
1870         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1871         a.full = dfixed_const(10);
1872         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1873         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1874         bandwidth.full = dfixed_mul(dram_channels, yclk);
1875         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1876
1877         return dfixed_trunc(bandwidth);
1878 }
1879
1880 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1881 {
1882         /* Calculate the display Data return Bandwidth */
1883         fixed20_12 return_efficiency; /* 0.8 */
1884         fixed20_12 sclk, bandwidth;
1885         fixed20_12 a;
1886
1887         a.full = dfixed_const(1000);
1888         sclk.full = dfixed_const(wm->sclk);
1889         sclk.full = dfixed_div(sclk, a);
1890         a.full = dfixed_const(10);
1891         return_efficiency.full = dfixed_const(8);
1892         return_efficiency.full = dfixed_div(return_efficiency, a);
1893         a.full = dfixed_const(32);
1894         bandwidth.full = dfixed_mul(a, sclk);
1895         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1896
1897         return dfixed_trunc(bandwidth);
1898 }
1899
1900 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1901 {
1902         /* Calculate the DMIF Request Bandwidth */
1903         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1904         fixed20_12 disp_clk, bandwidth;
1905         fixed20_12 a;
1906
1907         a.full = dfixed_const(1000);
1908         disp_clk.full = dfixed_const(wm->disp_clk);
1909         disp_clk.full = dfixed_div(disp_clk, a);
1910         a.full = dfixed_const(10);
1911         disp_clk_request_efficiency.full = dfixed_const(8);
1912         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1913         a.full = dfixed_const(32);
1914         bandwidth.full = dfixed_mul(a, disp_clk);
1915         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1916
1917         return dfixed_trunc(bandwidth);
1918 }
1919
1920 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1921 {
1922         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1923         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1924         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
1925         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
1926
1927         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1928 }
1929
1930 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
1931 {
1932         /* Calculate the display mode Average Bandwidth
1933          * DisplayMode should contain the source and destination dimensions,
1934          * timing, etc.
1935          */
1936         fixed20_12 bpp;
1937         fixed20_12 line_time;
1938         fixed20_12 src_width;
1939         fixed20_12 bandwidth;
1940         fixed20_12 a;
1941
1942         a.full = dfixed_const(1000);
1943         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1944         line_time.full = dfixed_div(line_time, a);
1945         bpp.full = dfixed_const(wm->bytes_per_pixel);
1946         src_width.full = dfixed_const(wm->src_width);
1947         bandwidth.full = dfixed_mul(src_width, bpp);
1948         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1949         bandwidth.full = dfixed_div(bandwidth, line_time);
1950
1951         return dfixed_trunc(bandwidth);
1952 }
1953
/* Worst-case latency (ns) the line buffer must hide for this head, plus
 * any extra time needed to refill one destination line's worth of data.
 * Used as the urgency/latency watermark programmed by
 * evergreen_program_watermarks().
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time for a 512 * 8 byte chunk to return at the available bandwidth */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	/* time for a 128 * 4 byte cursor line pair to return */
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* other heads' outstanding fetches add to our worst-case wait */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	/* no active heads: nothing to hide, avoid dividing by num_heads */
	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling, many scaler taps, or interlace at >= 2:1
	 * need up to 4 source lines per destination line; otherwise 2 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* a = this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* b = bandwidth the display pipe itself can consume:
	 * disp_clk (converted kHz -> MHz) * bytes_per_pixel */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fill rate is limited by the smaller of the two */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fetch all source lines for one destination line
	 * at that fill rate */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line refills faster than it scans out, latency alone is
	 * the watermark; otherwise add the shortfall */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2006
2007 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2008 {
2009         if (evergreen_average_bandwidth(wm) <=
2010             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2011                 return true;
2012         else
2013                 return false;
2014 };
2015
2016 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2017 {
2018         if (evergreen_average_bandwidth(wm) <=
2019             (evergreen_available_bandwidth(wm) / wm->num_heads))
2020                 return true;
2021         else
2022                 return false;
2023 };
2024
2025 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2026 {
2027         u32 lb_partitions = wm->lb_size / wm->src_width;
2028         u32 line_time = wm->active_time + wm->blank_time;
2029         u32 latency_tolerant_lines;
2030         u32 latency_hiding;
2031         fixed20_12 a;
2032
2033         a.full = dfixed_const(1);
2034         if (wm->vsc.full > a.full)
2035                 latency_tolerant_lines = 1;
2036         else {
2037                 if (lb_partitions <= (wm->vtaps + 1))
2038                         latency_tolerant_lines = 1;
2039                 else
2040                         latency_tolerant_lines = 2;
2041         }
2042
2043         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2044
2045         if (evergreen_latency_watermark(wm) <= latency_hiding)
2046                 return true;
2047         else
2048                 return false;
2049 }
2050
/* Compute and program the latency watermarks and priority marks for one
 * crtc.  lb_size is the line buffer allocation returned by
 * evergreen_line_buffer_adjust(); num_heads is the total number of
 * active crtcs.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* mode->clock is in kHz, so this is the pixel period in ns;
		 * line_time is capped at the 16-bit register field maximum */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

		/* fill in the watermark parameters from the current
		 * clocks and mode */
		wm.yclk = rdev->pm.current_mclk * 10;
		wm.sclk = rdev->pm.current_sclk * 10;
		wm.disp_clk = mode->clock;
		wm.src_width = mode->crtc_hdisplay;
		wm.active_time = mode->crtc_hdisplay * pixel_period;
		wm.blank_time = line_time - wm.active_time;
		wm.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm.interlaced = true;
		wm.vsc = radeon_crtc->vsc;
		wm.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm.vtaps = 2;
		wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm.lb_size = lb_size;
		wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
		wm.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
		/* set for low clocks */
		/* wm.yclk = low clk; wm.sclk = low clk */
		latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
		    !evergreen_check_latency_hiding(&wm) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: watermark A (ns) converted to pixels via
		 * the pixel clock, scaled by hsc, in 16-pixel units */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B: same conversion for watermark B */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

}
2158
2159 /**
2160  * evergreen_bandwidth_update - update display watermarks callback.
2161  *
2162  * @rdev: radeon_device pointer
2163  *
2164  * Update the display watermarks based on the requested mode(s)
2165  * (evergreen+).
2166  */
2167 void evergreen_bandwidth_update(struct radeon_device *rdev)
2168 {
2169         struct drm_display_mode *mode0 = NULL;
2170         struct drm_display_mode *mode1 = NULL;
2171         u32 num_heads = 0, lb_size;
2172         int i;
2173
2174         radeon_update_display_priority(rdev);
2175
2176         for (i = 0; i < rdev->num_crtc; i++) {
2177                 if (rdev->mode_info.crtcs[i]->base.enabled)
2178                         num_heads++;
2179         }
2180         for (i = 0; i < rdev->num_crtc; i += 2) {
2181                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2182                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2183                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2184                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2185                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2186                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2187         }
2188 }
2189
2190 /**
2191  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2192  *
2193  * @rdev: radeon_device pointer
2194  *
2195  * Wait for the MC (memory controller) to be idle.
2196  * (evergreen+).
2197  * Returns 0 if the MC is idle, -1 if not.
2198  */
2199 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2200 {
2201         unsigned i;
2202         u32 tmp;
2203
2204         for (i = 0; i < rdev->usec_timeout; i++) {
2205                 /* read MC_STATUS */
2206                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2207                 if (!tmp)
2208                         return 0;
2209                 udelay(1);
2210         }
2211         return -1;
2212 }
2213
2214 /*
2215  * GART
2216  */
/* Flush the VM context0 TLB and wait for the flush to complete. */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush the HDP cache so pending writes reach memory first */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	/* request a context0 TLB flush */
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the response field of the flush request */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* response type 2 is treated as a failed flush */
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			/* any other non-zero response: flush completed */
			return;
		}
		udelay(1);
	}
}
2239
/*
 * evergreen_pcie_gart_enable - set up and enable the PCIE GART
 *
 * Pins the GART page table in VRAM, programs the VM L2 cache and
 * L1 TLBs, points VM context0 at the GTT range, enables it and
 * flushes the TLB.  Returns 0 on success or a negative error code.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* re-write existing GART entries into the (re)pinned table */
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* IGP (fusion) parts use the FUS_ variants of the MD TLB regs */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these families have an extra MD TLB instance */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context0 covers the GTT aperture; register values are 4k pages */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* protection faults are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2298
/*
 * evergreen_pcie_gart_disable - disable the PCIE GART
 *
 * Disables both VM contexts, reprograms the L2/L1 TLB controls with
 * the enable bits cleared, then unpins the page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: sizes only, ENABLE_L1_TLB left clear */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2323
/* Tear down the GART: disable HW access, free the table, free driver state. */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2330
2331
/*
 * evergreen_agp_enable - program the VM blocks for AGP operation
 *
 * Enables the L2 cache and L1 TLBs with system-aperture pass-through,
 * but leaves both VM contexts disabled (no GART page table is used).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page-table translation when running out of AGP */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2357
/*
 * evergreen_mc_stop - stop MC client access before reprogramming the MC
 *
 * Saves the VGA render state, blanks and disables every active CRTC,
 * waits for the MC to go idle, puts the MC into blackout mode and
 * locks the double buffered display registers.  The state needed by
 * evergreen_mc_resume() is recorded in @save.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank the display via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* crtc was fully disabled above, so treat it as disabled
			 * from here on (resume must not touch it) */
			save->crtc_enabled[i] = false;
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2443
/*
 * evergreen_mc_resume - restore MC client access after reprogramming
 *
 * Repoints all CRTC scanout bases at the (possibly relocated) VRAM
 * start, unlocks the double buffered registers, takes the MC out of
 * blackout, re-enables the CRTCs recorded in @save and restores the
 * VGA render state saved by evergreen_mc_stop().
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* clear the MASTER_UPDATE_MODE field -- NOTE(review):
			 * presumably selects the default update timing; confirm */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the pending surface update to complete */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank the display */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2531
/*
 * evergreen_mc_program - program the memory controller address layout
 *
 * Clears the HDP tiling surface registers, stops the MC clients,
 * programs the system/VRAM/AGP aperture registers from rdev->mc and
 * then resumes the clients.  Finally disables the VGA renderer so it
 * cannot scribble over driver-owned VRAM.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must cover both VRAM and AGP ranges */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* out-of-aperture accesses go to the scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start in low 16 bits, end in high 16 (16MB units) */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: program an empty AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2608
2609 /*
2610  * CP.
2611  */
/*
 * evergreen_ring_ib_execute - emit an indirect buffer on a ring
 *
 * Switches the CP to DX10/11 packet mode, optionally records the
 * predicted next rptr (into the rptr save register or the writeback
 * buffer), then emits the INDIRECT_BUFFER packet referencing @ib.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this SET_CONFIG_REG + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg - 
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this MEM_WRITE + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* NOTE(review): bit 18 presumably selects 32-bit data --
		 * confirm against the PM4 MEM_WRITE packet definition */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2645
2646
/*
 * evergreen_cp_load_microcode - load the PFP and ME microcode into the CP
 *
 * Stops the CP, then streams the two firmware images (big-endian
 * dwords) into the PFP ucode and ME RAM write ports.  Returns 0 on
 * success, -EINVAL if the firmware has not been loaded yet.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP microcode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME microcode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset the ucode address registers */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2678
/*
 * evergreen_cp_start - initialize the CP and emit the clear state
 *
 * Emits the ME_INITIALIZE packet, writes CP_ME_CNTL, then emits the
 * golden register state (evergreen_default_state) wrapped in clear
 * state preamble packets plus a few fixed init packets.  Returns 0
 * on success or the ring-lock error code.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	/* highest usable hardware context index */
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state plus the 19 extra dwords emitted below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /* next context reg (0x317) -- NOTE(review): register name unverified */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2744
/*
 * evergreen_cp_resume - bring up the CP ring buffer
 *
 * Soft-resets the CP and related blocks, programs the ring buffer
 * size, pointers and writeback addresses, starts the CP via
 * evergreen_cp_start() and then ring-tests it.  Returns 0 on
 * success or the ring test error code.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* readback */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);	/* readback */

	/* Set ring buffer size (log2 of the size in 8-byte units) */
	rb_bufsz = drm_order(ring->ring_size / 8);
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: stop the CP updating rptr in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	ring->rptr = RREG32(CP_RB_RPTR);

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
2813
2814 /*
2815  * Core functions
2816  */
2817 static void evergreen_gpu_init(struct radeon_device *rdev)
2818 {
2819         u32 gb_addr_config;
2820         u32 mc_shared_chmap, mc_arb_ramcfg;
2821         u32 sx_debug_1;
2822         u32 smx_dc_ctl0;
2823         u32 sq_config;
2824         u32 sq_lds_resource_mgmt;
2825         u32 sq_gpr_resource_mgmt_1;
2826         u32 sq_gpr_resource_mgmt_2;
2827         u32 sq_gpr_resource_mgmt_3;
2828         u32 sq_thread_resource_mgmt;
2829         u32 sq_thread_resource_mgmt_2;
2830         u32 sq_stack_resource_mgmt_1;
2831         u32 sq_stack_resource_mgmt_2;
2832         u32 sq_stack_resource_mgmt_3;
2833         u32 vgt_cache_invalidation;
2834         u32 hdp_host_path_cntl, tmp;
2835         u32 disabled_rb_mask;
2836         int i, j, num_shader_engines, ps_thread_count;
2837
2838         switch (rdev->family) {
2839         case CHIP_CYPRESS:
2840         case CHIP_HEMLOCK:
2841                 rdev->config.evergreen.num_ses = 2;
2842                 rdev->config.evergreen.max_pipes = 4;
2843                 rdev->config.evergreen.max_tile_pipes = 8;
2844                 rdev->config.evergreen.max_simds = 10;
2845                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2846                 rdev->config.evergreen.max_gprs = 256;
2847                 rdev->config.evergreen.max_threads = 248;
2848                 rdev->config.evergreen.max_gs_threads = 32;
2849                 rdev->config.evergreen.max_stack_entries = 512;
2850                 rdev->config.evergreen.sx_num_of_sets = 4;
2851                 rdev->config.evergreen.sx_max_export_size = 256;
2852                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2853                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2854                 rdev->config.evergreen.max_hw_contexts = 8;
2855                 rdev->config.evergreen.sq_num_cf_insts = 2;
2856
2857                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2858                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2859                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2860                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
2861                 break;
2862         case CHIP_JUNIPER:
2863                 rdev->config.evergreen.num_ses = 1;
2864                 rdev->config.evergreen.max_pipes = 4;
2865                 rdev->config.evergreen.max_tile_pipes = 4;
2866                 rdev->config.evergreen.max_simds = 10;
2867                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2868                 rdev->config.evergreen.max_gprs = 256;
2869                 rdev->config.evergreen.max_threads = 248;
2870                 rdev->config.evergreen.max_gs_threads = 32;
2871                 rdev->config.evergreen.max_stack_entries = 512;
2872                 rdev->config.evergreen.sx_num_of_sets = 4;
2873                 rdev->config.evergreen.sx_max_export_size = 256;
2874                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2875                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2876                 rdev->config.evergreen.max_hw_contexts = 8;
2877                 rdev->config.evergreen.sq_num_cf_insts = 2;
2878
2879                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2880                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2881                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2882                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
2883                 break;
2884         case CHIP_REDWOOD:
2885                 rdev->config.evergreen.num_ses = 1;
2886                 rdev->config.evergreen.max_pipes = 4;
2887                 rdev->config.evergreen.max_tile_pipes = 4;
2888                 rdev->config.evergreen.max_simds = 5;
2889                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2890                 rdev->config.evergreen.max_gprs = 256;
2891                 rdev->config.evergreen.max_threads = 248;
2892                 rdev->config.evergreen.max_gs_threads = 32;
2893                 rdev->config.evergreen.max_stack_entries = 256;
2894                 rdev->config.evergreen.sx_num_of_sets = 4;
2895                 rdev->config.evergreen.sx_max_export_size = 256;
2896                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2897                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2898                 rdev->config.evergreen.max_hw_contexts = 8;
2899                 rdev->config.evergreen.sq_num_cf_insts = 2;
2900
2901                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2902                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2903                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2904                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
2905                 break;
2906         case CHIP_CEDAR:
2907         default:
2908                 rdev->config.evergreen.num_ses = 1;
2909                 rdev->config.evergreen.max_pipes = 2;
2910                 rdev->config.evergreen.max_tile_pipes = 2;
2911                 rdev->config.evergreen.max_simds = 2;
2912                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2913                 rdev->config.evergreen.max_gprs = 256;
2914                 rdev->config.evergreen.max_threads = 192;
2915                 rdev->config.evergreen.max_gs_threads = 16;
2916                 rdev->config.evergreen.max_stack_entries = 256;
2917                 rdev->config.evergreen.sx_num_of_sets = 4;
2918                 rdev->config.evergreen.sx_max_export_size = 128;
2919                 rdev->config.evergreen.sx_max_export_pos_size = 32;
2920                 rdev->config.evergreen.sx_max_export_smx_size = 96;
2921                 rdev->config.evergreen.max_hw_contexts = 4;
2922                 rdev->config.evergreen.sq_num_cf_insts = 1;
2923
2924                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2925                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2926                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2927                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2928                 break;
2929         case CHIP_PALM:
2930                 rdev->config.evergreen.num_ses = 1;
2931                 rdev->config.evergreen.max_pipes = 2;
2932                 rdev->config.evergreen.max_tile_pipes = 2;
2933                 rdev->config.evergreen.max_simds = 2;
2934                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2935                 rdev->config.evergreen.max_gprs = 256;
2936                 rdev->config.evergreen.max_threads = 192;
2937                 rdev->config.evergreen.max_gs_threads = 16;
2938                 rdev->config.evergreen.max_stack_entries = 256;
2939                 rdev->config.evergreen.sx_num_of_sets = 4;
2940                 rdev->config.evergreen.sx_max_export_size = 128;
2941                 rdev->config.evergreen.sx_max_export_pos_size = 32;
2942                 rdev->config.evergreen.sx_max_export_smx_size = 96;
2943                 rdev->config.evergreen.max_hw_contexts = 4;
2944                 rdev->config.evergreen.sq_num_cf_insts = 1;
2945
2946                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2947                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2948                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2949                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2950                 break;
2951         case CHIP_SUMO:
2952                 rdev->config.evergreen.num_ses = 1;
2953                 rdev->config.evergreen.max_pipes = 4;
2954                 rdev->config.evergreen.max_tile_pipes = 4;
2955                 if (rdev->pdev->device == 0x9648)
2956                         rdev->config.evergreen.max_simds = 3;
2957                 else if ((rdev->pdev->device == 0x9647) ||
2958                          (rdev->pdev->device == 0x964a))
2959                         rdev->config.evergreen.max_simds = 4;
2960                 else
2961                         rdev->config.evergreen.max_simds = 5;
2962                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2963                 rdev->config.evergreen.max_gprs = 256;
2964                 rdev->config.evergreen.max_threads = 248;
2965                 rdev->config.evergreen.max_gs_threads = 32;
2966                 rdev->config.evergreen.max_stack_entries = 256;
2967                 rdev->config.evergreen.sx_num_of_sets = 4;
2968                 rdev->config.evergreen.sx_max_export_size = 256;
2969                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2970                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2971                 rdev->config.evergreen.max_hw_contexts = 8;
2972                 rdev->config.evergreen.sq_num_cf_insts = 2;
2973
2974                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2975                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2976                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2977                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
2978                 break;
2979         case CHIP_SUMO2:
2980                 rdev->config.evergreen.num_ses = 1;
2981                 rdev->config.evergreen.max_pipes = 4;
2982                 rdev->config.evergreen.max_tile_pipes = 4;
2983                 rdev->config.evergreen.max_simds = 2;
2984                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2985                 rdev->config.evergreen.max_gprs = 256;
2986                 rdev->config.evergreen.max_threads = 248;
2987                 rdev->config.evergreen.max_gs_threads = 32;
2988                 rdev->config.evergreen.max_stack_entries = 512;
2989                 rdev->config.evergreen.sx_num_of_sets = 4;
2990                 rdev->config.evergreen.sx_max_export_size = 256;
2991                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2992                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2993                 rdev->config.evergreen.max_hw_contexts = 4;
2994                 rdev->config.evergreen.sq_num_cf_insts = 2;
2995
2996                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2997                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2998                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2999                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3000                 break;
3001         case CHIP_BARTS:
3002                 rdev->config.evergreen.num_ses = 2;
3003                 rdev->config.evergreen.max_pipes = 4;
3004                 rdev->config.evergreen.max_tile_pipes = 8;
3005                 rdev->config.evergreen.max_simds = 7;
3006                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3007                 rdev->config.evergreen.max_gprs = 256;
3008                 rdev->config.evergreen.max_threads = 248;
3009                 rdev->config.evergreen.max_gs_threads = 32;
3010                 rdev->config.evergreen.max_stack_entries = 512;
3011                 rdev->config.evergreen.sx_num_of_sets = 4;
3012                 rdev->config.evergreen.sx_max_export_size = 256;
3013                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3014                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3015                 rdev->config.evergreen.max_hw_contexts = 8;
3016                 rdev->config.evergreen.sq_num_cf_insts = 2;
3017
3018                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3019                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3020                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3021                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3022                 break;
3023         case CHIP_TURKS:
3024                 rdev->config.evergreen.num_ses = 1;
3025                 rdev->config.evergreen.max_pipes = 4;
3026                 rdev->config.evergreen.max_tile_pipes = 4;
3027                 rdev->config.evergreen.max_simds = 6;
3028                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3029                 rdev->config.evergreen.max_gprs = 256;
3030                 rdev->config.evergreen.max_threads = 248;
3031                 rdev->config.evergreen.max_gs_threads = 32;
3032                 rdev->config.evergreen.max_stack_entries = 256;
3033                 rdev->config.evergreen.sx_num_of_sets = 4;
3034                 rdev->config.evergreen.sx_max_export_size = 256;
3035                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3036                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3037                 rdev->config.evergreen.max_hw_contexts = 8;
3038                 rdev->config.evergreen.sq_num_cf_insts = 2;
3039
3040                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3041                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3042                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3043                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3044                 break;
3045         case CHIP_CAICOS:
3046                 rdev->config.evergreen.num_ses = 1;
3047                 rdev->config.evergreen.max_pipes = 2;
3048                 rdev->config.evergreen.max_tile_pipes = 2;
3049                 rdev->config.evergreen.max_simds = 2;
3050                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3051                 rdev->config.evergreen.max_gprs = 256;
3052                 rdev->config.evergreen.max_threads = 192;
3053                 rdev->config.evergreen.max_gs_threads = 16;
3054                 rdev->config.evergreen.max_stack_entries = 256;
3055                 rdev->config.evergreen.sx_num_of_sets = 4;
3056                 rdev->config.evergreen.sx_max_export_size = 128;
3057                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3058                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3059                 rdev->config.evergreen.max_hw_contexts = 4;
3060                 rdev->config.evergreen.sq_num_cf_insts = 1;
3061
3062                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3063                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3064                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3065                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3066                 break;
3067         }
3068
3069         /* Initialize HDP */
3070         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3071                 WREG32((0x2c14 + j), 0x00000000);
3072                 WREG32((0x2c18 + j), 0x00000000);
3073                 WREG32((0x2c1c + j), 0x00000000);
3074                 WREG32((0x2c20 + j), 0x00000000);
3075                 WREG32((0x2c24 + j), 0x00000000);
3076         }
3077
3078         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3079
3080         evergreen_fix_pci_max_read_req_size(rdev);
3081
3082         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3083         if ((rdev->family == CHIP_PALM) ||
3084             (rdev->family == CHIP_SUMO) ||
3085             (rdev->family == CHIP_SUMO2))
3086                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3087         else
3088                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3089
3090         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3091          * not have bank info, so create a custom tiling dword.
3092          * bits 3:0   num_pipes
3093          * bits 7:4   num_banks
3094          * bits 11:8  group_size
3095          * bits 15:12 row_size
3096          */
3097         rdev->config.evergreen.tile_config = 0;
3098         switch (rdev->config.evergreen.max_tile_pipes) {
3099         case 1:
3100         default:
3101                 rdev->config.evergreen.tile_config |= (0 << 0);
3102                 break;
3103         case 2:
3104                 rdev->config.evergreen.tile_config |= (1 << 0);
3105                 break;
3106         case 4:
3107                 rdev->config.evergreen.tile_config |= (2 << 0);
3108                 break;
3109         case 8:
3110                 rdev->config.evergreen.tile_config |= (3 << 0);
3111                 break;
3112         }
3113         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3114         if (rdev->flags & RADEON_IS_IGP)
3115                 rdev->config.evergreen.tile_config |= 1 << 4;
3116         else {
3117                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3118                 case 0: /* four banks */
3119                         rdev->config.evergreen.tile_config |= 0 << 4;
3120                         break;
3121                 case 1: /* eight banks */
3122                         rdev->config.evergreen.tile_config |= 1 << 4;
3123                         break;
3124                 case 2: /* sixteen banks */
3125                 default:
3126                         rdev->config.evergreen.tile_config |= 2 << 4;
3127                         break;
3128                 }
3129         }
3130         rdev->config.evergreen.tile_config |= 0 << 8;
3131         rdev->config.evergreen.tile_config |=
3132                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3133
3134         num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3135
3136         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3137                 u32 efuse_straps_4;
3138                 u32 efuse_straps_3;
3139
3140                 WREG32(RCU_IND_INDEX, 0x204);
3141                 efuse_straps_4 = RREG32(RCU_IND_DATA);
3142                 WREG32(RCU_IND_INDEX, 0x203);
3143                 efuse_straps_3 = RREG32(RCU_IND_DATA);
3144                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3145                       ((efuse_straps_3 & 0xf0000000) >> 28));
3146         } else {
3147                 tmp = 0;
3148                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3149                         u32 rb_disable_bitmap;
3150
3151                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3152                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3153                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3154                         tmp <<= 4;
3155                         tmp |= rb_disable_bitmap;
3156                 }
3157         }
3158         /* enabled rb are just the one not disabled :) */
3159         disabled_rb_mask = tmp;
3160         tmp = 0;
3161         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3162                 tmp |= (1 << i);
3163         /* if all the backends are disabled, fix it up here */
3164         if ((disabled_rb_mask & tmp) == tmp) {
3165                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3166                         disabled_rb_mask &= ~(1 << i);
3167         }
3168
3169         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3170         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3171
3172         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3173         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3174         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3175         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3176         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3177         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3178         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3179
3180         if ((rdev->config.evergreen.max_backends == 1) &&
3181             (rdev->flags & RADEON_IS_IGP)) {
3182                 if ((disabled_rb_mask & 3) == 1) {
3183                         /* RB0 disabled, RB1 enabled */
3184                         tmp = 0x11111111;
3185                 } else {
3186                         /* RB1 disabled, RB0 enabled */
3187                         tmp = 0x00000000;
3188                 }
3189         } else {
3190                 tmp = gb_addr_config & NUM_PIPES_MASK;
3191                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3192                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3193         }
3194         WREG32(GB_BACKEND_MAP, tmp);
3195
3196         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3197         WREG32(CGTS_TCC_DISABLE, 0);
3198         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3199         WREG32(CGTS_USER_TCC_DISABLE, 0);
3200
3201         /* set HW defaults for 3D engine */
3202         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3203                                      ROQ_IB2_START(0x2b)));
3204
3205         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3206
3207         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3208                              SYNC_GRADIENT |
3209                              SYNC_WALKER |
3210                              SYNC_ALIGNER));
3211
3212         sx_debug_1 = RREG32(SX_DEBUG_1);
3213         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3214         WREG32(SX_DEBUG_1, sx_debug_1);
3215
3216
3217         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3218         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3219         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3220         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3221
3222         if (rdev->family <= CHIP_SUMO2)
3223                 WREG32(SMX_SAR_CTL0, 0x00010000);
3224
3225         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3226                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3227                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3228
3229         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3230                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3231                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3232
3233         WREG32(VGT_NUM_INSTANCES, 1);
3234         WREG32(SPI_CONFIG_CNTL, 0);
3235         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3236         WREG32(CP_PERFMON_CNTL, 0);
3237
3238         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3239                                   FETCH_FIFO_HIWATER(0x4) |
3240                                   DONE_FIFO_HIWATER(0xe0) |
3241                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3242
3243         sq_config = RREG32(SQ_CONFIG);
3244         sq_config &= ~(PS_PRIO(3) |
3245                        VS_PRIO(3) |
3246                        GS_PRIO(3) |
3247                        ES_PRIO(3));
3248         sq_config |= (VC_ENABLE |
3249                       EXPORT_SRC_C |
3250                       PS_PRIO(0) |
3251                       VS_PRIO(1) |
3252                       GS_PRIO(2) |
3253                       ES_PRIO(3));
3254
3255         switch (rdev->family) {
3256         case CHIP_CEDAR:
3257         case CHIP_PALM:
3258         case CHIP_SUMO:
3259         case CHIP_SUMO2:
3260         case CHIP_CAICOS:
3261                 /* no vertex cache */
3262                 sq_config &= ~VC_ENABLE;
3263                 break;
3264         default:
3265                 break;
3266         }
3267
3268         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3269
3270         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3271         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3272         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3273         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3274         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3275         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3276         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3277
3278         switch (rdev->family) {
3279         case CHIP_CEDAR:
3280         case CHIP_PALM:
3281         case CHIP_SUMO:
3282         case CHIP_SUMO2:
3283                 ps_thread_count = 96;
3284                 break;
3285         default:
3286                 ps_thread_count = 128;
3287                 break;
3288         }
3289
3290         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3291         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3292         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3293         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3294         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3295         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3296
3297         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3298         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3299         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3300         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3301         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3302         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3303
3304         WREG32(SQ_CONFIG, sq_config);
3305         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3306         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3307         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3308         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3309         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3310         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3311         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3312         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3313         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3314         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3315
3316         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3317                                           FORCE_EOV_MAX_REZ_CNT(255)));
3318
3319         switch (rdev->family) {
3320         case CHIP_CEDAR:
3321         case CHIP_PALM:
3322         case CHIP_SUMO:
3323         case CHIP_SUMO2:
3324         case CHIP_CAICOS:
3325                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3326                 break;
3327         default:
3328                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3329                 break;
3330         }
3331         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3332         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3333
3334         WREG32(VGT_GS_VERTEX_REUSE, 16);
3335         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3336         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3337
3338         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3339         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3340
3341         WREG32(CB_PERF_CTR0_SEL_0, 0);
3342         WREG32(CB_PERF_CTR0_SEL_1, 0);
3343         WREG32(CB_PERF_CTR1_SEL_0, 0);
3344         WREG32(CB_PERF_CTR1_SEL_1, 0);
3345         WREG32(CB_PERF_CTR2_SEL_0, 0);
3346         WREG32(CB_PERF_CTR2_SEL_1, 0);
3347         WREG32(CB_PERF_CTR3_SEL_0, 0);
3348         WREG32(CB_PERF_CTR3_SEL_1, 0);
3349
3350         /* clear render buffer base addresses */
3351         WREG32(CB_COLOR0_BASE, 0);
3352         WREG32(CB_COLOR1_BASE, 0);
3353         WREG32(CB_COLOR2_BASE, 0);
3354         WREG32(CB_COLOR3_BASE, 0);
3355         WREG32(CB_COLOR4_BASE, 0);
3356         WREG32(CB_COLOR5_BASE, 0);
3357         WREG32(CB_COLOR6_BASE, 0);
3358         WREG32(CB_COLOR7_BASE, 0);
3359         WREG32(CB_COLOR8_BASE, 0);
3360         WREG32(CB_COLOR9_BASE, 0);
3361         WREG32(CB_COLOR10_BASE, 0);
3362         WREG32(CB_COLOR11_BASE, 0);
3363
3364         /* set the shader const cache sizes to 0 */
3365         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3366                 WREG32(i, 0);
3367         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3368                 WREG32(i, 0);
3369
3370         tmp = RREG32(HDP_MISC_CNTL);
3371         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3372         WREG32(HDP_MISC_CNTL, tmp);
3373
3374         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3375         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3376
3377         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3378
3379         udelay(50);
3380
3381 }
3382
3383 int evergreen_mc_init(struct radeon_device *rdev)
3384 {
3385         u32 tmp;
3386         int chansize, numchan;
3387
3388         /* Get VRAM informations */
3389         rdev->mc.vram_is_ddr = true;
3390         if ((rdev->family == CHIP_PALM) ||
3391             (rdev->family == CHIP_SUMO) ||
3392             (rdev->family == CHIP_SUMO2))
3393                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3394         else
3395                 tmp = RREG32(MC_ARB_RAMCFG);
3396         if (tmp & CHANSIZE_OVERRIDE) {
3397                 chansize = 16;
3398         } else if (tmp & CHANSIZE_MASK) {
3399                 chansize = 64;
3400         } else {
3401                 chansize = 32;
3402         }
3403         tmp = RREG32(MC_SHARED_CHMAP);
3404         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3405         case 0:
3406         default:
3407                 numchan = 1;
3408                 break;
3409         case 1:
3410                 numchan = 2;
3411                 break;
3412         case 2:
3413                 numchan = 4;
3414                 break;
3415         case 3:
3416                 numchan = 8;
3417                 break;
3418         }
3419         rdev->mc.vram_width = numchan * chansize;
3420         /* Could aper size report 0 ? */
3421         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3422         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3423         /* Setup GPU memory space */
3424         if ((rdev->family == CHIP_PALM) ||
3425             (rdev->family == CHIP_SUMO) ||
3426             (rdev->family == CHIP_SUMO2)) {
3427                 /* size in bytes on fusion */
3428                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3429                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3430         } else {
3431                 /* size in MB on evergreen/cayman/tn */
3432                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3433                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3434         }
3435         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3436         r700_vram_gtt_location(rdev, &rdev->mc);
3437         radeon_update_bandwidth_info(rdev);
3438
3439         return 0;
3440 }
3441
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM block status, the CP stall/busy status and the async
 * DMA engine status via dev_info().  Called as a diagnostic aid before a
 * GPU soft reset is attempted.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* cayman and newer have a second async DMA engine at a +0x800 offset */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3469
3470 bool evergreen_is_display_hung(struct radeon_device *rdev)
3471 {
3472         u32 crtc_hung = 0;
3473         u32 crtc_status[6];
3474         u32 i, j, tmp;
3475
3476         for (i = 0; i < rdev->num_crtc; i++) {
3477                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3478                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3479                         crtc_hung |= (1 << i);
3480                 }
3481         }
3482
3483         for (j = 0; j < 10; j++) {
3484                 for (i = 0; i < rdev->num_crtc; i++) {
3485                         if (crtc_hung & (1 << i)) {
3486                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3487                                 if (tmp != crtc_status[i])
3488                                         crtc_hung &= ~(1 << i);
3489                         }
3490                 }
3491                 if (crtc_hung == 0)
3492                         return false;
3493                 udelay(100);
3494         }
3495
3496         return true;
3497 }
3498
3499 static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3500 {
3501         u32 reset_mask = 0;
3502         u32 tmp;
3503
3504         /* GRBM_STATUS */
3505         tmp = RREG32(GRBM_STATUS);
3506         if (tmp & (PA_BUSY | SC_BUSY |
3507                    SH_BUSY | SX_BUSY |
3508                    TA_BUSY | VGT_BUSY |
3509                    DB_BUSY | CB_BUSY |
3510                    SPI_BUSY | VGT_BUSY_NO_DMA))
3511                 reset_mask |= RADEON_RESET_GFX;
3512
3513         if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3514                    CP_BUSY | CP_COHERENCY_BUSY))
3515                 reset_mask |= RADEON_RESET_CP;
3516
3517         if (tmp & GRBM_EE_BUSY)
3518                 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3519
3520         /* DMA_STATUS_REG */
3521         tmp = RREG32(DMA_STATUS_REG);
3522         if (!(tmp & DMA_IDLE))
3523                 reset_mask |= RADEON_RESET_DMA;
3524
3525         /* SRBM_STATUS2 */
3526         tmp = RREG32(SRBM_STATUS2);
3527         if (tmp & DMA_BUSY)
3528                 reset_mask |= RADEON_RESET_DMA;
3529
3530         /* SRBM_STATUS */
3531         tmp = RREG32(SRBM_STATUS);
3532         if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3533                 reset_mask |= RADEON_RESET_RLC;
3534
3535         if (tmp & IH_BUSY)
3536                 reset_mask |= RADEON_RESET_IH;
3537
3538         if (tmp & SEM_BUSY)
3539                 reset_mask |= RADEON_RESET_SEM;
3540
3541         if (tmp & GRBM_RQ_PENDING)
3542                 reset_mask |= RADEON_RESET_GRBM;
3543
3544         if (tmp & VMC_BUSY)
3545                 reset_mask |= RADEON_RESET_VMC;
3546
3547         if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3548                    MCC_BUSY | MCD_BUSY))
3549                 reset_mask |= RADEON_RESET_MC;
3550
3551         if (evergreen_is_display_hung(rdev))
3552                 reset_mask |= RADEON_RESET_DISPLAY;
3553
3554         /* VM_L2_STATUS */
3555         tmp = RREG32(VM_L2_STATUS);
3556         if (tmp & L2_BUSY)
3557                 reset_mask |= RADEON_RESET_VMC;
3558
3559         /* Skip MC reset as it's mostly likely not hung, just busy */
3560         if (reset_mask & RADEON_RESET_MC) {
3561                 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3562                 reset_mask &= ~RADEON_RESET_MC;
3563         }
3564
3565         return reset_mask;
3566 }
3567
/**
 * evergreen_gpu_soft_reset - soft reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting which blocks to reset
 *
 * Halts the CP (and the async DMA engine if it is being reset), stops the
 * memory controller, then pulses the GRBM and SRBM soft-reset bits that
 * correspond to the requested blocks before restoring the MC.  The write
 * order, the read-backs after each soft-reset write and the udelay()s
 * follow the hardware programming sequence and must not be rearranged.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
        struct evergreen_mc_save save;
        u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
        u32 tmp;

        if (reset_mask == 0)
                return;

        dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

        evergreen_print_gpu_status_regs(rdev);

        /* Disable CP parsing/prefetching */
        WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

        if (reset_mask & RADEON_RESET_DMA) {
                /* Disable DMA */
                tmp = RREG32(DMA_RB_CNTL);
                tmp &= ~DMA_RB_ENABLE;
                WREG32(DMA_RB_CNTL, tmp);
        }

        udelay(50);

        /* quiesce the memory controller before touching reset bits */
        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
        }

        /* translate the generic reset mask into per-block GRBM reset bits */
        if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
                grbm_soft_reset |= SOFT_RESET_DB |
                        SOFT_RESET_CB |
                        SOFT_RESET_PA |
                        SOFT_RESET_SC |
                        SOFT_RESET_SPI |
                        SOFT_RESET_SX |
                        SOFT_RESET_SH |
                        SOFT_RESET_TC |
                        SOFT_RESET_TA |
                        SOFT_RESET_VC |
                        SOFT_RESET_VGT;
        }

        if (reset_mask & RADEON_RESET_CP) {
                grbm_soft_reset |= SOFT_RESET_CP |
                        SOFT_RESET_VGT;

                srbm_soft_reset |= SOFT_RESET_GRBM;
        }

        /* ... and into the SRBM-side reset bits */
        if (reset_mask & RADEON_RESET_DMA)
                srbm_soft_reset |= SOFT_RESET_DMA;

        if (reset_mask & RADEON_RESET_DISPLAY)
                srbm_soft_reset |= SOFT_RESET_DC;

        if (reset_mask & RADEON_RESET_RLC)
                srbm_soft_reset |= SOFT_RESET_RLC;

        if (reset_mask & RADEON_RESET_SEM)
                srbm_soft_reset |= SOFT_RESET_SEM;

        if (reset_mask & RADEON_RESET_IH)
                srbm_soft_reset |= SOFT_RESET_IH;

        if (reset_mask & RADEON_RESET_GRBM)
                srbm_soft_reset |= SOFT_RESET_GRBM;

        if (reset_mask & RADEON_RESET_VMC)
                srbm_soft_reset |= SOFT_RESET_VMC;

        /* on IGPs the MC shares system memory; never reset it there */
        if (!(rdev->flags & RADEON_IS_IGP)) {
                if (reset_mask & RADEON_RESET_MC)
                        srbm_soft_reset |= SOFT_RESET_MC;
        }

        if (grbm_soft_reset) {
                /* assert, read back to post the write, wait, then deassert */
                tmp = RREG32(GRBM_SOFT_RESET);
                tmp |= grbm_soft_reset;
                dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
                WREG32(GRBM_SOFT_RESET, tmp);
                tmp = RREG32(GRBM_SOFT_RESET);

                udelay(50);

                tmp &= ~grbm_soft_reset;
                WREG32(GRBM_SOFT_RESET, tmp);
                tmp = RREG32(GRBM_SOFT_RESET);
        }

        if (srbm_soft_reset) {
                /* same assert/deassert pulse for the SRBM-side blocks */
                tmp = RREG32(SRBM_SOFT_RESET);
                tmp |= srbm_soft_reset;
                dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
                WREG32(SRBM_SOFT_RESET, tmp);
                tmp = RREG32(SRBM_SOFT_RESET);

                udelay(50);

                tmp &= ~srbm_soft_reset;
                WREG32(SRBM_SOFT_RESET, tmp);
                tmp = RREG32(SRBM_SOFT_RESET);
        }

        /* Wait a little for things to settle down */
        udelay(50);

        evergreen_mc_resume(rdev, &save);
        udelay(50);

        evergreen_print_gpu_status_regs(rdev);
}
3681
3682 int evergreen_asic_reset(struct radeon_device *rdev)
3683 {
3684         u32 reset_mask;
3685
3686         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3687
3688         if (reset_mask)
3689                 r600_set_bios_scratch_engine_hung(rdev, true);
3690
3691         evergreen_gpu_soft_reset(rdev, reset_mask);
3692
3693         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3694
3695         if (!reset_mask)
3696                 r600_set_bios_scratch_engine_hung(rdev, false);
3697
3698         return 0;
3699 }
3700
3701 /**
3702  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3703  *
3704  * @rdev: radeon_device pointer
3705  * @ring: radeon_ring structure holding ring information
3706  *
3707  * Check if the GFX engine is locked up.
3708  * Returns true if the engine appears to be locked up, false if not.
3709  */
3710 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3711 {
3712         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3713
3714         if (!(reset_mask & (RADEON_RESET_GFX |
3715                             RADEON_RESET_COMPUTE |
3716                             RADEON_RESET_CP))) {
3717                 radeon_ring_lockup_update(ring);
3718                 return false;
3719         }
3720         /* force CP activities */
3721         radeon_ring_force_activity(rdev, ring);
3722         return radeon_ring_test_lockup(rdev, ring);
3723 }
3724
3725 /**
3726  * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3727  *
3728  * @rdev: radeon_device pointer
3729  * @ring: radeon_ring structure holding ring information
3730  *
3731  * Check if the async DMA engine is locked up.
3732  * Returns true if the engine appears to be locked up, false if not.
3733  */
3734 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3735 {
3736         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3737
3738         if (!(reset_mask & RADEON_RESET_DMA)) {
3739                 radeon_ring_lockup_update(ring);
3740                 return false;
3741         }
3742         /* force ring activities */
3743         radeon_ring_force_activity(rdev, ring);
3744         return radeon_ring_test_lockup(rdev, ring);
3745 }
3746
3747 /* Interrupts */
3748
3749 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
3750 {
3751         if (crtc >= rdev->num_crtc)
3752                 return 0;
3753         else
3754                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
3755 }
3756
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Programs every interrupt enable the driver uses (CP, DMA, GRBM,
 * per-crtc vblank/pageflip, DAC autodetect, HPD) to its disabled
 * state.  Only the crtc/HPD lines that exist on the ASIC are touched,
 * hence the num_crtc and family checks.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
        u32 tmp;

        if (rdev->family >= CHIP_CAYMAN) {
                /* Cayman+: three CP rings and a second DMA engine to silence */
                cayman_cp_int_cntl_setup(rdev, 0,
                                         CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
                cayman_cp_int_cntl_setup(rdev, 1, 0);
                cayman_cp_int_cntl_setup(rdev, 2, 0);
                tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
                WREG32(CAYMAN_DMA1_CNTL, tmp);
        } else
                WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
        tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
        WREG32(DMA_CNTL, tmp);
        WREG32(GRBM_INT_CNTL, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        if (rdev->num_crtc >= 4) {
                WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
                WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
                WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        }

        /* page-flip (GRPH) interrupts off on every present crtc */
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        if (rdev->num_crtc >= 4) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        }

        /* only one DAC on DCE5 */
        if (!ASIC_IS_DCE5(rdev))
                WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
        WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

        /* clear the HPD enables but preserve each pad's polarity bit */
        tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD1_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD2_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD3_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD4_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD5_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD6_INT_CONTROL, tmp);

}
3814
/**
 * evergreen_irq_set - program interrupt enables from the driver state
 *
 * @rdev: radeon_device pointer
 *
 * Builds the enable value for every interrupt source the driver tracks
 * (CP rings, DMA, per-crtc vblank, HPD pads, HDMI/AFMT audio) from
 * rdev->irq and writes them to the hardware in one pass.
 *
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
        u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
        u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
        u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
        u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
        u32 grbm_int_cntl = 0;
        u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
        u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
        u32 dma_cntl, dma_cntl1 = 0;

        if (!rdev->irq.installed) {
                WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
                return -EINVAL;
        }
        /* don't enable anything if the ih is disabled */
        if (!rdev->ih.enabled) {
                r600_disable_interrupts(rdev);
                /* force the active interrupt state to all disabled */
                evergreen_disable_interrupt_state(rdev);
                return 0;
        }

        /* start from the current register values with the enable bits cleared */
        hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
        hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;

        afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
        afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

        dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

        if (rdev->family >= CHIP_CAYMAN) {
                /* enable CP interrupts on all rings */
                if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
                        cp_int_cntl |= TIME_STAMP_INT_ENABLE;
                }
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
                        cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
                }
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
                        cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
                }
        } else {
                if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
                        DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
                        cp_int_cntl |= RB_INT_ENABLE;
                        cp_int_cntl |= TIME_STAMP_INT_ENABLE;
                }
        }

        if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
                DRM_DEBUG("r600_irq_set: sw int dma\n");
                dma_cntl |= TRAP_ENABLE;
        }

        if (rdev->family >= CHIP_CAYMAN) {
                /* Cayman+ has a second async DMA engine */
                dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
                if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
                        DRM_DEBUG("r600_irq_set: sw int dma1\n");
                        dma_cntl1 |= TRAP_ENABLE;
                }
        }

        /* vblank is wanted either for a vblank wait or a pending pageflip */
        if (rdev->irq.crtc_vblank_int[0] ||
            atomic_read(&rdev->irq.pflip[0])) {
                DRM_DEBUG("evergreen_irq_set: vblank 0\n");
                crtc1 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[1] ||
            atomic_read(&rdev->irq.pflip[1])) {
                DRM_DEBUG("evergreen_irq_set: vblank 1\n");
                crtc2 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[2] ||
            atomic_read(&rdev->irq.pflip[2])) {
                DRM_DEBUG("evergreen_irq_set: vblank 2\n");
                crtc3 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[3] ||
            atomic_read(&rdev->irq.pflip[3])) {
                DRM_DEBUG("evergreen_irq_set: vblank 3\n");
                crtc4 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[4] ||
            atomic_read(&rdev->irq.pflip[4])) {
                DRM_DEBUG("evergreen_irq_set: vblank 4\n");
                crtc5 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.crtc_vblank_int[5] ||
            atomic_read(&rdev->irq.pflip[5])) {
                DRM_DEBUG("evergreen_irq_set: vblank 5\n");
                crtc6 |= VBLANK_INT_MASK;
        }
        if (rdev->irq.hpd[0]) {
                DRM_DEBUG("evergreen_irq_set: hpd 1\n");
                hpd1 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[1]) {
                DRM_DEBUG("evergreen_irq_set: hpd 2\n");
                hpd2 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[2]) {
                DRM_DEBUG("evergreen_irq_set: hpd 3\n");
                hpd3 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[3]) {
                DRM_DEBUG("evergreen_irq_set: hpd 4\n");
                hpd4 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[4]) {
                DRM_DEBUG("evergreen_irq_set: hpd 5\n");
                hpd5 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.hpd[5]) {
                DRM_DEBUG("evergreen_irq_set: hpd 6\n");
                hpd6 |= DC_HPDx_INT_EN;
        }
        if (rdev->irq.afmt[0]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
                afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[1]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
                afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[2]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
                afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[3]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
                afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[4]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
                afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }
        if (rdev->irq.afmt[5]) {
                DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
                afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
        }

        /* commit everything to the hardware */
        if (rdev->family >= CHIP_CAYMAN) {
                cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
                cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
                cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
        } else
                WREG32(CP_INT_CNTL, cp_int_cntl);

        WREG32(DMA_CNTL, dma_cntl);

        if (rdev->family >= CHIP_CAYMAN)
                WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

        WREG32(GRBM_INT_CNTL, grbm_int_cntl);

        WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
        WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
        if (rdev->num_crtc >= 4) {
                WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
                WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
                WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
        }

        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
        if (rdev->num_crtc >= 4) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
        }

        WREG32(DC_HPD1_INT_CONTROL, hpd1);
        WREG32(DC_HPD2_INT_CONTROL, hpd2);
        WREG32(DC_HPD3_INT_CONTROL, hpd3);
        WREG32(DC_HPD4_INT_CONTROL, hpd4);
        WREG32(DC_HPD5_INT_CONTROL, hpd5);
        WREG32(DC_HPD6_INT_CONTROL, hpd6);

        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
        WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

        return 0;
}
4020
4021 static void evergreen_irq_ack(struct radeon_device *rdev)
4022 {
4023         u32 tmp;
4024
4025         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4026         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4027         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4028         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4029         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4030         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4031         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4032         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4033         if (rdev->num_crtc >= 4) {
4034                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4035                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4036         }
4037         if (rdev->num_crtc >= 6) {
4038                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4039                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4040         }
4041
4042         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4043         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4044         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4045         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4046         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4047         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4048
4049         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4050                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4051         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4052                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4053         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4054                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4055         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4056                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4057         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4058                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4059         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4060                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4061
4062         if (rdev->num_crtc >= 4) {
4063                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4064                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4065                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4066                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4067                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4068                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4069                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4070                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4071                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4072                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4073                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4074                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4075         }
4076
4077         if (rdev->num_crtc >= 6) {
4078                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4079                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4080                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4081                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4082                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4083                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4084                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4085                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4086                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4087                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4088                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4089                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4090         }
4091
4092         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4093                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4094                 tmp |= DC_HPDx_INT_ACK;
4095                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4096         }
4097         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4098                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4099                 tmp |= DC_HPDx_INT_ACK;
4100                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4101         }
4102         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4103                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4104                 tmp |= DC_HPDx_INT_ACK;
4105                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4106         }
4107         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4108                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4109                 tmp |= DC_HPDx_INT_ACK;
4110                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4111         }
4112         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4113                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4114                 tmp |= DC_HPDx_INT_ACK;
4115                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4116         }
4117         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4118                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4119                 tmp |= DC_HPDx_INT_ACK;
4120                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4121         }
4122         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4123                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4124                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4125                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4126         }
4127         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4128                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4129                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4130                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4131         }
4132         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4133                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4134                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4135                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4136         }
4137         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4138                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4139                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4140                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4141         }
4142         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4143                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4144                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4145                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4146         }
4147         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4148                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4149                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4150                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4151         }
4152 }
4153
/**
 * evergreen_irq_disable - fully quiesce interrupt generation
 *
 * @rdev: radeon_device pointer
 *
 * Disables interrupt delivery, waits for in-flight interrupts to land,
 * acks anything still pending and then forces every source's enable
 * bits off.  The sequence order is significant.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
        r600_disable_interrupts(rdev);
        /* Wait and acknowledge irq */
        mdelay(1);
        evergreen_irq_ack(rdev);
        evergreen_disable_interrupt_state(rdev);
}
4162
/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Quiesces all interrupt sources and then stops the RLC,
 * in that order, as part of the suspend path.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
        evergreen_irq_disable(rdev);
        r600_rlc_stop(rdev);
}
4168
4169 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4170 {
4171         u32 wptr, tmp;
4172
4173         if (rdev->wb.enabled)
4174                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4175         else
4176                 wptr = RREG32(IH_RB_WPTR);
4177
4178         if (wptr & RB_OVERFLOW) {
4179                 /* When a ring buffer overflow happen start parsing interrupt
4180                  * from the last not overwritten vector (wptr + 16). Hopefully
4181                  * this should allow us to catchup.
4182                  */
4183                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4184                         wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4185                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4186                 tmp = RREG32(IH_RB_CNTL);
4187                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4188                 WREG32(IH_RB_CNTL, tmp);
4189         }
4190         return (wptr & rdev->ih.ptr_mask);
4191 }
4192
4193 int evergreen_irq_process(struct radeon_device *rdev)
4194 {
4195         u32 wptr;
4196         u32 rptr;
4197         u32 src_id, src_data;
4198         u32 ring_index;
4199         bool queue_hotplug = false;
4200         bool queue_hdmi = false;
4201
4202         if (!rdev->ih.enabled || rdev->shutdown)
4203                 return IRQ_NONE;
4204
4205         wptr = evergreen_get_ih_wptr(rdev);
4206
4207 restart_ih:
4208         /* is somebody else already processing irqs? */
4209         if (atomic_xchg(&rdev->ih.lock, 1))
4210                 return IRQ_NONE;
4211
4212         rptr = rdev->ih.rptr;
4213         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4214
4215         /* Order reading of wptr vs. reading of IH ring data */
4216         rmb();
4217
4218         /* display interrupts */
4219         evergreen_irq_ack(rdev);
4220
4221         while (rptr != wptr) {
4222                 /* wptr/rptr are in bytes! */
4223                 ring_index = rptr / 4;
4224                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4225                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4226
4227                 switch (src_id) {
4228                 case 1: /* D1 vblank/vline */
4229                         switch (src_data) {
4230                         case 0: /* D1 vblank */
4231                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4232                                         if (rdev->irq.crtc_vblank_int[0]) {
4233                                                 drm_handle_vblank(rdev->ddev, 0);
4234                                                 rdev->pm.vblank_sync = true;
4235                                                 wake_up(&rdev->irq.vblank_queue);
4236                                         }
4237                                         if (atomic_read(&rdev->irq.pflip[0]))
4238                                                 radeon_crtc_handle_flip(rdev, 0);
4239                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4240                                         DRM_DEBUG("IH: D1 vblank\n");
4241                                 }
4242                                 break;
4243                         case 1: /* D1 vline */
4244                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4245                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4246                                         DRM_DEBUG("IH: D1 vline\n");
4247                                 }
4248                                 break;
4249                         default:
4250                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4251                                 break;
4252                         }
4253                         break;
4254                 case 2: /* D2 vblank/vline */
4255                         switch (src_data) {
4256                         case 0: /* D2 vblank */
4257                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4258                                         if (rdev->irq.crtc_vblank_int[1]) {
4259                                                 drm_handle_vblank(rdev->ddev, 1);
4260                                                 rdev->pm.vblank_sync = true;
4261                                                 wake_up(&rdev->irq.vblank_queue);
4262                                         }
4263                                         if (atomic_read(&rdev->irq.pflip[1]))
4264                                                 radeon_crtc_handle_flip(rdev, 1);
4265                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4266                                         DRM_DEBUG("IH: D2 vblank\n");
4267                                 }
4268                                 break;
4269                         case 1: /* D2 vline */
4270                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4271                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4272                                         DRM_DEBUG("IH: D2 vline\n");
4273                                 }
4274                                 break;
4275                         default:
4276                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4277                                 break;
4278                         }
4279                         break;
4280                 case 3: /* D3 vblank/vline */
4281                         switch (src_data) {
4282                         case 0: /* D3 vblank */
4283                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4284                                         if (rdev->irq.crtc_vblank_int[2]) {
4285                                                 drm_handle_vblank(rdev->ddev, 2);
4286                                                 rdev->pm.vblank_sync = true;
4287                                                 wake_up(&rdev->irq.vblank_queue);
4288                                         }
4289                                         if (atomic_read(&rdev->irq.pflip[2]))
4290                                                 radeon_crtc_handle_flip(rdev, 2);
4291                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4292                                         DRM_DEBUG("IH: D3 vblank\n");
4293                                 }
4294                                 break;
4295                         case 1: /* D3 vline */
4296                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4297                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4298                                         DRM_DEBUG("IH: D3 vline\n");
4299                                 }
4300                                 break;
4301                         default:
4302                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4303                                 break;
4304                         }
4305                         break;
4306                 case 4: /* D4 vblank/vline */
4307                         switch (src_data) {
4308                         case 0: /* D4 vblank */
4309                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4310                                         if (rdev->irq.crtc_vblank_int[3]) {
4311                                                 drm_handle_vblank(rdev->ddev, 3);
4312                                                 rdev->pm.vblank_sync = true;
4313                                                 wake_up(&rdev->irq.vblank_queue);
4314                                         }
4315                                         if (atomic_read(&rdev->irq.pflip[3]))
4316                                                 radeon_crtc_handle_flip(rdev, 3);
4317                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4318                                         DRM_DEBUG("IH: D4 vblank\n");
4319                                 }
4320                                 break;
4321                         case 1: /* D4 vline */
4322                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4323                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4324                                         DRM_DEBUG("IH: D4 vline\n");
4325                                 }
4326                                 break;
4327                         default:
4328                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4329                                 break;
4330                         }
4331                         break;
4332                 case 5: /* D5 vblank/vline */
4333                         switch (src_data) {
4334                         case 0: /* D5 vblank */
4335                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4336                                         if (rdev->irq.crtc_vblank_int[4]) {
4337                                                 drm_handle_vblank(rdev->ddev, 4);
4338                                                 rdev->pm.vblank_sync = true;
4339                                                 wake_up(&rdev->irq.vblank_queue);
4340                                         }
4341                                         if (atomic_read(&rdev->irq.pflip[4]))
4342                                                 radeon_crtc_handle_flip(rdev, 4);
4343                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4344                                         DRM_DEBUG("IH: D5 vblank\n");
4345                                 }
4346                                 break;
4347                         case 1: /* D5 vline */
4348                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4349                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4350                                         DRM_DEBUG("IH: D5 vline\n");
4351                                 }
4352                                 break;
4353                         default:
4354                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4355                                 break;
4356                         }
4357                         break;
4358                 case 6: /* D6 vblank/vline */
4359                         switch (src_data) {
4360                         case 0: /* D6 vblank */
4361                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4362                                         if (rdev->irq.crtc_vblank_int[5]) {
4363                                                 drm_handle_vblank(rdev->ddev, 5);
4364                                                 rdev->pm.vblank_sync = true;
4365                                                 wake_up(&rdev->irq.vblank_queue);
4366                                         }
4367                                         if (atomic_read(&rdev->irq.pflip[5]))
4368                                                 radeon_crtc_handle_flip(rdev, 5);
4369                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4370                                         DRM_DEBUG("IH: D6 vblank\n");
4371                                 }
4372                                 break;
4373                         case 1: /* D6 vline */
4374                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4375                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4376                                         DRM_DEBUG("IH: D6 vline\n");
4377                                 }
4378                                 break;
4379                         default:
4380                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4381                                 break;
4382                         }
4383                         break;
4384                 case 42: /* HPD hotplug */
4385                         switch (src_data) {
4386                         case 0:
4387                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4388                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4389                                         queue_hotplug = true;
4390                                         DRM_DEBUG("IH: HPD1\n");
4391                                 }
4392                                 break;
4393                         case 1:
4394                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4395                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4396                                         queue_hotplug = true;
4397                                         DRM_DEBUG("IH: HPD2\n");
4398                                 }
4399                                 break;
4400                         case 2:
4401                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4402                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4403                                         queue_hotplug = true;
4404                                         DRM_DEBUG("IH: HPD3\n");
4405                                 }
4406                                 break;
4407                         case 3:
4408                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4409                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4410                                         queue_hotplug = true;
4411                                         DRM_DEBUG("IH: HPD4\n");
4412                                 }
4413                                 break;
4414                         case 4:
4415                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4416                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4417                                         queue_hotplug = true;
4418                                         DRM_DEBUG("IH: HPD5\n");
4419                                 }
4420                                 break;
4421                         case 5:
4422                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4423                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4424                                         queue_hotplug = true;
4425                                         DRM_DEBUG("IH: HPD6\n");
4426                                 }
4427                                 break;
4428                         default:
4429                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4430                                 break;
4431                         }
4432                         break;
4433                 case 44: /* hdmi */
4434                         switch (src_data) {
4435                         case 0:
4436                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4437                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4438                                         queue_hdmi = true;
4439                                         DRM_DEBUG("IH: HDMI0\n");
4440                                 }
4441                                 break;
4442                         case 1:
4443                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4444                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4445                                         queue_hdmi = true;
4446                                         DRM_DEBUG("IH: HDMI1\n");
4447                                 }
4448                                 break;
4449                         case 2:
4450                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4451                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4452                                         queue_hdmi = true;
4453                                         DRM_DEBUG("IH: HDMI2\n");
4454                                 }
4455                                 break;
4456                         case 3:
4457                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4458                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4459                                         queue_hdmi = true;
4460                                         DRM_DEBUG("IH: HDMI3\n");
4461                                 }
4462                                 break;
4463                         case 4:
4464                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4465                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4466                                         queue_hdmi = true;
4467                                         DRM_DEBUG("IH: HDMI4\n");
4468                                 }
4469                                 break;
4470                         case 5:
4471                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4472                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4473                                         queue_hdmi = true;
4474                                         DRM_DEBUG("IH: HDMI5\n");
4475                                 }
4476                                 break;
4477                         default:
4478                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4479                                 break;
4480                         }
4481                 case 124: /* UVD */
4482                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4483                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4484                         break;
4485                 case 146:
4486                 case 147:
4487                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4488                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4489                                 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
4490                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4491                                 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
4492                         /* reset addr and status */
4493                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4494                         break;
4495                 case 176: /* CP_INT in ring buffer */
4496                 case 177: /* CP_INT in IB1 */
4497                 case 178: /* CP_INT in IB2 */
4498                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4499                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4500                         break;
4501                 case 181: /* CP EOP event */
4502                         DRM_DEBUG("IH: CP EOP\n");
4503                         if (rdev->family >= CHIP_CAYMAN) {
4504                                 switch (src_data) {
4505                                 case 0:
4506                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4507                                         break;
4508                                 case 1:
4509                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4510                                         break;
4511                                 case 2:
4512                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4513                                         break;
4514                                 }
4515                         } else
4516                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4517                         break;
4518                 case 224: /* DMA trap event */
4519                         DRM_DEBUG("IH: DMA trap\n");
4520                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4521                         break;
4522                 case 233: /* GUI IDLE */
4523                         DRM_DEBUG("IH: GUI idle\n");
4524                         break;
4525                 case 244: /* DMA trap event */
4526                         if (rdev->family >= CHIP_CAYMAN) {
4527                                 DRM_DEBUG("IH: DMA1 trap\n");
4528                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4529                         }
4530                         break;
4531                 default:
4532                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4533                         break;
4534                 }
4535
4536                 /* wptr/rptr are in bytes! */
4537                 rptr += 16;
4538                 rptr &= rdev->ih.ptr_mask;
4539         }
4540         if (queue_hotplug)
4541                 schedule_work(&rdev->hotplug_work);
4542         if (queue_hdmi)
4543                 schedule_work(&rdev->audio_work);
4544         rdev->ih.rptr = rptr;
4545         WREG32(IH_RB_RPTR, rdev->ih.rptr);
4546         atomic_set(&rdev->ih.lock, 0);
4547
4548         /* make sure wptr hasn't changed while processing */
4549         wptr = evergreen_get_ih_wptr(rdev);
4550         if (wptr != rptr)
4551                 goto restart_ih;
4552
4553         return IRQ_HANDLED;
4554 }
4555
4556 /**
4557  * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
4558  *
4559  * @rdev: radeon_device pointer
4560  * @fence: radeon fence object
4561  *
4562  * Add a DMA fence packet to the ring to write
4563  * the fence seq number and DMA trap packet to generate
4564  * an interrupt if needed (evergreen-SI).
4565  */
4566 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
4567                                    struct radeon_fence *fence)
4568 {
4569         struct radeon_ring *ring = &rdev->ring[fence->ring];
4570         u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
4571         /* write the fence */
4572         radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
4573         radeon_ring_write(ring, addr & 0xfffffffc);
4574         radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
4575         radeon_ring_write(ring, fence->seq);
4576         /* generate an interrupt */
4577         radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
4578         /* flush HDP */
4579         radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
4580         radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
4581         radeon_ring_write(ring, 1);
4582 }
4583
/**
 * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
 *
 * @rdev: radeon_device pointer
 * @ib: IB object to schedule
 *
 * Schedule an IB in the DMA ring (evergreen).
 *
 * When writeback is enabled, a WRITE packet is emitted first so the
 * hardware publishes the predicted read pointer to the writeback slot.
 */
void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
				   struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];

	if (rdev->wb.enabled) {
		/* Predict where rptr will be after this packet sequence:
		 * advance past the 4-dword WRITE packet, round up to the
		 * same (8n + 5) alignment the NOP padding below produces,
		 * then account for the 3-dword INDIRECT_BUFFER packet. */
		u32 next_rptr = ring->wptr + 4;
		while ((next_rptr & 7) != 5)
			next_rptr++;
		next_rptr += 3;
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
		radeon_ring_write(ring, next_rptr);
	}

	/* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
	 * Pad as necessary with NOPs.
	 */
	while ((ring->wptr & 7) != 5)
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	/* IB base must be 32-byte aligned; length and address high bits share a dword */
	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
	radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));

}
4618
4619 /**
4620  * evergreen_copy_dma - copy pages using the DMA engine
4621  *
4622  * @rdev: radeon_device pointer
4623  * @src_offset: src GPU address
4624  * @dst_offset: dst GPU address
4625  * @num_gpu_pages: number of GPU pages to xfer
4626  * @fence: radeon fence object
4627  *
4628  * Copy GPU paging using the DMA engine (evergreen-cayman).
4629  * Used by the radeon ttm implementation to move pages if
4630  * registered as the asic copy callback.
4631  */
4632 int evergreen_copy_dma(struct radeon_device *rdev,
4633                        uint64_t src_offset, uint64_t dst_offset,
4634                        unsigned num_gpu_pages,
4635                        struct radeon_fence **fence)
4636 {
4637         struct radeon_semaphore *sem = NULL;
4638         int ring_index = rdev->asic->copy.dma_ring_index;
4639         struct radeon_ring *ring = &rdev->ring[ring_index];
4640         u32 size_in_dw, cur_size_in_dw;
4641         int i, num_loops;
4642         int r = 0;
4643
4644         r = radeon_semaphore_create(rdev, &sem);
4645         if (r) {
4646                 DRM_ERROR("radeon: moving bo (%d).\n", r);
4647                 return r;
4648         }
4649
4650         size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
4651         num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
4652         r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
4653         if (r) {
4654                 DRM_ERROR("radeon: moving bo (%d).\n", r);
4655                 radeon_semaphore_free(rdev, &sem, NULL);
4656                 return r;
4657         }
4658
4659         if (radeon_fence_need_sync(*fence, ring->idx)) {
4660                 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
4661                                             ring->idx);
4662                 radeon_fence_note_sync(*fence, ring->idx);
4663         } else {
4664                 radeon_semaphore_free(rdev, &sem, NULL);
4665         }
4666
4667         for (i = 0; i < num_loops; i++) {
4668                 cur_size_in_dw = size_in_dw;
4669                 if (cur_size_in_dw > 0xFFFFF)
4670                         cur_size_in_dw = 0xFFFFF;
4671                 size_in_dw -= cur_size_in_dw;
4672                 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
4673                 radeon_ring_write(ring, dst_offset & 0xfffffffc);
4674                 radeon_ring_write(ring, src_offset & 0xfffffffc);
4675                 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
4676                 radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
4677                 src_offset += cur_size_in_dw * 4;
4678                 dst_offset += cur_size_in_dw * 4;
4679         }
4680
4681         r = radeon_fence_emit(rdev, fence, ring->idx);
4682         if (r) {
4683                 radeon_ring_unlock_undo(rdev, ring);
4684                 return r;
4685         }
4686
4687         radeon_ring_unlock_commit(rdev, ring);
4688         radeon_semaphore_free(rdev, &sem, *fence);
4689
4690         return r;
4691 }
4692
/* evergreen_startup - program the hardware and bring up all engines.
 * Called from both init and resume paths; the order of operations below
 * (ucode -> MC/GART -> writeback -> fences -> IRQ -> rings -> IB pool)
 * is deliberate and should not be rearranged.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);

	evergreen_mc_program(rdev);

	/* load microcode; DCE5 (NI) parts additionally require MC firmware */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	/* set up GPU address translation: AGP aperture or PCIE GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* blit failure is non-fatal: copies fall back to CPU memcpy */
	r = evergreen_blit_init(rdev);
	if (r) {
		r600_blit_fini(rdev);
		rdev->asic->copy.copy = NULL;
		dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD failure is non-fatal: the UVD ring is simply disabled below */
	r = rv770_uvd_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* bring up the GFX ring */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     0, 0xfffff, RADEON_CP_PACKET2);
	if (r)
		return r;

	/* bring up the DMA ring */
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_RB_RPTR, DMA_RB_WPTR,
			     2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* bring up the UVD ring if it was successfully resumed above */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size,
				     R600_WB_UVD_RPTR_OFFSET,
				     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
				     0, 0xfffff, RADEON_CP_PACKET2);
		if (!r)
			r = r600_uvd_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
4840
/**
 * evergreen_resume - bring the ASIC back up after a suspend cycle
 * @rdev: radeon_device pointer
 *
 * Resets the GPU, re-posts the card via the ATOM BIOS tables, restores
 * the golden register settings and then runs the full startup sequence
 * (evergreen_startup()).
 *
 * Returns 0 on success, or the negative error code propagated from
 * evergreen_startup() on failure (in which case acceleration is left
 * disabled via rdev->accel_working = false).
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	/* Optimistically mark acceleration working; cleared below if
	 * startup fails so the rest of the driver falls back to non-accel
	 * paths.
	 */
	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;

}
4871
/**
 * evergreen_suspend - quiesce the ASIC for suspend
 * @rdev: radeon_device pointer
 *
 * Stops the engines in dependency order: audio first, then the UVD
 * block, the CP (gfx) and DMA rings, then the interrupt controller,
 * writeback and finally the GART.  The order mirrors (in reverse) the
 * bring-up performed by evergreen_startup().
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r600_uvd_stop(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	/* disable writeback before tearing down the GART it writes through */
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
4885
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */
/**
 * evergreen_init - one-time driver/ASIC initialization
 * @rdev: radeon_device pointer
 *
 * Performs the full init sequence: BIOS fetch and validation (evergreen
 * requires an ATOMBIOS), ASIC reset, card posting if needed, golden
 * registers, scratch/surface registers, clocks, fence driver, optional
 * AGP, memory controller, buffer manager, ring setup (GFX, DMA and -
 * if UVD init succeeds - UVD), IH ring, and GART, then attempts
 * evergreen_startup().
 *
 * Returns 0 on success or a negative error code on a fatal init
 * failure.  Note that an evergreen_startup() failure here is NOT fatal:
 * acceleration is torn down and disabled, but init still returns 0 so
 * the driver can run unaccelerated.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		/* no BIOS is only fatal on AVIVO parts; otherwise we may
		 * still proceed (e.g. card already posted) */
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		/* posting requires the BIOS tables; without them we cannot
		 * bring the card up */
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		/* AGP failure is non-fatal; fall back to PCI(e) paths */
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* pre-size the GFX and DMA rings; ring objects are allocated later
	 * during startup */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional: only set up its ring if radeon_uvd_init()
	 * succeeded (e.g. firmware present) */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure is non-fatal: tear acceleration down and
		 * continue without it */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
4997
/**
 * evergreen_fini - final driver/ASIC teardown
 * @rdev: radeon_device pointer
 *
 * Tears everything down in roughly the reverse order of
 * evergreen_init()/evergreen_startup(): audio, blitter, CP and DMA
 * engines, interrupts, writeback, IB pool, GART, UVD, VRAM scratch,
 * GEM, fences, AGP, buffer manager and the atombios state, then frees
 * the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r600_blit_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_uvd_stop(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	/* BIOS image was kmalloc'd by radeon_get_bios(); release it and
	 * clear the pointer so later code cannot use it stale */
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5020
/**
 * evergreen_pcie_gen2_enable - try to switch the PCIe link to gen 2 speed
 * @rdev: radeon_device pointer
 *
 * Bails out early when gen2 is disabled via the radeon.pcie_gen2=0
 * module parameter, on IGP parts, on non-PCIE cards, on X2 (dual-GPU)
 * boards, when the upstream bus does not support at least 5.0 GT/s, or
 * when the link is already running at the higher data rate.  Otherwise
 * performs the register sequence to train the link to gen 2 if the
 * other side advertises gen 2 support; if not, it only updates the
 * upconfigure setting.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	/* user opted out via module parameter */
	if (radeon_pcie_gen2 == 0)
		return;

	/* IGPs have no external PCIe link to retrain */
	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	/* upstream bus must support at least 5.0 GT/s for gen2 to matter */
	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* only attempt the speed change if the link partner has ever
	 * indicated gen2 capability */
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* allow the link to upconfigure */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		/* each step below is a read-modify-write of LC_SPEED_CNTL;
		 * the re-reads between writes are intentional - keep the
		 * order as-is */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse the failed-speed-change counter clear bit */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* finally strap the link to gen2 */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		/* NOTE(review): the if (1) below deliberately forces the
		 * disable path until the bridge-vendor check above is
		 * implemented */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}