Merge tag 'tegra-for-4.8-i2c' of git://git.kernel.org/pub/scm/linux/kernel/git/tegra...
[cascardo/linux.git] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <drm/drmP.h>
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include "radeon_audio.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
38 /*
39  * Indirect registers accessor
40  */
41 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
42 {
43         unsigned long flags;
44         u32 r;
45
46         spin_lock_irqsave(&rdev->cg_idx_lock, flags);
47         WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
48         r = RREG32(EVERGREEN_CG_IND_DATA);
49         spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
50         return r;
51 }
52
53 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
54 {
55         unsigned long flags;
56
57         spin_lock_irqsave(&rdev->cg_idx_lock, flags);
58         WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
59         WREG32(EVERGREEN_CG_IND_DATA, (v));
60         spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
61 }
62
63 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
64 {
65         unsigned long flags;
66         u32 r;
67
68         spin_lock_irqsave(&rdev->pif_idx_lock, flags);
69         WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
70         r = RREG32(EVERGREEN_PIF_PHY0_DATA);
71         spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
72         return r;
73 }
74
75 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
76 {
77         unsigned long flags;
78
79         spin_lock_irqsave(&rdev->pif_idx_lock, flags);
80         WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
81         WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
82         spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
83 }
84
85 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
86 {
87         unsigned long flags;
88         u32 r;
89
90         spin_lock_irqsave(&rdev->pif_idx_lock, flags);
91         WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
92         r = RREG32(EVERGREEN_PIF_PHY1_DATA);
93         spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
94         return r;
95 }
96
97 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
98 {
99         unsigned long flags;
100
101         spin_lock_irqsave(&rdev->pif_idx_lock, flags);
102         WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
103         WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
104         spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
105 }
106
/* Per-CRTC MMIO register block offsets, indexed by CRTC number (0-5).
 * Add the offset for CRTC n to a CRTC register address to access that
 * register instance on display controller n. */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
116
117 #include "clearstate_evergreen.h"
118
/* Flat list of MMIO register offsets used to build the RLC save/restore
 * list for Sumo-class parts — presumably the registers the RLC preserves
 * across power gating; confirm against the RLC init code that consumes
 * this table. One u32 register offset per entry. */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
203
204 static void evergreen_gpu_init(struct radeon_device *rdev);
205 void evergreen_fini(struct radeon_device *rdev);
206 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
207 void evergreen_program_aspm(struct radeon_device *rdev);
208 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
209                                      int ring, u32 cp_int_cntl);
210 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
211                                    u32 status, u32 addr);
212 void cik_init_cp_pg_table(struct radeon_device *rdev);
213
214 extern u32 si_get_csb_size(struct radeon_device *rdev);
215 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
216 extern u32 cik_get_csb_size(struct radeon_device *rdev);
217 extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
218 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
219
/* "Golden" register settings for Evergreen (Cypress/Juniper class) parts.
 * Triplets of { register offset, and_mask, or_mask } — NOTE(review):
 * presumably consumed by radeon_program_register_sequence(), which clears
 * the and_mask bits and ORs in or_mask (full-mask entries are written
 * directly); confirm against radeon_device.c. */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
265
/* Second golden-register set for Evergreen; same { register offset,
 * and_mask, or_mask } triplet layout as evergreen_golden_registers.
 * All entries here fully clear their registers to 0. */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
283
/* Medium-grain clock gating (MGCG) init sequence for Cypress.
 * Triplets of { register offset, and_mask, or_mask } in the same format
 * as the golden-register tables. The repeated writes to 0x802c appear to
 * switch an index/bank selector between sub-sequences — TODO confirm
 * against the hardware programming docs. */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
436
/* Medium-grain clock gating (MGCG) init sequence for Redwood.
 * Triplets of { register offset, and_mask, or_mask }; same layout as
 * cypress_mgcg_init but with a shorter 0x91xx sub-sequence (Redwood has
 * fewer shader engines/pipes than Cypress — TODO confirm). */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
508
/* Golden register settings for Cedar.
 * Triplets of { register offset, and_mask, or_mask }; same layout as
 * evergreen_golden_registers, with values tuned for the smaller Cedar
 * configuration (fewer display/pipe entries). */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
551
/* Medium-grain clock gating (MGCG) init sequence for Cedar.
 * Triplets of { register offset, and_mask, or_mask }; same layout as
 * cypress_mgcg_init, reduced to Cedar's smaller configuration. */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
605
/* Medium-grain clock gating (MGCG) init sequence for Juniper.
 * Triplets of { register offset, and_mask, or_mask }; same layout as
 * cypress_mgcg_init, with part of the common 0x00000100 enable group
 * appearing after the 0x91xx sub-sequence instead of before it. */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
703
/* Golden register settings for SuperSumo APUs.
 * Triplets of { register offset, and_mask, or_mask }; same layout as
 * evergreen_golden_registers. */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
778
/* Additional golden register settings specific to Sumo (applied on top of
 * the shared SuperSumo table — confirm ordering in the init code).
 * Triplets of { register offset, and_mask, or_mask }. */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
787
/* Golden register settings for Wrestler APUs.
 * Triplets of { register offset, and_mask, or_mask }; same layout as
 * evergreen_golden_registers. */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
840
/* Barts golden register fixups: rows of { register offset, bit mask, value },
 * applied via radeon_program_register_sequence().
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
889
/* Turks golden register fixups: rows of { register offset, bit mask, value },
 * applied via radeon_program_register_sequence().
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
940
/* Caicos golden register fixups: rows of { register offset, bit mask, value },
 * applied via radeon_program_register_sequence().
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
991
992 static void evergreen_init_golden_registers(struct radeon_device *rdev)
993 {
994         switch (rdev->family) {
995         case CHIP_CYPRESS:
996         case CHIP_HEMLOCK:
997                 radeon_program_register_sequence(rdev,
998                                                  evergreen_golden_registers,
999                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
1000                 radeon_program_register_sequence(rdev,
1001                                                  evergreen_golden_registers2,
1002                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1003                 radeon_program_register_sequence(rdev,
1004                                                  cypress_mgcg_init,
1005                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
1006                 break;
1007         case CHIP_JUNIPER:
1008                 radeon_program_register_sequence(rdev,
1009                                                  evergreen_golden_registers,
1010                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
1011                 radeon_program_register_sequence(rdev,
1012                                                  evergreen_golden_registers2,
1013                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1014                 radeon_program_register_sequence(rdev,
1015                                                  juniper_mgcg_init,
1016                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
1017                 break;
1018         case CHIP_REDWOOD:
1019                 radeon_program_register_sequence(rdev,
1020                                                  evergreen_golden_registers,
1021                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
1022                 radeon_program_register_sequence(rdev,
1023                                                  evergreen_golden_registers2,
1024                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1025                 radeon_program_register_sequence(rdev,
1026                                                  redwood_mgcg_init,
1027                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
1028                 break;
1029         case CHIP_CEDAR:
1030                 radeon_program_register_sequence(rdev,
1031                                                  cedar_golden_registers,
1032                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
1033                 radeon_program_register_sequence(rdev,
1034                                                  evergreen_golden_registers2,
1035                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1036                 radeon_program_register_sequence(rdev,
1037                                                  cedar_mgcg_init,
1038                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
1039                 break;
1040         case CHIP_PALM:
1041                 radeon_program_register_sequence(rdev,
1042                                                  wrestler_golden_registers,
1043                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
1044                 break;
1045         case CHIP_SUMO:
1046                 radeon_program_register_sequence(rdev,
1047                                                  supersumo_golden_registers,
1048                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
1049                 break;
1050         case CHIP_SUMO2:
1051                 radeon_program_register_sequence(rdev,
1052                                                  supersumo_golden_registers,
1053                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
1054                 radeon_program_register_sequence(rdev,
1055                                                  sumo_golden_registers,
1056                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
1057                 break;
1058         case CHIP_BARTS:
1059                 radeon_program_register_sequence(rdev,
1060                                                  barts_golden_registers,
1061                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
1062                 break;
1063         case CHIP_TURKS:
1064                 radeon_program_register_sequence(rdev,
1065                                                  turks_golden_registers,
1066                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
1067                 break;
1068         case CHIP_CAICOS:
1069                 radeon_program_register_sequence(rdev,
1070                                                  caicos_golden_registers,
1071                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
1072                 break;
1073         default:
1074                 break;
1075         }
1076 }
1077
1078 /**
1079  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1080  *
1081  * @rdev: radeon_device pointer
1082  * @reg: register offset in bytes
1083  * @val: register value
1084  *
1085  * Returns 0 for success or -EINVAL for an invalid register
1086  *
1087  */
1088 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1089                                         u32 reg, u32 *val)
1090 {
1091         switch (reg) {
1092         case GRBM_STATUS:
1093         case GRBM_STATUS_SE0:
1094         case GRBM_STATUS_SE1:
1095         case SRBM_STATUS:
1096         case SRBM_STATUS2:
1097         case DMA_STATUS_REG:
1098         case UVD_STATUS:
1099                 *val = RREG32(reg);
1100                 return 0;
1101         default:
1102                 return -EINVAL;
1103         }
1104 }
1105
1106 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1107                              unsigned *bankh, unsigned *mtaspect,
1108                              unsigned *tile_split)
1109 {
1110         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1111         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1112         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1113         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1114         switch (*bankw) {
1115         default:
1116         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1117         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1118         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1119         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1120         }
1121         switch (*bankh) {
1122         default:
1123         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1124         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1125         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1126         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1127         }
1128         switch (*mtaspect) {
1129         default:
1130         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1131         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1132         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1133         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1134         }
1135 }
1136
1137 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1138                               u32 cntl_reg, u32 status_reg)
1139 {
1140         int r, i;
1141         struct atom_clock_dividers dividers;
1142
1143         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1144                                            clock, false, &dividers);
1145         if (r)
1146                 return r;
1147
1148         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1149
1150         for (i = 0; i < 100; i++) {
1151                 if (RREG32(status_reg) & DCLK_STATUS)
1152                         break;
1153                 mdelay(10);
1154         }
1155         if (i == 100)
1156                 return -ETIMEDOUT;
1157
1158         return 0;
1159 }
1160
1161 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1162 {
1163         int r = 0;
1164         u32 cg_scratch = RREG32(CG_SCRATCH1);
1165
1166         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1167         if (r)
1168                 goto done;
1169         cg_scratch &= 0xffff0000;
1170         cg_scratch |= vclk / 100; /* Mhz */
1171
1172         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1173         if (r)
1174                 goto done;
1175         cg_scratch &= 0x0000ffff;
1176         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1177
1178 done:
1179         WREG32(CG_SCRATCH1, cg_scratch);
1180
1181         return r;
1182 }
1183
/*
 * evergreen_set_uvd_clocks - set the UVD VCLK and DCLK via the UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested video clock
 * @dclk: requested decode clock
 *
 * Reprograms the UPLL that feeds VCLK/DCLK.  If either requested clock is
 * zero the PLL is left bypassed and put to sleep.  The register sequence
 * below is strictly ordered hardware bring-up; do not reorder the writes.
 * Returns 0 on success or a negative errno from the divider calculation
 * or the UPLL control requests.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute feedback and post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* ISPARE9 selection depends on the feedback divider magnitude */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1272
1273 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1274 {
1275         int readrq;
1276         u16 v;
1277
1278         readrq = pcie_get_readrq(rdev->pdev);
1279         v = ffs(readrq) - 8;
1280         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1281          * to avoid hangs or perfomance issues
1282          */
1283         if ((v == 0) || (v == 6) || (v == 7))
1284                 pcie_set_readrq(rdev->pdev, 512);
1285 }
1286
1287 void dce4_program_fmt(struct drm_encoder *encoder)
1288 {
1289         struct drm_device *dev = encoder->dev;
1290         struct radeon_device *rdev = dev->dev_private;
1291         struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1292         struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1293         struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1294         int bpc = 0;
1295         u32 tmp = 0;
1296         enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1297
1298         if (connector) {
1299                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1300                 bpc = radeon_get_monitor_bpc(connector);
1301                 dither = radeon_connector->dither;
1302         }
1303
1304         /* LVDS/eDP FMT is set up by atom */
1305         if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1306                 return;
1307
1308         /* not needed for analog */
1309         if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1310             (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1311                 return;
1312
1313         if (bpc == 0)
1314                 return;
1315
1316         switch (bpc) {
1317         case 6:
1318                 if (dither == RADEON_FMT_DITHER_ENABLE)
1319                         /* XXX sort out optimal dither settings */
1320                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1321                                 FMT_SPATIAL_DITHER_EN);
1322                 else
1323                         tmp |= FMT_TRUNCATE_EN;
1324                 break;
1325         case 8:
1326                 if (dither == RADEON_FMT_DITHER_ENABLE)
1327                         /* XXX sort out optimal dither settings */
1328                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1329                                 FMT_RGB_RANDOM_ENABLE |
1330                                 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1331                 else
1332                         tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1333                 break;
1334         case 10:
1335         default:
1336                 /* not needed */
1337                 break;
1338         }
1339
1340         WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1341 }
1342
1343 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1344 {
1345         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1346                 return true;
1347         else
1348                 return false;
1349 }
1350
1351 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1352 {
1353         u32 pos1, pos2;
1354
1355         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1356         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1357
1358         if (pos1 != pos2)
1359                 return true;
1360         else
1361                 return false;
1362 }
1363
1364 /**
1365  * dce4_wait_for_vblank - vblank wait asic callback.
1366  *
1367  * @rdev: radeon_device pointer
1368  * @crtc: crtc to wait for vblank on
1369  *
1370  * Wait for vblank on the requested crtc (evergreen+).
1371  */
1372 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1373 {
1374         unsigned i = 0;
1375
1376         if (crtc >= rdev->num_crtc)
1377                 return;
1378
1379         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1380                 return;
1381
1382         /* depending on when we hit vblank, we may be close to active; if so,
1383          * wait for another frame.
1384          */
1385         while (dce4_is_in_vblank(rdev, crtc)) {
1386                 if (i++ % 100 == 0) {
1387                         if (!dce4_is_counter_moving(rdev, crtc))
1388                                 break;
1389                 }
1390         }
1391
1392         while (!dce4_is_in_vblank(rdev, crtc)) {
1393                 if (i++ % 100 == 0) {
1394                         if (!dce4_is_counter_moving(rdev, crtc))
1395                                 break;
1396                 }
1397         }
1398 }
1399
1400 /**
1401  * evergreen_page_flip - pageflip callback.
1402  *
1403  * @rdev: radeon_device pointer
1404  * @crtc_id: crtc to cleanup pageflip on
1405  * @crtc_base: new address of the crtc (GPU MC address)
1406  *
1407  * Triggers the actual pageflip by updating the primary
1408  * surface base address (evergreen+).
1409  */
1410 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
1411                          bool async)
1412 {
1413         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1414
1415         /* update the scanout addresses */
1416         WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
1417                async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
1418         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1419                upper_32_bits(crtc_base));
1420         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1421                (u32)crtc_base);
1422         /* post the write */
1423         RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1424 }
1425
1426 /**
1427  * evergreen_page_flip_pending - check if page flip is still pending
1428  *
1429  * @rdev: radeon_device pointer
1430  * @crtc_id: crtc to check
1431  *
1432  * Returns the current update pending status.
1433  */
1434 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1435 {
1436         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1437
1438         /* Return current update_pending status: */
1439         return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1440                 EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1441 }
1442
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;	/* result in millidegrees C */

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper: raw half-degree ADC reading adjusted by a
		 * 9-bit trim offset from CG_THERMAL_CTRL (bit 0x100 is
		 * the offset's sign bit).
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		/* other families: decode the ASIC_T field directly */
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* clamp out-of-range readings, sign-extend 9-bit negatives */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;	/* sign-extend */
		} else
			actual_temp = temp & 0xff;

		/* half-degree units -> millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1481
1482 int sumo_get_temp(struct radeon_device *rdev)
1483 {
1484         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1485         int actual_temp = temp - 49;
1486
1487         return actual_temp * 1000;
1488 }
1489
1490 /**
1491  * sumo_pm_init_profile - Initialize power profiles callback.
1492  *
1493  * @rdev: radeon_device pointer
1494  *
1495  * Initialize the power states used in profile mode
1496  * (sumo, trinity, SI).
1497  * Used for profile mode only.
1498  */
1499 void sumo_pm_init_profile(struct radeon_device *rdev)
1500 {
1501         int idx;
1502
1503         /* default */
1504         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1505         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1506         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1507         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1508
1509         /* low,mid sh/mh */
1510         if (rdev->flags & RADEON_IS_MOBILITY)
1511                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1512         else
1513                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1514
1515         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1516         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1517         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1518         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1519
1520         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1521         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1522         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1523         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1524
1525         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1526         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1527         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1528         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1529
1530         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1531         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1532         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1533         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1534
1535         /* high sh/mh */
1536         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1537         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1538         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1539         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1540         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1541                 rdev->pm.power_state[idx].num_clock_modes - 1;
1542
1543         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1544         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1545         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1546         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1547                 rdev->pm.power_state[idx].num_clock_modes - 1;
1548 }
1549
1550 /**
1551  * btc_pm_init_profile - Initialize power profiles callback.
1552  *
1553  * @rdev: radeon_device pointer
1554  *
1555  * Initialize the power states used in profile mode
1556  * (BTC, cayman).
1557  * Used for profile mode only.
1558  */
1559 void btc_pm_init_profile(struct radeon_device *rdev)
1560 {
1561         int idx;
1562
1563         /* default */
1564         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1565         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1566         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1567         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1568         /* starting with BTC, there is one state that is used for both
1569          * MH and SH.  Difference is that we always use the high clock index for
1570          * mclk.
1571          */
1572         if (rdev->flags & RADEON_IS_MOBILITY)
1573                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1574         else
1575                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1576         /* low sh */
1577         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1578         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1579         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1580         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1581         /* mid sh */
1582         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1583         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1584         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1585         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1586         /* high sh */
1587         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1588         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1589         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1590         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1591         /* low mh */
1592         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1593         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1594         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1595         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1596         /* mid mh */
1597         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1598         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1599         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1600         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1601         /* high mh */
1602         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1603         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1604         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1605         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1606 }
1607
/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	/* only software-controlled voltage is handled here */
	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* program vddc only when it differs from the cached value */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			/* take vddci from the HIGH_MH clock mode of the same state */
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* program vddci only when it differs from the cached value */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1655
1656 /**
1657  * evergreen_pm_prepare - pre-power state change callback.
1658  *
1659  * @rdev: radeon_device pointer
1660  *
1661  * Prepare for a power state change (evergreen+).
1662  */
1663 void evergreen_pm_prepare(struct radeon_device *rdev)
1664 {
1665         struct drm_device *ddev = rdev->ddev;
1666         struct drm_crtc *crtc;
1667         struct radeon_crtc *radeon_crtc;
1668         u32 tmp;
1669
1670         /* disable any active CRTCs */
1671         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1672                 radeon_crtc = to_radeon_crtc(crtc);
1673                 if (radeon_crtc->enabled) {
1674                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1675                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1676                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1677                 }
1678         }
1679 }
1680
1681 /**
1682  * evergreen_pm_finish - post-power state change callback.
1683  *
1684  * @rdev: radeon_device pointer
1685  *
1686  * Clean up after a power state change (evergreen+).
1687  */
1688 void evergreen_pm_finish(struct radeon_device *rdev)
1689 {
1690         struct drm_device *ddev = rdev->ddev;
1691         struct drm_crtc *crtc;
1692         struct radeon_crtc *radeon_crtc;
1693         u32 tmp;
1694
1695         /* enable any active CRTCs */
1696         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1697                 radeon_crtc = to_radeon_crtc(crtc);
1698                 if (radeon_crtc->enabled) {
1699                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1700                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1701                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1702                 }
1703         }
1704 }
1705
1706 /**
1707  * evergreen_hpd_sense - hpd sense callback.
1708  *
1709  * @rdev: radeon_device pointer
1710  * @hpd: hpd (hotplug detect) pin
1711  *
1712  * Checks if a digital monitor is connected (evergreen+).
1713  * Returns true if connected, false if not connected.
1714  */
1715 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1716 {
1717         bool connected = false;
1718
1719         switch (hpd) {
1720         case RADEON_HPD_1:
1721                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1722                         connected = true;
1723                 break;
1724         case RADEON_HPD_2:
1725                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1726                         connected = true;
1727                 break;
1728         case RADEON_HPD_3:
1729                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1730                         connected = true;
1731                 break;
1732         case RADEON_HPD_4:
1733                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1734                         connected = true;
1735                 break;
1736         case RADEON_HPD_5:
1737                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1738                         connected = true;
1739                 break;
1740         case RADEON_HPD_6:
1741                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1742                         connected = true;
1743                 break;
1744         default:
1745                 break;
1746         }
1747
1748         return connected;
1749 }
1750
1751 /**
1752  * evergreen_hpd_set_polarity - hpd set polarity callback.
1753  *
1754  * @rdev: radeon_device pointer
1755  * @hpd: hpd (hotplug detect) pin
1756  *
1757  * Set the polarity of the hpd pin (evergreen+).
1758  */
1759 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1760                                 enum radeon_hpd_id hpd)
1761 {
1762         u32 tmp;
1763         bool connected = evergreen_hpd_sense(rdev, hpd);
1764
1765         switch (hpd) {
1766         case RADEON_HPD_1:
1767                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1768                 if (connected)
1769                         tmp &= ~DC_HPDx_INT_POLARITY;
1770                 else
1771                         tmp |= DC_HPDx_INT_POLARITY;
1772                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1773                 break;
1774         case RADEON_HPD_2:
1775                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1776                 if (connected)
1777                         tmp &= ~DC_HPDx_INT_POLARITY;
1778                 else
1779                         tmp |= DC_HPDx_INT_POLARITY;
1780                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1781                 break;
1782         case RADEON_HPD_3:
1783                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1784                 if (connected)
1785                         tmp &= ~DC_HPDx_INT_POLARITY;
1786                 else
1787                         tmp |= DC_HPDx_INT_POLARITY;
1788                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1789                 break;
1790         case RADEON_HPD_4:
1791                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1792                 if (connected)
1793                         tmp &= ~DC_HPDx_INT_POLARITY;
1794                 else
1795                         tmp |= DC_HPDx_INT_POLARITY;
1796                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1797                 break;
1798         case RADEON_HPD_5:
1799                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1800                 if (connected)
1801                         tmp &= ~DC_HPDx_INT_POLARITY;
1802                 else
1803                         tmp |= DC_HPDx_INT_POLARITY;
1804                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1805                         break;
1806         case RADEON_HPD_6:
1807                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1808                 if (connected)
1809                         tmp &= ~DC_HPDx_INT_POLARITY;
1810                 else
1811                         tmp |= DC_HPDx_INT_POLARITY;
1812                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1813                 break;
1814         default:
1815                 break;
1816         }
1817 }
1818
1819 /**
1820  * evergreen_hpd_init - hpd setup callback.
1821  *
1822  * @rdev: radeon_device pointer
1823  *
1824  * Setup the hpd pins used by the card (evergreen+).
1825  * Enable the pin, set the polarity, and enable the hpd interrupts.
1826  */
1827 void evergreen_hpd_init(struct radeon_device *rdev)
1828 {
1829         struct drm_device *dev = rdev->ddev;
1830         struct drm_connector *connector;
1831         unsigned enabled = 0;
1832         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1833                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1834
1835         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1836                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1837
1838                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1839                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1840                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1841                          * aux dp channel on imac and help (but not completely fix)
1842                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1843                          * also avoid interrupt storms during dpms.
1844                          */
1845                         continue;
1846                 }
1847                 switch (radeon_connector->hpd.hpd) {
1848                 case RADEON_HPD_1:
1849                         WREG32(DC_HPD1_CONTROL, tmp);
1850                         break;
1851                 case RADEON_HPD_2:
1852                         WREG32(DC_HPD2_CONTROL, tmp);
1853                         break;
1854                 case RADEON_HPD_3:
1855                         WREG32(DC_HPD3_CONTROL, tmp);
1856                         break;
1857                 case RADEON_HPD_4:
1858                         WREG32(DC_HPD4_CONTROL, tmp);
1859                         break;
1860                 case RADEON_HPD_5:
1861                         WREG32(DC_HPD5_CONTROL, tmp);
1862                         break;
1863                 case RADEON_HPD_6:
1864                         WREG32(DC_HPD6_CONTROL, tmp);
1865                         break;
1866                 default:
1867                         break;
1868                 }
1869                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1870                 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1871                         enabled |= 1 << radeon_connector->hpd.hpd;
1872         }
1873         radeon_irq_kms_enable_hpd(rdev, enabled);
1874 }
1875
1876 /**
1877  * evergreen_hpd_fini - hpd tear down callback.
1878  *
1879  * @rdev: radeon_device pointer
1880  *
1881  * Tear down the hpd pins used by the card (evergreen+).
1882  * Disable the hpd interrupts.
1883  */
1884 void evergreen_hpd_fini(struct radeon_device *rdev)
1885 {
1886         struct drm_device *dev = rdev->ddev;
1887         struct drm_connector *connector;
1888         unsigned disabled = 0;
1889
1890         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1891                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1892                 switch (radeon_connector->hpd.hpd) {
1893                 case RADEON_HPD_1:
1894                         WREG32(DC_HPD1_CONTROL, 0);
1895                         break;
1896                 case RADEON_HPD_2:
1897                         WREG32(DC_HPD2_CONTROL, 0);
1898                         break;
1899                 case RADEON_HPD_3:
1900                         WREG32(DC_HPD3_CONTROL, 0);
1901                         break;
1902                 case RADEON_HPD_4:
1903                         WREG32(DC_HPD4_CONTROL, 0);
1904                         break;
1905                 case RADEON_HPD_5:
1906                         WREG32(DC_HPD5_CONTROL, 0);
1907                         break;
1908                 case RADEON_HPD_6:
1909                         WREG32(DC_HPD6_CONTROL, 0);
1910                         break;
1911                 default:
1912                         break;
1913                 }
1914                 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1915                         disabled |= 1 << radeon_connector->hpd.hpd;
1916         }
1917         radeon_irq_kms_disable_hpd(rdev, disabled);
1918 }
1919
1920 /* watermark setup */
1921
/**
 * evergreen_line_buffer_adjust - program the line buffer split for a CRTC
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: the CRTC whose line buffer allocation is being set
 * @mode: mode on this CRTC (NULL/disabled means no allocation)
 * @other_mode: mode on the paired CRTC sharing the same line buffer
 *
 * Programs DC_LB_MEMORY_SPLIT for the CRTC and, on DCE4.1/DCE5, the DMIF
 * buffer allocation (polling until the hardware reports completion).
 * Returns the line buffer size (in bytes) allocated to this CRTC, or 0 if
 * the controller is disabled.
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			/* paired CRTC also active: split the lb in half */
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			/* paired CRTC idle: take the whole lb */
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		/* poll (bounded by usec_timeout) until the allocation completes */
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	if (radeon_crtc->base.enabled && mode) {
		/* translate the split code into the lb size in bytes;
		 * DCE5 parts have a larger line buffer than earlier DCE4 */
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
2012
2013 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2014 {
2015         u32 tmp = RREG32(MC_SHARED_CHMAP);
2016
2017         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2018         case 0:
2019         default:
2020                 return 1;
2021         case 1:
2022                 return 2;
2023         case 2:
2024                 return 4;
2025         case 3:
2026                 return 8;
2027         }
2028 }
2029
/* Per-CRTC inputs for the evergreen display watermark calculations below. */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
2045
2046 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2047 {
2048         /* Calculate DRAM Bandwidth and the part allocated to display. */
2049         fixed20_12 dram_efficiency; /* 0.7 */
2050         fixed20_12 yclk, dram_channels, bandwidth;
2051         fixed20_12 a;
2052
2053         a.full = dfixed_const(1000);
2054         yclk.full = dfixed_const(wm->yclk);
2055         yclk.full = dfixed_div(yclk, a);
2056         dram_channels.full = dfixed_const(wm->dram_channels * 4);
2057         a.full = dfixed_const(10);
2058         dram_efficiency.full = dfixed_const(7);
2059         dram_efficiency.full = dfixed_div(dram_efficiency, a);
2060         bandwidth.full = dfixed_mul(dram_channels, yclk);
2061         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2062
2063         return dfixed_trunc(bandwidth);
2064 }
2065
2066 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2067 {
2068         /* Calculate DRAM Bandwidth and the part allocated to display. */
2069         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2070         fixed20_12 yclk, dram_channels, bandwidth;
2071         fixed20_12 a;
2072
2073         a.full = dfixed_const(1000);
2074         yclk.full = dfixed_const(wm->yclk);
2075         yclk.full = dfixed_div(yclk, a);
2076         dram_channels.full = dfixed_const(wm->dram_channels * 4);
2077         a.full = dfixed_const(10);
2078         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2079         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2080         bandwidth.full = dfixed_mul(dram_channels, yclk);
2081         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2082
2083         return dfixed_trunc(bandwidth);
2084 }
2085
2086 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2087 {
2088         /* Calculate the display Data return Bandwidth */
2089         fixed20_12 return_efficiency; /* 0.8 */
2090         fixed20_12 sclk, bandwidth;
2091         fixed20_12 a;
2092
2093         a.full = dfixed_const(1000);
2094         sclk.full = dfixed_const(wm->sclk);
2095         sclk.full = dfixed_div(sclk, a);
2096         a.full = dfixed_const(10);
2097         return_efficiency.full = dfixed_const(8);
2098         return_efficiency.full = dfixed_div(return_efficiency, a);
2099         a.full = dfixed_const(32);
2100         bandwidth.full = dfixed_mul(a, sclk);
2101         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2102
2103         return dfixed_trunc(bandwidth);
2104 }
2105
2106 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2107 {
2108         /* Calculate the DMIF Request Bandwidth */
2109         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2110         fixed20_12 disp_clk, bandwidth;
2111         fixed20_12 a;
2112
2113         a.full = dfixed_const(1000);
2114         disp_clk.full = dfixed_const(wm->disp_clk);
2115         disp_clk.full = dfixed_div(disp_clk, a);
2116         a.full = dfixed_const(10);
2117         disp_clk_request_efficiency.full = dfixed_const(8);
2118         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2119         a.full = dfixed_const(32);
2120         bandwidth.full = dfixed_mul(a, disp_clk);
2121         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2122
2123         return dfixed_trunc(bandwidth);
2124 }
2125
2126 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2127 {
2128         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2129         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2130         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2131         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2132
2133         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2134 }
2135
2136 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2137 {
2138         /* Calculate the display mode Average Bandwidth
2139          * DisplayMode should contain the source and destination dimensions,
2140          * timing, etc.
2141          */
2142         fixed20_12 bpp;
2143         fixed20_12 line_time;
2144         fixed20_12 src_width;
2145         fixed20_12 bandwidth;
2146         fixed20_12 a;
2147
2148         a.full = dfixed_const(1000);
2149         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2150         line_time.full = dfixed_div(line_time, a);
2151         bpp.full = dfixed_const(wm->bytes_per_pixel);
2152         src_width.full = dfixed_const(wm->src_width);
2153         bandwidth.full = dfixed_mul(src_width, bpp);
2154         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2155         bandwidth.full = dfixed_div(bandwidth, line_time);
2156
2157         return dfixed_trunc(bandwidth);
2158 }
2159
/* Compute the latency watermark in ns: the worst-case time the display
 * may have to wait for its data, extended if the line buffer cannot be
 * refilled within the active display time.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	/* NOTE(review): divisions below assume available_bandwidth != 0 —
	 * confirm callers never pass parameters that make it zero */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* time for the other heads' outstanding requests to drain */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* scaling/interlacing determines how many source lines feed one
	 * destination line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* bandwidth the display clock can consume: disp_clk * bpp */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fill rate is limited by the smaller of the two */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the worst-case number of source lines */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if refilling takes longer than the active time, pad the latency */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2212
2213 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2214 {
2215         if (evergreen_average_bandwidth(wm) <=
2216             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2217                 return true;
2218         else
2219                 return false;
2220 };
2221
2222 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2223 {
2224         if (evergreen_average_bandwidth(wm) <=
2225             (evergreen_available_bandwidth(wm) / wm->num_heads))
2226                 return true;
2227         else
2228                 return false;
2229 };
2230
2231 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2232 {
2233         u32 lb_partitions = wm->lb_size / wm->src_width;
2234         u32 line_time = wm->active_time + wm->blank_time;
2235         u32 latency_tolerant_lines;
2236         u32 latency_hiding;
2237         fixed20_12 a;
2238
2239         a.full = dfixed_const(1);
2240         if (wm->vsc.full > a.full)
2241                 latency_tolerant_lines = 1;
2242         else {
2243                 if (lb_partitions <= (wm->vtaps + 1))
2244                         latency_tolerant_lines = 1;
2245                 else
2246                         latency_tolerant_lines = 2;
2247         }
2248
2249         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2250
2251         if (evergreen_latency_watermark(wm) <= latency_hiding)
2252                 return true;
2253         else
2254                 return false;
2255 }
2256
/**
 * evergreen_program_watermarks - program display watermarks for one crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program the watermarks for
 * @lb_size: line buffer allocation for this crtc, from
 *           evergreen_line_buffer_adjust()
 * @num_heads: number of currently enabled display heads
 *
 * Computes latency watermarks for both the high and the low dpm clock
 * levels, decides whether display priority must be forced high, and
 * programs the per-pipe arbitration/latency registers and the per-crtc
 * priority marks.  The computed values are also cached on the crtc for
 * later use by the DPM code.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	/* NOTE(review): 'mode' points into the crtc and can never be NULL,
	 * so the last condition is always true; the enabled/num_heads
	 * checks are what actually gate this path.  Also assumes
	 * mode->clock != 0 for an enabled crtc — TODO confirm. */
	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns, line time capped to the 16-bit field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			/* dpm clocks are in 10 kHz units; convert to kHz */
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		/* scaler taps: 2 when the scaler (rmx) is active, else 1 */
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A = watermark_a * clock(MHz) * hsc / 16,
		 * computed in 20.12 fixed point */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* same computation for priority mark B (low clocks) */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2414
2415 /**
2416  * evergreen_bandwidth_update - update display watermarks callback.
2417  *
2418  * @rdev: radeon_device pointer
2419  *
2420  * Update the display watermarks based on the requested mode(s)
2421  * (evergreen+).
2422  */
2423 void evergreen_bandwidth_update(struct radeon_device *rdev)
2424 {
2425         struct drm_display_mode *mode0 = NULL;
2426         struct drm_display_mode *mode1 = NULL;
2427         u32 num_heads = 0, lb_size;
2428         int i;
2429
2430         if (!rdev->mode_info.mode_config_initialized)
2431                 return;
2432
2433         radeon_update_display_priority(rdev);
2434
2435         for (i = 0; i < rdev->num_crtc; i++) {
2436                 if (rdev->mode_info.crtcs[i]->base.enabled)
2437                         num_heads++;
2438         }
2439         for (i = 0; i < rdev->num_crtc; i += 2) {
2440                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2441                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2442                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2443                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2444                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2445                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2446         }
2447 }
2448
2449 /**
2450  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2451  *
2452  * @rdev: radeon_device pointer
2453  *
2454  * Wait for the MC (memory controller) to be idle.
2455  * (evergreen+).
2456  * Returns 0 if the MC is idle, -1 if not.
2457  */
2458 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2459 {
2460         unsigned i;
2461         u32 tmp;
2462
2463         for (i = 0; i < rdev->usec_timeout; i++) {
2464                 /* read MC_STATUS */
2465                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2466                 if (!tmp)
2467                         return 0;
2468                 udelay(1);
2469         }
2470         return -1;
2471 }
2472
2473 /*
2474  * GART
2475  */
2476 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2477 {
2478         unsigned i;
2479         u32 tmp;
2480
2481         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2482
2483         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2484         for (i = 0; i < rdev->usec_timeout; i++) {
2485                 /* read MC_STATUS */
2486                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2487                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2488                 if (tmp == 2) {
2489                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2490                         return;
2491                 }
2492                 if (tmp) {
2493                         return;
2494                 }
2495                 udelay(1);
2496         }
2497 }
2498
/**
 * evergreen_pcie_gart_enable - set up and enable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the L2 cache, the L1 TLBs
 * and VM context 0 to cover the GTT aperture, then flushes the TLB.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion parts use the FUS_* register block for the MD TLBs */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these discrete chips have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* point VM context 0 at the GTT range and the page table base
	 * (all addresses are programmed in 4k pages, hence >> 12) */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faults in context 0 are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2556
/**
 * evergreen_pcie_gart_disable - disable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Turns off both VM contexts, reprograms the L2 cache and L1 TLBs with
 * the enable bits cleared, and unpins the GART page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache (ENABLE_L2_CACHE deliberately not set) */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (ENABLE_L1_TLB deliberately not set) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2581
/* Tear down the PCIE GART: disable it, free the VRAM page table,
 * then release the common gart bookkeeping. Order matters — the
 * hardware must stop referencing the table before it is freed. */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2588
2589
/*
 * Configure the MC for AGP operation: the L2 cache and L1 TLBs are
 * programmed like in the GART path, but both VM contexts are left
 * disabled so all accesses pass through untranslated.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page table translation for AGP */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2615
/* Register block offsets for each DIG (digital encoder) instance,
 * indexed by DIG number. */
static const unsigned ni_dig_offsets[] =
{
	NI_DIG0_REGISTER_OFFSET,
	NI_DIG1_REGISTER_OFFSET,
	NI_DIG2_REGISTER_OFFSET,
	NI_DIG3_REGISTER_OFFSET,
	NI_DIG4_REGISTER_OFFSET,
	NI_DIG5_REGISTER_OFFSET
};

/* UNIPHY transmitter control register offsets, indexed to match
 * ni_dig_offsets (one transmitter per DIG). */
static const unsigned ni_tx_offsets[] =
{
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
};

/* DP register block offsets, indexed by dig_fe number (see
 * evergreen_blank_dp_output()). */
static const unsigned evergreen_dp_offsets[] =
{
	EVERGREEN_DP0_REGISTER_OFFSET,
	EVERGREEN_DP1_REGISTER_OFFSET,
	EVERGREEN_DP2_REGISTER_OFFSET,
	EVERGREEN_DP3_REGISTER_OFFSET,
	EVERGREEN_DP4_REGISTER_OFFSET,
	EVERGREEN_DP5_REGISTER_OFFSET
};
2645
2646
/*
 * Assumes EVERGREEN_CRTC_MASTER_EN is enabled for the requested crtc.
 * We walk from crtc to connector, which is not reliable since the lookup
 * should really go in the opposite direction. If the crtc is enabled,
 * find the dig_fe which selects this crtc and ensure that it is enabled.
 * If such a dig_fe is found, then find the dig_be which selects the found
 * dig_fe and ensure that it is enabled and in DP_SST mode.
 * If UNIPHY_PLL_CONTROL1 is enabled, we should disconnect the timing
 * from the dp symbol clock.
 */
/**
 * evergreen_is_dp_sst_stream_enabled - check whether a crtc drives an
 * active DP SST stream
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 * @ret_dig_fe: on success, set to the index of the dig_fe driving the crtc
 *
 * Scans all dig front-ends for one that is running and sourced from
 * @crtc_id, then scans all dig back-ends for one in DP SST mode that
 * selects that front-end and whose encoder and uniphy PLL are enabled.
 * Returns true (and fills *ret_dig_fe) if such a stream is found.
 */
static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
					       unsigned crtc_id, unsigned *ret_dig_fe)
{
	unsigned i;
	unsigned dig_fe;
	unsigned dig_be;
	unsigned dig_en_be;
	unsigned uniphy_pll;
	unsigned digs_fe_selected;
	unsigned dig_be_mode;
	unsigned dig_fe_mask;	/* only valid once found_crtc is true */
	bool is_enabled = false;
	bool found_crtc = false;

	/* loop through all running dig_fe to find selected crtc */
	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
			/* found running pipe */
			found_crtc = true;
			dig_fe_mask = 1 << i;
			dig_fe = i;	/* reuse dig_fe as the fe index */
			break;
		}
	}

	if (found_crtc) {
		/* loop through all running dig_be to find selected dig_fe */
		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
			/* if dig_fe_selected by dig_be? */
			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
			/* NOTE(review): mode is extracted with the FE macro;
			 * presumably the mode field layout matches — confirm
			 * against the register spec. */
			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
			if (dig_fe_mask &  digs_fe_selected &&
			    /* if dig_be in sst mode? */
			    dig_be_mode == NI_DIG_BE_DPSST) {
				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
						   ni_dig_offsets[i]);
				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
						    ni_tx_offsets[i]);
				/* dig_be enable and tx is running */
				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
					is_enabled = true;
					*ret_dig_fe = dig_fe;
					break;
				}
			}
		}
	}

	return is_enabled;
}
2712
/*
 * Blank the dig when it is in DP SST mode; in that mode the dig
 * ignores the crtc timing.
 */
2717 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2718                                       unsigned dig_fe)
2719 {
2720         unsigned stream_ctrl;
2721         unsigned fifo_ctrl;
2722         unsigned counter = 0;
2723
2724         if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2725                 DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2726                 return;
2727         }
2728
2729         stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2730                              evergreen_dp_offsets[dig_fe]);
2731         if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2732                 DRM_ERROR("dig %d , should be enable\n", dig_fe);
2733                 return;
2734         }
2735
2736         stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2737         WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2738                evergreen_dp_offsets[dig_fe], stream_ctrl);
2739
2740         stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2741                              evergreen_dp_offsets[dig_fe]);
2742         while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2743                 msleep(1);
2744                 counter++;
2745                 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2746                                      evergreen_dp_offsets[dig_fe]);
2747         }
2748         if (counter >= 32 )
2749                 DRM_ERROR("counter exceeds %d\n", counter);
2750
2751         fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2752         fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2753         WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2754
2755 }
2756
/**
 * evergreen_mc_stop - stop MC client activity before reprogramming the MC
 *
 * @rdev: radeon_device pointer
 * @save: evergreen_mc_save struct that receives the state to restore later
 *
 * Saves the VGA controller state, blanks the display controllers, blacks
 * out the MC and locks the double buffered display registers so the MC
 * can be safely reprogrammed.  Paired with evergreen_mc_resume().
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;
	unsigned dig_fe;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via the crtc blank control */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
			/*
			 * We should disable the dig if it drives a dp sst
			 * stream, but we are in radeon_device_init and the
			 * display topology is unknown (it only becomes
			 * available after radeon_modeset_init).
			 * radeon_atom_encoder_dpms_dig would do the job if
			 * we could initialize it properly; for now do it
			 * manually.
			 */
			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
				evergreen_blank_dp_output(rdev, dig_fe);
			/*we could remove 6 lines below*/
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* mark disabled so resume skips this crtc (see hack above) */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2854
/**
 * evergreen_mc_resume - restore MC client activity after reprogramming
 *
 * @rdev: radeon_device pointer
 * @save: evergreen_mc_save struct filled by evergreen_mc_stop()
 *
 * Points the crtc scanout addresses at the (possibly relocated) VRAM
 * base, unlocks the double buffered display registers, lifts the MC
 * blackout, unblanks the displays and restores the saved VGA state.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force MASTER_UPDATE_MODE to 3 (update at vupdate) */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the surface address update has latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via the crtc blank control */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2943
/**
 * evergreen_mc_program - program the memory controller address ranges
 *
 * @rdev: radeon_device pointer
 *
 * Clears the HDP tiling/surface registers, halts MC clients
 * (evergreen_mc_stop), programs the system aperture, VRAM (FB)
 * location and AGP aperture registers, then restarts the MC clients
 * (evergreen_mc_resume) and finally turns off the VGA renderer so it
 * cannot overwrite driver-owned VRAM.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	/* stop MC clients (display etc.) before reprogramming the apertures */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must span both VRAM and the AGP aperture */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* accesses outside the aperture are redirected to the scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start in bits 15:0, end in bits 31:16, 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: program an empty AGP aperture (BOT == TOP) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
3020
3021 /*
3022  * CP.
3023  */
3024 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3025 {
3026         struct radeon_ring *ring = &rdev->ring[ib->ring];
3027         u32 next_rptr;
3028
3029         /* set to DX10/11 mode */
3030         radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3031         radeon_ring_write(ring, 1);
3032
3033         if (ring->rptr_save_reg) {
3034                 next_rptr = ring->wptr + 3 + 4;
3035                 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3036                 radeon_ring_write(ring, ((ring->rptr_save_reg - 
3037                                           PACKET3_SET_CONFIG_REG_START) >> 2));
3038                 radeon_ring_write(ring, next_rptr);
3039         } else if (rdev->wb.enabled) {
3040                 next_rptr = ring->wptr + 5 + 4;
3041                 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3042                 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3043                 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3044                 radeon_ring_write(ring, next_rptr);
3045                 radeon_ring_write(ring, 0);
3046         }
3047
3048         radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3049         radeon_ring_write(ring,
3050 #ifdef __BIG_ENDIAN
3051                           (2 << 0) |
3052 #endif
3053                           (ib->gpu_addr & 0xFFFFFFFC));
3054         radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3055         radeon_ring_write(ring, ib->length_dw);
3056 }
3057
3058
/**
 * evergreen_cp_load_microcode - load the CP PFP and ME microcode
 *
 * @rdev: radeon_device pointer
 *
 * Stops the CP, then uploads the prefetch parser (PFP) and micro
 * engine (ME) firmware images into the CP ucode RAMs one dword at a
 * time.  The firmware blobs are stored big-endian, so each dword is
 * converted with be32_to_cpup() before the register write.  The
 * RAM address pointers are reset to 0 afterwards.
 *
 * Returns 0 on success, -EINVAL if either firmware was not loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP firmware */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME firmware */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset the ucode RAM address pointers */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
3090
/**
 * evergreen_cp_start - initialize the CP and emit the clear state
 *
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet on the gfx ring, enables the micro
 * engine via CP_ME_CNTL, then emits the default golden register state
 * (evergreen_default_state) between PREAMBLE_BEGIN/END_CLEAR_STATE
 * markers, followed by a CLEAR_STATE packet and a few fixed state
 * packets (SQ vertex base, const clears, VGT_VERTEX_REUSE_BLOCK_CNTL).
 *
 * Returns 0 on success, or the error from radeon_ring_lock().
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords for the ME_INITIALIZE packet below */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* enable the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state plus 19 dwords of extra packets below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
3156
/**
 * evergreen_cp_resume - bring up the CP ring buffer
 *
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP (and the PA/SH/VGT/SPI/SX blocks that must be
 * reset together with it), programs the ring buffer size, read/write
 * pointers, writeback addresses and ring base, then starts the CP
 * (evergreen_cp_start) and runs a ring test.
 *
 * Returns 0 on success, or the radeon_ring_test() error (in which
 * case ring->ready is left false).
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to post the write */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of the size in dwords/8) */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers
	 * (RB_RPTR_WR_ENA allows writing CP_RB_RPTR_WR directly) */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: CP must not update rptr in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	/* restore CP_RB_CNTL (clears RB_RPTR_WR_ENA) */
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3223
3224 /*
3225  * Core functions
3226  */
3227 static void evergreen_gpu_init(struct radeon_device *rdev)
3228 {
3229         u32 gb_addr_config;
3230         u32 mc_shared_chmap, mc_arb_ramcfg;
3231         u32 sx_debug_1;
3232         u32 smx_dc_ctl0;
3233         u32 sq_config;
3234         u32 sq_lds_resource_mgmt;
3235         u32 sq_gpr_resource_mgmt_1;
3236         u32 sq_gpr_resource_mgmt_2;
3237         u32 sq_gpr_resource_mgmt_3;
3238         u32 sq_thread_resource_mgmt;
3239         u32 sq_thread_resource_mgmt_2;
3240         u32 sq_stack_resource_mgmt_1;
3241         u32 sq_stack_resource_mgmt_2;
3242         u32 sq_stack_resource_mgmt_3;
3243         u32 vgt_cache_invalidation;
3244         u32 hdp_host_path_cntl, tmp;
3245         u32 disabled_rb_mask;
3246         int i, j, ps_thread_count;
3247
3248         switch (rdev->family) {
3249         case CHIP_CYPRESS:
3250         case CHIP_HEMLOCK:
3251                 rdev->config.evergreen.num_ses = 2;
3252                 rdev->config.evergreen.max_pipes = 4;
3253                 rdev->config.evergreen.max_tile_pipes = 8;
3254                 rdev->config.evergreen.max_simds = 10;
3255                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3256                 rdev->config.evergreen.max_gprs = 256;
3257                 rdev->config.evergreen.max_threads = 248;
3258                 rdev->config.evergreen.max_gs_threads = 32;
3259                 rdev->config.evergreen.max_stack_entries = 512;
3260                 rdev->config.evergreen.sx_num_of_sets = 4;
3261                 rdev->config.evergreen.sx_max_export_size = 256;
3262                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3263                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3264                 rdev->config.evergreen.max_hw_contexts = 8;
3265                 rdev->config.evergreen.sq_num_cf_insts = 2;
3266
3267                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3268                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3269                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3270                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3271                 break;
3272         case CHIP_JUNIPER:
3273                 rdev->config.evergreen.num_ses = 1;
3274                 rdev->config.evergreen.max_pipes = 4;
3275                 rdev->config.evergreen.max_tile_pipes = 4;
3276                 rdev->config.evergreen.max_simds = 10;
3277                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3278                 rdev->config.evergreen.max_gprs = 256;
3279                 rdev->config.evergreen.max_threads = 248;
3280                 rdev->config.evergreen.max_gs_threads = 32;
3281                 rdev->config.evergreen.max_stack_entries = 512;
3282                 rdev->config.evergreen.sx_num_of_sets = 4;
3283                 rdev->config.evergreen.sx_max_export_size = 256;
3284                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3285                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3286                 rdev->config.evergreen.max_hw_contexts = 8;
3287                 rdev->config.evergreen.sq_num_cf_insts = 2;
3288
3289                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3290                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3291                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3292                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3293                 break;
3294         case CHIP_REDWOOD:
3295                 rdev->config.evergreen.num_ses = 1;
3296                 rdev->config.evergreen.max_pipes = 4;
3297                 rdev->config.evergreen.max_tile_pipes = 4;
3298                 rdev->config.evergreen.max_simds = 5;
3299                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3300                 rdev->config.evergreen.max_gprs = 256;
3301                 rdev->config.evergreen.max_threads = 248;
3302                 rdev->config.evergreen.max_gs_threads = 32;
3303                 rdev->config.evergreen.max_stack_entries = 256;
3304                 rdev->config.evergreen.sx_num_of_sets = 4;
3305                 rdev->config.evergreen.sx_max_export_size = 256;
3306                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3307                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3308                 rdev->config.evergreen.max_hw_contexts = 8;
3309                 rdev->config.evergreen.sq_num_cf_insts = 2;
3310
3311                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3312                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3313                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3314                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3315                 break;
3316         case CHIP_CEDAR:
3317         default:
3318                 rdev->config.evergreen.num_ses = 1;
3319                 rdev->config.evergreen.max_pipes = 2;
3320                 rdev->config.evergreen.max_tile_pipes = 2;
3321                 rdev->config.evergreen.max_simds = 2;
3322                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3323                 rdev->config.evergreen.max_gprs = 256;
3324                 rdev->config.evergreen.max_threads = 192;
3325                 rdev->config.evergreen.max_gs_threads = 16;
3326                 rdev->config.evergreen.max_stack_entries = 256;
3327                 rdev->config.evergreen.sx_num_of_sets = 4;
3328                 rdev->config.evergreen.sx_max_export_size = 128;
3329                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3330                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3331                 rdev->config.evergreen.max_hw_contexts = 4;
3332                 rdev->config.evergreen.sq_num_cf_insts = 1;
3333
3334                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3335                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3336                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3337                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3338                 break;
3339         case CHIP_PALM:
3340                 rdev->config.evergreen.num_ses = 1;
3341                 rdev->config.evergreen.max_pipes = 2;
3342                 rdev->config.evergreen.max_tile_pipes = 2;
3343                 rdev->config.evergreen.max_simds = 2;
3344                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3345                 rdev->config.evergreen.max_gprs = 256;
3346                 rdev->config.evergreen.max_threads = 192;
3347                 rdev->config.evergreen.max_gs_threads = 16;
3348                 rdev->config.evergreen.max_stack_entries = 256;
3349                 rdev->config.evergreen.sx_num_of_sets = 4;
3350                 rdev->config.evergreen.sx_max_export_size = 128;
3351                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3352                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3353                 rdev->config.evergreen.max_hw_contexts = 4;
3354                 rdev->config.evergreen.sq_num_cf_insts = 1;
3355
3356                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3357                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3358                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3359                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3360                 break;
3361         case CHIP_SUMO:
3362                 rdev->config.evergreen.num_ses = 1;
3363                 rdev->config.evergreen.max_pipes = 4;
3364                 rdev->config.evergreen.max_tile_pipes = 4;
3365                 if (rdev->pdev->device == 0x9648)
3366                         rdev->config.evergreen.max_simds = 3;
3367                 else if ((rdev->pdev->device == 0x9647) ||
3368                          (rdev->pdev->device == 0x964a))
3369                         rdev->config.evergreen.max_simds = 4;
3370                 else
3371                         rdev->config.evergreen.max_simds = 5;
3372                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3373                 rdev->config.evergreen.max_gprs = 256;
3374                 rdev->config.evergreen.max_threads = 248;
3375                 rdev->config.evergreen.max_gs_threads = 32;
3376                 rdev->config.evergreen.max_stack_entries = 256;
3377                 rdev->config.evergreen.sx_num_of_sets = 4;
3378                 rdev->config.evergreen.sx_max_export_size = 256;
3379                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3380                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3381                 rdev->config.evergreen.max_hw_contexts = 8;
3382                 rdev->config.evergreen.sq_num_cf_insts = 2;
3383
3384                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3385                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3386                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3387                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3388                 break;
3389         case CHIP_SUMO2:
3390                 rdev->config.evergreen.num_ses = 1;
3391                 rdev->config.evergreen.max_pipes = 4;
3392                 rdev->config.evergreen.max_tile_pipes = 4;
3393                 rdev->config.evergreen.max_simds = 2;
3394                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3395                 rdev->config.evergreen.max_gprs = 256;
3396                 rdev->config.evergreen.max_threads = 248;
3397                 rdev->config.evergreen.max_gs_threads = 32;
3398                 rdev->config.evergreen.max_stack_entries = 512;
3399                 rdev->config.evergreen.sx_num_of_sets = 4;
3400                 rdev->config.evergreen.sx_max_export_size = 256;
3401                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3402                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3403                 rdev->config.evergreen.max_hw_contexts = 4;
3404                 rdev->config.evergreen.sq_num_cf_insts = 2;
3405
3406                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3407                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3408                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3409                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3410                 break;
3411         case CHIP_BARTS:
3412                 rdev->config.evergreen.num_ses = 2;
3413                 rdev->config.evergreen.max_pipes = 4;
3414                 rdev->config.evergreen.max_tile_pipes = 8;
3415                 rdev->config.evergreen.max_simds = 7;
3416                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3417                 rdev->config.evergreen.max_gprs = 256;
3418                 rdev->config.evergreen.max_threads = 248;
3419                 rdev->config.evergreen.max_gs_threads = 32;
3420                 rdev->config.evergreen.max_stack_entries = 512;
3421                 rdev->config.evergreen.sx_num_of_sets = 4;
3422                 rdev->config.evergreen.sx_max_export_size = 256;
3423                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3424                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3425                 rdev->config.evergreen.max_hw_contexts = 8;
3426                 rdev->config.evergreen.sq_num_cf_insts = 2;
3427
3428                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3429                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3430                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3431                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3432                 break;
3433         case CHIP_TURKS:
3434                 rdev->config.evergreen.num_ses = 1;
3435                 rdev->config.evergreen.max_pipes = 4;
3436                 rdev->config.evergreen.max_tile_pipes = 4;
3437                 rdev->config.evergreen.max_simds = 6;
3438                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3439                 rdev->config.evergreen.max_gprs = 256;
3440                 rdev->config.evergreen.max_threads = 248;
3441                 rdev->config.evergreen.max_gs_threads = 32;
3442                 rdev->config.evergreen.max_stack_entries = 256;
3443                 rdev->config.evergreen.sx_num_of_sets = 4;
3444                 rdev->config.evergreen.sx_max_export_size = 256;
3445                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3446                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3447                 rdev->config.evergreen.max_hw_contexts = 8;
3448                 rdev->config.evergreen.sq_num_cf_insts = 2;
3449
3450                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3451                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3452                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3453                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3454                 break;
3455         case CHIP_CAICOS:
3456                 rdev->config.evergreen.num_ses = 1;
3457                 rdev->config.evergreen.max_pipes = 2;
3458                 rdev->config.evergreen.max_tile_pipes = 2;
3459                 rdev->config.evergreen.max_simds = 2;
3460                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3461                 rdev->config.evergreen.max_gprs = 256;
3462                 rdev->config.evergreen.max_threads = 192;
3463                 rdev->config.evergreen.max_gs_threads = 16;
3464                 rdev->config.evergreen.max_stack_entries = 256;
3465                 rdev->config.evergreen.sx_num_of_sets = 4;
3466                 rdev->config.evergreen.sx_max_export_size = 128;
3467                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3468                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3469                 rdev->config.evergreen.max_hw_contexts = 4;
3470                 rdev->config.evergreen.sq_num_cf_insts = 1;
3471
3472                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3473                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3474                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3475                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3476                 break;
3477         }
3478
3479         /* Initialize HDP */
3480         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3481                 WREG32((0x2c14 + j), 0x00000000);
3482                 WREG32((0x2c18 + j), 0x00000000);
3483                 WREG32((0x2c1c + j), 0x00000000);
3484                 WREG32((0x2c20 + j), 0x00000000);
3485                 WREG32((0x2c24 + j), 0x00000000);
3486         }
3487
3488         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3489         WREG32(SRBM_INT_CNTL, 0x1);
3490         WREG32(SRBM_INT_ACK, 0x1);
3491
3492         evergreen_fix_pci_max_read_req_size(rdev);
3493
3494         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3495         if ((rdev->family == CHIP_PALM) ||
3496             (rdev->family == CHIP_SUMO) ||
3497             (rdev->family == CHIP_SUMO2))
3498                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3499         else
3500                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3501
3502         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3503          * not have bank info, so create a custom tiling dword.
3504          * bits 3:0   num_pipes
3505          * bits 7:4   num_banks
3506          * bits 11:8  group_size
3507          * bits 15:12 row_size
3508          */
3509         rdev->config.evergreen.tile_config = 0;
3510         switch (rdev->config.evergreen.max_tile_pipes) {
3511         case 1:
3512         default:
3513                 rdev->config.evergreen.tile_config |= (0 << 0);
3514                 break;
3515         case 2:
3516                 rdev->config.evergreen.tile_config |= (1 << 0);
3517                 break;
3518         case 4:
3519                 rdev->config.evergreen.tile_config |= (2 << 0);
3520                 break;
3521         case 8:
3522                 rdev->config.evergreen.tile_config |= (3 << 0);
3523                 break;
3524         }
3525         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3526         if (rdev->flags & RADEON_IS_IGP)
3527                 rdev->config.evergreen.tile_config |= 1 << 4;
3528         else {
3529                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3530                 case 0: /* four banks */
3531                         rdev->config.evergreen.tile_config |= 0 << 4;
3532                         break;
3533                 case 1: /* eight banks */
3534                         rdev->config.evergreen.tile_config |= 1 << 4;
3535                         break;
3536                 case 2: /* sixteen banks */
3537                 default:
3538                         rdev->config.evergreen.tile_config |= 2 << 4;
3539                         break;
3540                 }
3541         }
3542         rdev->config.evergreen.tile_config |= 0 << 8;
3543         rdev->config.evergreen.tile_config |=
3544                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3545
3546         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3547                 u32 efuse_straps_4;
3548                 u32 efuse_straps_3;
3549
3550                 efuse_straps_4 = RREG32_RCU(0x204);
3551                 efuse_straps_3 = RREG32_RCU(0x203);
3552                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3553                       ((efuse_straps_3 & 0xf0000000) >> 28));
3554         } else {
3555                 tmp = 0;
3556                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3557                         u32 rb_disable_bitmap;
3558
3559                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3560                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3561                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3562                         tmp <<= 4;
3563                         tmp |= rb_disable_bitmap;
3564                 }
3565         }
3566         /* enabled rb are just the one not disabled :) */
3567         disabled_rb_mask = tmp;
3568         tmp = 0;
3569         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3570                 tmp |= (1 << i);
3571         /* if all the backends are disabled, fix it up here */
3572         if ((disabled_rb_mask & tmp) == tmp) {
3573                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3574                         disabled_rb_mask &= ~(1 << i);
3575         }
3576
3577         for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3578                 u32 simd_disable_bitmap;
3579
3580                 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3581                 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3582                 simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3583                 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3584                 tmp <<= 16;
3585                 tmp |= simd_disable_bitmap;
3586         }
3587         rdev->config.evergreen.active_simds = hweight32(~tmp);
3588
3589         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3590         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3591
3592         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3593         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3594         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3595         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3596         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3597         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3598         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3599
3600         if ((rdev->config.evergreen.max_backends == 1) &&
3601             (rdev->flags & RADEON_IS_IGP)) {
3602                 if ((disabled_rb_mask & 3) == 1) {
3603                         /* RB0 disabled, RB1 enabled */
3604                         tmp = 0x11111111;
3605                 } else {
3606                         /* RB1 disabled, RB0 enabled */
3607                         tmp = 0x00000000;
3608                 }
3609         } else {
3610                 tmp = gb_addr_config & NUM_PIPES_MASK;
3611                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3612                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3613         }
3614         WREG32(GB_BACKEND_MAP, tmp);
3615
3616         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3617         WREG32(CGTS_TCC_DISABLE, 0);
3618         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3619         WREG32(CGTS_USER_TCC_DISABLE, 0);
3620
3621         /* set HW defaults for 3D engine */
3622         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3623                                      ROQ_IB2_START(0x2b)));
3624
3625         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3626
3627         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3628                              SYNC_GRADIENT |
3629                              SYNC_WALKER |
3630                              SYNC_ALIGNER));
3631
3632         sx_debug_1 = RREG32(SX_DEBUG_1);
3633         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3634         WREG32(SX_DEBUG_1, sx_debug_1);
3635
3636
3637         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3638         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3639         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3640         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3641
3642         if (rdev->family <= CHIP_SUMO2)
3643                 WREG32(SMX_SAR_CTL0, 0x00010000);
3644
3645         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3646                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3647                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3648
3649         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3650                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3651                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3652
3653         WREG32(VGT_NUM_INSTANCES, 1);
3654         WREG32(SPI_CONFIG_CNTL, 0);
3655         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3656         WREG32(CP_PERFMON_CNTL, 0);
3657
3658         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3659                                   FETCH_FIFO_HIWATER(0x4) |
3660                                   DONE_FIFO_HIWATER(0xe0) |
3661                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3662
3663         sq_config = RREG32(SQ_CONFIG);
3664         sq_config &= ~(PS_PRIO(3) |
3665                        VS_PRIO(3) |
3666                        GS_PRIO(3) |
3667                        ES_PRIO(3));
3668         sq_config |= (VC_ENABLE |
3669                       EXPORT_SRC_C |
3670                       PS_PRIO(0) |
3671                       VS_PRIO(1) |
3672                       GS_PRIO(2) |
3673                       ES_PRIO(3));
3674
3675         switch (rdev->family) {
3676         case CHIP_CEDAR:
3677         case CHIP_PALM:
3678         case CHIP_SUMO:
3679         case CHIP_SUMO2:
3680         case CHIP_CAICOS:
3681                 /* no vertex cache */
3682                 sq_config &= ~VC_ENABLE;
3683                 break;
3684         default:
3685                 break;
3686         }
3687
3688         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3689
3690         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3691         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3692         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3693         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3694         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3695         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3696         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3697
3698         switch (rdev->family) {
3699         case CHIP_CEDAR:
3700         case CHIP_PALM:
3701         case CHIP_SUMO:
3702         case CHIP_SUMO2:
3703                 ps_thread_count = 96;
3704                 break;
3705         default:
3706                 ps_thread_count = 128;
3707                 break;
3708         }
3709
3710         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3711         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3712         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3713         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3714         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3715         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3716
3717         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3718         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3719         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3720         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3721         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3722         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3723
3724         WREG32(SQ_CONFIG, sq_config);
3725         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3726         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3727         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3728         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3729         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3730         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3731         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3732         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3733         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3734         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3735
3736         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3737                                           FORCE_EOV_MAX_REZ_CNT(255)));
3738
3739         switch (rdev->family) {
3740         case CHIP_CEDAR:
3741         case CHIP_PALM:
3742         case CHIP_SUMO:
3743         case CHIP_SUMO2:
3744         case CHIP_CAICOS:
3745                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3746                 break;
3747         default:
3748                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3749                 break;
3750         }
3751         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3752         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3753
3754         WREG32(VGT_GS_VERTEX_REUSE, 16);
3755         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3756         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3757
3758         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3759         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3760
3761         WREG32(CB_PERF_CTR0_SEL_0, 0);
3762         WREG32(CB_PERF_CTR0_SEL_1, 0);
3763         WREG32(CB_PERF_CTR1_SEL_0, 0);
3764         WREG32(CB_PERF_CTR1_SEL_1, 0);
3765         WREG32(CB_PERF_CTR2_SEL_0, 0);
3766         WREG32(CB_PERF_CTR2_SEL_1, 0);
3767         WREG32(CB_PERF_CTR3_SEL_0, 0);
3768         WREG32(CB_PERF_CTR3_SEL_1, 0);
3769
3770         /* clear render buffer base addresses */
3771         WREG32(CB_COLOR0_BASE, 0);
3772         WREG32(CB_COLOR1_BASE, 0);
3773         WREG32(CB_COLOR2_BASE, 0);
3774         WREG32(CB_COLOR3_BASE, 0);
3775         WREG32(CB_COLOR4_BASE, 0);
3776         WREG32(CB_COLOR5_BASE, 0);
3777         WREG32(CB_COLOR6_BASE, 0);
3778         WREG32(CB_COLOR7_BASE, 0);
3779         WREG32(CB_COLOR8_BASE, 0);
3780         WREG32(CB_COLOR9_BASE, 0);
3781         WREG32(CB_COLOR10_BASE, 0);
3782         WREG32(CB_COLOR11_BASE, 0);
3783
3784         /* set the shader const cache sizes to 0 */
3785         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3786                 WREG32(i, 0);
3787         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3788                 WREG32(i, 0);
3789
3790         tmp = RREG32(HDP_MISC_CNTL);
3791         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3792         WREG32(HDP_MISC_CNTL, tmp);
3793
3794         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3795         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3796
3797         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3798
3799         udelay(50);
3800
3801 }
3802
/**
 * evergreen_mc_init - probe the memory controller configuration
 *
 * @rdev: radeon_device pointer
 *
 * Reads the memory-controller registers to work out channel width,
 * channel count and VRAM size, records the PCI aperture, and hands the
 * result to the common VRAM/GTT placement code.  Always returns 0.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* Fusion IGPs (PALM/SUMO/SUMO2) expose the RAM config through a
	 * different register than discrete boards.
	 */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* width of a single memory channel in bits */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* number of populated memory channels (field encodes 1/2/4/8) */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	/* CPU-visible VRAM is limited by the PCI BAR size */
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3861
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM/CP/DMA status registers via dev_info so a hang can
 * be diagnosed from the kernel log.  Read-only; no hardware state is
 * modified.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* Cayman and newer have a second DMA engine at a +0x800 offset */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3889
3890 bool evergreen_is_display_hung(struct radeon_device *rdev)
3891 {
3892         u32 crtc_hung = 0;
3893         u32 crtc_status[6];
3894         u32 i, j, tmp;
3895
3896         for (i = 0; i < rdev->num_crtc; i++) {
3897                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3898                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3899                         crtc_hung |= (1 << i);
3900                 }
3901         }
3902
3903         for (j = 0; j < 10; j++) {
3904                 for (i = 0; i < rdev->num_crtc; i++) {
3905                         if (crtc_hung & (1 << i)) {
3906                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3907                                 if (tmp != crtc_status[i])
3908                                         crtc_hung &= ~(1 << i);
3909                         }
3910                 }
3911                 if (crtc_hung == 0)
3912                         return false;
3913                 udelay(100);
3914         }
3915
3916         return true;
3917 }
3918
3919 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3920 {
3921         u32 reset_mask = 0;
3922         u32 tmp;
3923
3924         /* GRBM_STATUS */
3925         tmp = RREG32(GRBM_STATUS);
3926         if (tmp & (PA_BUSY | SC_BUSY |
3927                    SH_BUSY | SX_BUSY |
3928                    TA_BUSY | VGT_BUSY |
3929                    DB_BUSY | CB_BUSY |
3930                    SPI_BUSY | VGT_BUSY_NO_DMA))
3931                 reset_mask |= RADEON_RESET_GFX;
3932
3933         if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3934                    CP_BUSY | CP_COHERENCY_BUSY))
3935                 reset_mask |= RADEON_RESET_CP;
3936
3937         if (tmp & GRBM_EE_BUSY)
3938                 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3939
3940         /* DMA_STATUS_REG */
3941         tmp = RREG32(DMA_STATUS_REG);
3942         if (!(tmp & DMA_IDLE))
3943                 reset_mask |= RADEON_RESET_DMA;
3944
3945         /* SRBM_STATUS2 */
3946         tmp = RREG32(SRBM_STATUS2);
3947         if (tmp & DMA_BUSY)
3948                 reset_mask |= RADEON_RESET_DMA;
3949
3950         /* SRBM_STATUS */
3951         tmp = RREG32(SRBM_STATUS);
3952         if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3953                 reset_mask |= RADEON_RESET_RLC;
3954
3955         if (tmp & IH_BUSY)
3956                 reset_mask |= RADEON_RESET_IH;
3957
3958         if (tmp & SEM_BUSY)
3959                 reset_mask |= RADEON_RESET_SEM;
3960
3961         if (tmp & GRBM_RQ_PENDING)
3962                 reset_mask |= RADEON_RESET_GRBM;
3963
3964         if (tmp & VMC_BUSY)
3965                 reset_mask |= RADEON_RESET_VMC;
3966
3967         if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3968                    MCC_BUSY | MCD_BUSY))
3969                 reset_mask |= RADEON_RESET_MC;
3970
3971         if (evergreen_is_display_hung(rdev))
3972                 reset_mask |= RADEON_RESET_DISPLAY;
3973
3974         /* VM_L2_STATUS */
3975         tmp = RREG32(VM_L2_STATUS);
3976         if (tmp & L2_BUSY)
3977                 reset_mask |= RADEON_RESET_VMC;
3978
3979         /* Skip MC reset as it's mostly likely not hung, just busy */
3980         if (reset_mask & RADEON_RESET_MC) {
3981                 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3982                 reset_mask &= ~RADEON_RESET_MC;
3983         }
3984
3985         return reset_mask;
3986 }
3987
/**
 * evergreen_gpu_soft_reset - soft-reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* bitmask of blocks to reset
 *
 * Halts the CP and (if requested) the DMA engine, stops memory-controller
 * client access, then pulses the matching bits in GRBM_SOFT_RESET and
 * SRBM_SOFT_RESET before resuming the MC.  The sequence is order
 * sensitive: engines are quiesced before the MC is stopped, and the MC
 * is only resumed after the reset bits have been deasserted.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop MC client access before touching the reset registers */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM (gfx block) reset bits ... */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* ... and SRBM (system block) reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* IGPs share the MC with the CPU; never soft-reset it there */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* read back so the write reaches the hardware before delaying */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		/* deassert the reset bits */
		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		/* read back so the write reaches the hardware before delaying */
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		/* deassert the reset bits */
		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
4101
/**
 * evergreen_gpu_pci_config_reset - full ASIC reset via PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier-weight reset than the soft reset: quiesces the CP, DMA and
 * RLC, switches the clocks to bypass, disables bus mastering and MC
 * client access, then triggers a reset through PCI config space and
 * polls until the ASIC responds to register reads again.
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset; CONFIG_MEMSIZE reads as all
	 * ones while the chip is still in reset
	 */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
4143
4144 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4145 {
4146         u32 reset_mask;
4147
4148         if (hard) {
4149                 evergreen_gpu_pci_config_reset(rdev);
4150                 return 0;
4151         }
4152
4153         reset_mask = evergreen_gpu_check_soft_reset(rdev);
4154
4155         if (reset_mask)
4156                 r600_set_bios_scratch_engine_hung(rdev, true);
4157
4158         /* try soft reset */
4159         evergreen_gpu_soft_reset(rdev, reset_mask);
4160
4161         reset_mask = evergreen_gpu_check_soft_reset(rdev);
4162
4163         /* try pci config reset */
4164         if (reset_mask && radeon_hard_reset)
4165                 evergreen_gpu_pci_config_reset(rdev);
4166
4167         reset_mask = evergreen_gpu_check_soft_reset(rdev);
4168
4169         if (!reset_mask)
4170                 r600_set_bios_scratch_engine_hung(rdev, false);
4171
4172         return 0;
4173 }
4174
4175 /**
4176  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4177  *
4178  * @rdev: radeon_device pointer
4179  * @ring: radeon_ring structure holding ring information
4180  *
4181  * Check if the GFX engine is locked up.
4182  * Returns true if the engine appears to be locked up, false if not.
4183  */
4184 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4185 {
4186         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4187
4188         if (!(reset_mask & (RADEON_RESET_GFX |
4189                             RADEON_RESET_COMPUTE |
4190                             RADEON_RESET_CP))) {
4191                 radeon_ring_lockup_update(rdev, ring);
4192                 return false;
4193         }
4194         return radeon_ring_test_lockup(rdev, ring);
4195 }
4196
4197 /*
4198  * RLC
4199  */
4200 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4201 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4202
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins, unreserves and releases the RLC save/restore, clear-state and
 * CP-table buffer objects created by sumo_rlc_init(), clearing the
 * pointers so the teardown is safe to call more than once.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block (original comment said "clear state block" --
	 * copy-paste error)
	 */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
4243
4244 #define CP_ME_TABLE_SIZE    96
4245
4246 int sumo_rlc_init(struct radeon_device *rdev)
4247 {
4248         const u32 *src_ptr;
4249         volatile u32 *dst_ptr;
4250         u32 dws, data, i, j, k, reg_num;
4251         u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4252         u64 reg_list_mc_addr;
4253         const struct cs_section_def *cs_data;
4254         int r;
4255
4256         src_ptr = rdev->rlc.reg_list;
4257         dws = rdev->rlc.reg_list_size;
4258         if (rdev->family >= CHIP_BONAIRE) {
4259                 dws += (5 * 16) + 48 + 48 + 64;
4260         }
4261         cs_data = rdev->rlc.cs_data;
4262
4263         if (src_ptr) {
4264                 /* save restore block */
4265                 if (rdev->rlc.save_restore_obj == NULL) {
4266                         r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4267                                              RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4268                                              NULL, &rdev->rlc.save_restore_obj);
4269                         if (r) {
4270                                 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4271                                 return r;
4272                         }
4273                 }
4274
4275                 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4276                 if (unlikely(r != 0)) {
4277                         sumo_rlc_fini(rdev);
4278                         return r;
4279                 }
4280                 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4281                                   &rdev->rlc.save_restore_gpu_addr);
4282                 if (r) {
4283                         radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4284                         dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4285                         sumo_rlc_fini(rdev);
4286                         return r;
4287                 }
4288
4289                 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4290                 if (r) {
4291                         dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4292                         sumo_rlc_fini(rdev);
4293                         return r;
4294                 }
4295                 /* write the sr buffer */
4296                 dst_ptr = rdev->rlc.sr_ptr;
4297                 if (rdev->family >= CHIP_TAHITI) {
4298                         /* SI */
4299                         for (i = 0; i < rdev->rlc.reg_list_size; i++)
4300                                 dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4301                 } else {
4302                         /* ON/LN/TN */
4303                         /* format:
4304                          * dw0: (reg2 << 16) | reg1
4305                          * dw1: reg1 save space
4306                          * dw2: reg2 save space
4307                          */
4308                         for (i = 0; i < dws; i++) {
4309                                 data = src_ptr[i] >> 2;
4310                                 i++;
4311                                 if (i < dws)
4312                                         data |= (src_ptr[i] >> 2) << 16;
4313                                 j = (((i - 1) * 3) / 2);
4314                                 dst_ptr[j] = cpu_to_le32(data);
4315                         }
4316                         j = ((i * 3) / 2);
4317                         dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4318                 }
4319                 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4320                 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4321         }
4322
4323         if (cs_data) {
4324                 /* clear state block */
4325                 if (rdev->family >= CHIP_BONAIRE) {
4326                         rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4327                 } else if (rdev->family >= CHIP_TAHITI) {
4328                         rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4329                         dws = rdev->rlc.clear_state_size + (256 / 4);
4330                 } else {
4331                         reg_list_num = 0;
4332                         dws = 0;
4333                         for (i = 0; cs_data[i].section != NULL; i++) {
4334                                 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4335                                         reg_list_num++;
4336                                         dws += cs_data[i].section[j].reg_count;
4337                                 }
4338                         }
4339                         reg_list_blk_index = (3 * reg_list_num + 2);
4340                         dws += reg_list_blk_index;
4341                         rdev->rlc.clear_state_size = dws;
4342                 }
4343
4344                 if (rdev->rlc.clear_state_obj == NULL) {
4345                         r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4346                                              RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4347                                              NULL, &rdev->rlc.clear_state_obj);
4348                         if (r) {
4349                                 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4350                                 sumo_rlc_fini(rdev);
4351                                 return r;
4352                         }
4353                 }
4354                 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4355                 if (unlikely(r != 0)) {
4356                         sumo_rlc_fini(rdev);
4357                         return r;
4358                 }
4359                 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4360                                   &rdev->rlc.clear_state_gpu_addr);
4361                 if (r) {
4362                         radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4363                         dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4364                         sumo_rlc_fini(rdev);
4365                         return r;
4366                 }
4367
4368                 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4369                 if (r) {
4370                         dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4371                         sumo_rlc_fini(rdev);
4372                         return r;
4373                 }
4374                 /* set up the cs buffer */
4375                 dst_ptr = rdev->rlc.cs_ptr;
4376                 if (rdev->family >= CHIP_BONAIRE) {
4377                         cik_get_csb_buffer(rdev, dst_ptr);
4378                 } else if (rdev->family >= CHIP_TAHITI) {
4379                         reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4380                         dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4381                         dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4382                         dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4383                         si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4384                 } else {
4385                         reg_list_hdr_blk_index = 0;
4386                         reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4387                         data = upper_32_bits(reg_list_mc_addr);
4388                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4389                         reg_list_hdr_blk_index++;
4390                         for (i = 0; cs_data[i].section != NULL; i++) {
4391                                 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4392                                         reg_num = cs_data[i].section[j].reg_count;
4393                                         data = reg_list_mc_addr & 0xffffffff;
4394                                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4395                                         reg_list_hdr_blk_index++;
4396
4397                                         data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4398                                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4399                                         reg_list_hdr_blk_index++;
4400
4401                                         data = 0x08000000 | (reg_num * 4);
4402                                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4403                                         reg_list_hdr_blk_index++;
4404
4405                                         for (k = 0; k < reg_num; k++) {
4406                                                 data = cs_data[i].section[j].extent[k];
4407                                                 dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4408                                         }
4409                                         reg_list_mc_addr += reg_num * 4;
4410                                         reg_list_blk_index += reg_num;
4411                                 }
4412                         }
4413                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4414                 }
4415                 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4416                 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4417         }
4418
4419         if (rdev->rlc.cp_table_size) {
4420                 if (rdev->rlc.cp_table_obj == NULL) {
4421                         r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4422                                              PAGE_SIZE, true,
4423                                              RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4424                                              NULL, &rdev->rlc.cp_table_obj);
4425                         if (r) {
4426                                 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4427                                 sumo_rlc_fini(rdev);
4428                                 return r;
4429                         }
4430                 }
4431
4432                 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4433                 if (unlikely(r != 0)) {
4434                         dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4435                         sumo_rlc_fini(rdev);
4436                         return r;
4437                 }
4438                 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4439                                   &rdev->rlc.cp_table_gpu_addr);
4440                 if (r) {
4441                         radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4442                         dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4443                         sumo_rlc_fini(rdev);
4444                         return r;
4445                 }
4446                 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4447                 if (r) {
4448                         dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4449                         sumo_rlc_fini(rdev);
4450                         return r;
4451                 }
4452
4453                 cik_init_cp_pg_table(rdev);
4454
4455                 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4456                 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4457
4458         }
4459
4460         return 0;
4461 }
4462
4463 static void evergreen_rlc_start(struct radeon_device *rdev)
4464 {
4465         u32 mask = RLC_ENABLE;
4466
4467         if (rdev->flags & RADEON_IS_IGP) {
4468                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4469         }
4470
4471         WREG32(RLC_CNTL, mask);
4472 }
4473
4474 int evergreen_rlc_resume(struct radeon_device *rdev)
4475 {
4476         u32 i;
4477         const __be32 *fw_data;
4478
4479         if (!rdev->rlc_fw)
4480                 return -EINVAL;
4481
4482         r600_rlc_stop(rdev);
4483
4484         WREG32(RLC_HB_CNTL, 0);
4485
4486         if (rdev->flags & RADEON_IS_IGP) {
4487                 if (rdev->family == CHIP_ARUBA) {
4488                         u32 always_on_bitmap =
4489                                 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4490                         /* find out the number of active simds */
4491                         u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4492                         tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4493                         tmp = hweight32(~tmp);
4494                         if (tmp == rdev->config.cayman.max_simds_per_se) {
4495                                 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4496                                 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4497                                 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4498                                 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4499                                 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4500                         }
4501                 } else {
4502                         WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4503                         WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4504                 }
4505                 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4506                 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4507         } else {
4508                 WREG32(RLC_HB_BASE, 0);
4509                 WREG32(RLC_HB_RPTR, 0);
4510                 WREG32(RLC_HB_WPTR, 0);
4511                 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4512                 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4513         }
4514         WREG32(RLC_MC_CNTL, 0);
4515         WREG32(RLC_UCODE_CNTL, 0);
4516
4517         fw_data = (const __be32 *)rdev->rlc_fw->data;
4518         if (rdev->family >= CHIP_ARUBA) {
4519                 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4520                         WREG32(RLC_UCODE_ADDR, i);
4521                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4522                 }
4523         } else if (rdev->family >= CHIP_CAYMAN) {
4524                 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4525                         WREG32(RLC_UCODE_ADDR, i);
4526                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4527                 }
4528         } else {
4529                 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4530                         WREG32(RLC_UCODE_ADDR, i);
4531                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4532                 }
4533         }
4534         WREG32(RLC_UCODE_ADDR, 0);
4535
4536         evergreen_rlc_start(rdev);
4537
4538         return 0;
4539 }
4540
4541 /* Interrupts */
4542
4543 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4544 {
4545         if (crtc >= rdev->num_crtc)
4546                 return 0;
4547         else
4548                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4549 }
4550
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Writes every interrupt-enable register this asic uses back to its
 * disabled state: CP ring(s), DMA engine(s), GRBM/SRBM, per-crtc
 * vblank and pageflip masks, DAC autodetect and the six HPD pins.
 * Note the register write order is kept as-is; do not reorder.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	/* cayman and newer have three CP rings and a second DMA engine */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	/* mask the (first) DMA engine trap interrupt */
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* vblank/vline masks for every crtc present on this asic */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* pageflip interrupt masks */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* for each HPD pin keep only the programmed polarity bit;
	 * everything else (enables, acks) is cleared
	 */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4609
/**
 * evergreen_irq_set - program the interrupt enables from driver state
 *
 * @rdev: radeon_device pointer
 *
 * Builds the CP/DMA/crtc/HPD/AFMT/thermal interrupt enable values from
 * the rdev->irq software state and writes them to the hardware in one
 * pass.  If the IH ring is disabled all sources are forced off instead.
 *
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read-modify-write seeds: current register values with the
	 * enable bits we manage masked off
	 */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	/* ARUBA keeps the thermal int control in a TN-specific register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	/* second DMA engine only exists on cayman and newer */
	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank enables: requested either by the vblank machinery or a
	 * pending pageflip
	 */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	/* hotplug detect enables (connect + RX/short-pulse) */
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	/* HDMI/DP audio format interrupt enables */
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* commit everything to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts are always left enabled */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}
4839
4840 static void evergreen_irq_ack(struct radeon_device *rdev)
4841 {
4842         u32 tmp;
4843
4844         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4845         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4846         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4847         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4848         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4849         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4850         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4851         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4852         if (rdev->num_crtc >= 4) {
4853                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4854                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4855         }
4856         if (rdev->num_crtc >= 6) {
4857                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4858                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4859         }
4860
4861         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4862         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4863         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4864         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4865         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4866         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4867
4868         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4869                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4870         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4871                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4872         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4873                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4874         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4875                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4876         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4877                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4878         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4879                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4880
4881         if (rdev->num_crtc >= 4) {
4882                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4883                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4884                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4885                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4886                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4887                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4888                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4889                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4890                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4891                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4892                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4893                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4894         }
4895
4896         if (rdev->num_crtc >= 6) {
4897                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4898                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4899                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4900                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4901                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4902                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4903                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4904                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4905                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4906                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4907                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4908                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4909         }
4910
4911         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4912                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4913                 tmp |= DC_HPDx_INT_ACK;
4914                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4915         }
4916         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4917                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4918                 tmp |= DC_HPDx_INT_ACK;
4919                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4920         }
4921         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4922                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4923                 tmp |= DC_HPDx_INT_ACK;
4924                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4925         }
4926         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4927                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4928                 tmp |= DC_HPDx_INT_ACK;
4929                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4930         }
4931         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4932                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4933                 tmp |= DC_HPDx_INT_ACK;
4934                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4935         }
4936         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4937                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4938                 tmp |= DC_HPDx_INT_ACK;
4939                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4940         }
4941
4942         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4943                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4944                 tmp |= DC_HPDx_RX_INT_ACK;
4945                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4946         }
4947         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4948                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4949                 tmp |= DC_HPDx_RX_INT_ACK;
4950                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4951         }
4952         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4953                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4954                 tmp |= DC_HPDx_RX_INT_ACK;
4955                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4956         }
4957         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4958                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4959                 tmp |= DC_HPDx_RX_INT_ACK;
4960                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4961         }
4962         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4963                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4964                 tmp |= DC_HPDx_RX_INT_ACK;
4965                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4966         }
4967         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4968                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4969                 tmp |= DC_HPDx_RX_INT_ACK;
4970                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4971         }
4972
4973         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4974                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4975                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4976                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4977         }
4978         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4979                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4980                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4981                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4982         }
4983         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4984                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4985                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4986                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4987         }
4988         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4989                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4990                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4991                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4992         }
4993         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4994                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4995                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4996                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4997         }
4998         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4999                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
5000                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
5001                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
5002         }
5003 }
5004
/*
 * Fully quiesce interrupt delivery on evergreen parts.
 *
 * Sequence matters: first mask interrupt generation via
 * r600_disable_interrupts(), give in-flight interrupts a moment to land,
 * then acknowledge anything still pending and clear the per-source
 * interrupt enable state so nothing re-asserts afterwards.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
        r600_disable_interrupts(rdev);
        /* Wait and acknowledge irq */
        mdelay(1);
        evergreen_irq_ack(rdev);
        evergreen_disable_interrupt_state(rdev);
}
5013
/*
 * Prepare interrupt handling for suspend: disable and acknowledge all
 * interrupt sources, then stop the RLC so no further interrupt traffic
 * is generated while the device is down.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
        evergreen_irq_disable(rdev);
        r600_rlc_stop(rdev);
}
5019
5020 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5021 {
5022         u32 wptr, tmp;
5023
5024         if (rdev->wb.enabled)
5025                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5026         else
5027                 wptr = RREG32(IH_RB_WPTR);
5028
5029         if (wptr & RB_OVERFLOW) {
5030                 wptr &= ~RB_OVERFLOW;
5031                 /* When a ring buffer overflow happen start parsing interrupt
5032                  * from the last not overwritten vector (wptr + 16). Hopefully
5033                  * this should allow us to catchup.
5034                  */
5035                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5036                          wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5037                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5038                 tmp = RREG32(IH_RB_CNTL);
5039                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
5040                 WREG32(IH_RB_CNTL, tmp);
5041         }
5042         return (wptr & rdev->ih.ptr_mask);
5043 }
5044
5045 int evergreen_irq_process(struct radeon_device *rdev)
5046 {
5047         u32 wptr;
5048         u32 rptr;
5049         u32 src_id, src_data;
5050         u32 ring_index;
5051         bool queue_hotplug = false;
5052         bool queue_hdmi = false;
5053         bool queue_dp = false;
5054         bool queue_thermal = false;
5055         u32 status, addr;
5056
5057         if (!rdev->ih.enabled || rdev->shutdown)
5058                 return IRQ_NONE;
5059
5060         wptr = evergreen_get_ih_wptr(rdev);
5061
5062 restart_ih:
5063         /* is somebody else already processing irqs? */
5064         if (atomic_xchg(&rdev->ih.lock, 1))
5065                 return IRQ_NONE;
5066
5067         rptr = rdev->ih.rptr;
5068         DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5069
5070         /* Order reading of wptr vs. reading of IH ring data */
5071         rmb();
5072
5073         /* display interrupts */
5074         evergreen_irq_ack(rdev);
5075
5076         while (rptr != wptr) {
5077                 /* wptr/rptr are in bytes! */
5078                 ring_index = rptr / 4;
5079                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5080                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5081
5082                 switch (src_id) {
5083                 case 1: /* D1 vblank/vline */
5084                         switch (src_data) {
5085                         case 0: /* D1 vblank */
5086                                 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5087                                         DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5088
5089                                 if (rdev->irq.crtc_vblank_int[0]) {
5090                                         drm_handle_vblank(rdev->ddev, 0);
5091                                         rdev->pm.vblank_sync = true;
5092                                         wake_up(&rdev->irq.vblank_queue);
5093                                 }
5094                                 if (atomic_read(&rdev->irq.pflip[0]))
5095                                         radeon_crtc_handle_vblank(rdev, 0);
5096                                 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5097                                 DRM_DEBUG("IH: D1 vblank\n");
5098
5099                                 break;
5100                         case 1: /* D1 vline */
5101                                 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5102                                         DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5103
5104                                 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5105                                 DRM_DEBUG("IH: D1 vline\n");
5106
5107                                 break;
5108                         default:
5109                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5110                                 break;
5111                         }
5112                         break;
5113                 case 2: /* D2 vblank/vline */
5114                         switch (src_data) {
5115                         case 0: /* D2 vblank */
5116                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5117                                         DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5118
5119                                 if (rdev->irq.crtc_vblank_int[1]) {
5120                                         drm_handle_vblank(rdev->ddev, 1);
5121                                         rdev->pm.vblank_sync = true;
5122                                         wake_up(&rdev->irq.vblank_queue);
5123                                 }
5124                                 if (atomic_read(&rdev->irq.pflip[1]))
5125                                         radeon_crtc_handle_vblank(rdev, 1);
5126                                 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5127                                 DRM_DEBUG("IH: D2 vblank\n");
5128
5129                                 break;
5130                         case 1: /* D2 vline */
5131                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5132                                         DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5133
5134                                 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5135                                 DRM_DEBUG("IH: D2 vline\n");
5136
5137                                 break;
5138                         default:
5139                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5140                                 break;
5141                         }
5142                         break;
5143                 case 3: /* D3 vblank/vline */
5144                         switch (src_data) {
5145                         case 0: /* D3 vblank */
5146                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5147                                         DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5148
5149                                 if (rdev->irq.crtc_vblank_int[2]) {
5150                                         drm_handle_vblank(rdev->ddev, 2);
5151                                         rdev->pm.vblank_sync = true;
5152                                         wake_up(&rdev->irq.vblank_queue);
5153                                 }
5154                                 if (atomic_read(&rdev->irq.pflip[2]))
5155                                         radeon_crtc_handle_vblank(rdev, 2);
5156                                 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5157                                 DRM_DEBUG("IH: D3 vblank\n");
5158
5159                                 break;
5160                         case 1: /* D3 vline */
5161                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5162                                         DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5163
5164                                 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5165                                 DRM_DEBUG("IH: D3 vline\n");
5166
5167                                 break;
5168                         default:
5169                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5170                                 break;
5171                         }
5172                         break;
5173                 case 4: /* D4 vblank/vline */
5174                         switch (src_data) {
5175                         case 0: /* D4 vblank */
5176                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5177                                         DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5178
5179                                 if (rdev->irq.crtc_vblank_int[3]) {
5180                                         drm_handle_vblank(rdev->ddev, 3);
5181                                         rdev->pm.vblank_sync = true;
5182                                         wake_up(&rdev->irq.vblank_queue);
5183                                 }
5184                                 if (atomic_read(&rdev->irq.pflip[3]))
5185                                         radeon_crtc_handle_vblank(rdev, 3);
5186                                 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5187                                 DRM_DEBUG("IH: D4 vblank\n");
5188
5189                                 break;
5190                         case 1: /* D4 vline */
5191                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5192                                         DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5193
5194                                 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5195                                 DRM_DEBUG("IH: D4 vline\n");
5196
5197                                 break;
5198                         default:
5199                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5200                                 break;
5201                         }
5202                         break;
5203                 case 5: /* D5 vblank/vline */
5204                         switch (src_data) {
5205                         case 0: /* D5 vblank */
5206                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5207                                         DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5208
5209                                 if (rdev->irq.crtc_vblank_int[4]) {
5210                                         drm_handle_vblank(rdev->ddev, 4);
5211                                         rdev->pm.vblank_sync = true;
5212                                         wake_up(&rdev->irq.vblank_queue);
5213                                 }
5214                                 if (atomic_read(&rdev->irq.pflip[4]))
5215                                         radeon_crtc_handle_vblank(rdev, 4);
5216                                 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5217                                 DRM_DEBUG("IH: D5 vblank\n");
5218
5219                                 break;
5220                         case 1: /* D5 vline */
5221                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5222                                         DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5223
5224                                 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5225                                 DRM_DEBUG("IH: D5 vline\n");
5226
5227                                 break;
5228                         default:
5229                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5230                                 break;
5231                         }
5232                         break;
5233                 case 6: /* D6 vblank/vline */
5234                         switch (src_data) {
5235                         case 0: /* D6 vblank */
5236                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5237                                         DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5238
5239                                 if (rdev->irq.crtc_vblank_int[5]) {
5240                                         drm_handle_vblank(rdev->ddev, 5);
5241                                         rdev->pm.vblank_sync = true;
5242                                         wake_up(&rdev->irq.vblank_queue);
5243                                 }
5244                                 if (atomic_read(&rdev->irq.pflip[5]))
5245                                         radeon_crtc_handle_vblank(rdev, 5);
5246                                 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5247                                 DRM_DEBUG("IH: D6 vblank\n");
5248
5249                                 break;
5250                         case 1: /* D6 vline */
5251                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5252                                         DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5253
5254                                 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5255                                 DRM_DEBUG("IH: D6 vline\n");
5256
5257                                 break;
5258                         default:
5259                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5260                                 break;
5261                         }
5262                         break;
5263                 case 8: /* D1 page flip */
5264                 case 10: /* D2 page flip */
5265                 case 12: /* D3 page flip */
5266                 case 14: /* D4 page flip */
5267                 case 16: /* D5 page flip */
5268                 case 18: /* D6 page flip */
5269                         DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5270                         if (radeon_use_pflipirq > 0)
5271                                 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5272                         break;
5273                 case 42: /* HPD hotplug */
5274                         switch (src_data) {
5275                         case 0:
5276                                 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5277                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5278
5279                                 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5280                                 queue_hotplug = true;
5281                                 DRM_DEBUG("IH: HPD1\n");
5282                                 break;
5283                         case 1:
5284                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5285                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5286
5287                                 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5288                                 queue_hotplug = true;
5289                                 DRM_DEBUG("IH: HPD2\n");
5290                                 break;
5291                         case 2:
5292                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5293                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5294
5295                                 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5296                                 queue_hotplug = true;
5297                                 DRM_DEBUG("IH: HPD3\n");
5298                                 break;
5299                         case 3:
5300                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5301                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5302
5303                                 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5304                                 queue_hotplug = true;
5305                                 DRM_DEBUG("IH: HPD4\n");
5306                                 break;
5307                         case 4:
5308                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5309                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5310
5311                                 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5312                                 queue_hotplug = true;
5313                                 DRM_DEBUG("IH: HPD5\n");
5314                                 break;
5315                         case 5:
5316                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5317                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5318
5319                                 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5320                                 queue_hotplug = true;
5321                                 DRM_DEBUG("IH: HPD6\n");
5322                                 break;
5323                         case 6:
5324                                 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5325                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5326
5327                                 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5328                                 queue_dp = true;
5329                                 DRM_DEBUG("IH: HPD_RX 1\n");
5330                                 break;
5331                         case 7:
5332                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5333                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5334
5335                                 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5336                                 queue_dp = true;
5337                                 DRM_DEBUG("IH: HPD_RX 2\n");
5338                                 break;
5339                         case 8:
5340                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5341                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5342
5343                                 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5344                                 queue_dp = true;
5345                                 DRM_DEBUG("IH: HPD_RX 3\n");
5346                                 break;
5347                         case 9:
5348                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5349                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5350
5351                                 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5352                                 queue_dp = true;
5353                                 DRM_DEBUG("IH: HPD_RX 4\n");
5354                                 break;
5355                         case 10:
5356                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5357                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5358
5359                                 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5360                                 queue_dp = true;
5361                                 DRM_DEBUG("IH: HPD_RX 5\n");
5362                                 break;
5363                         case 11:
5364                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5365                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5366
5367                                 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5368                                 queue_dp = true;
5369                                 DRM_DEBUG("IH: HPD_RX 6\n");
5370                                 break;
5371                         default:
5372                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5373                                 break;
5374                         }
5375                         break;
5376                 case 44: /* hdmi */
5377                         switch (src_data) {
5378                         case 0:
5379                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5380                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5381
5382                                 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5383                                 queue_hdmi = true;
5384                                 DRM_DEBUG("IH: HDMI0\n");
5385                                 break;
5386                         case 1:
5387                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5388                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5389
5390                                 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5391                                 queue_hdmi = true;
5392                                 DRM_DEBUG("IH: HDMI1\n");
5393                                 break;
5394                         case 2:
5395                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5396                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5397
5398                                 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5399                                 queue_hdmi = true;
5400                                 DRM_DEBUG("IH: HDMI2\n");
5401                                 break;
5402                         case 3:
5403                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5404                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5405
5406                                 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5407                                 queue_hdmi = true;
5408                                 DRM_DEBUG("IH: HDMI3\n");
5409                                 break;
5410                         case 4:
5411                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5412                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5413
5414                                 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5415                                 queue_hdmi = true;
5416                                 DRM_DEBUG("IH: HDMI4\n");
5417                                 break;
5418                         case 5:
5419                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5420                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5421
5422                                 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5423                                 queue_hdmi = true;
5424                                 DRM_DEBUG("IH: HDMI5\n");
5425                                 break;
5426                         default:
5427                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5428                                 break;
5429                         }
5430                 case 96:
5431                         DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5432                         WREG32(SRBM_INT_ACK, 0x1);
5433                         break;
5434                 case 124: /* UVD */
5435                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5436                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5437                         break;
5438                 case 146:
5439                 case 147:
5440                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5441                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5442                         /* reset addr and status */
5443                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5444                         if (addr == 0x0 && status == 0x0)
5445                                 break;
5446                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5447                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5448                                 addr);
5449                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5450                                 status);
5451                         cayman_vm_decode_fault(rdev, status, addr);
5452                         break;
5453                 case 176: /* CP_INT in ring buffer */
5454                 case 177: /* CP_INT in IB1 */
5455                 case 178: /* CP_INT in IB2 */
5456                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5457                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5458                         break;
5459                 case 181: /* CP EOP event */
5460                         DRM_DEBUG("IH: CP EOP\n");
5461                         if (rdev->family >= CHIP_CAYMAN) {
5462                                 switch (src_data) {
5463                                 case 0:
5464                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5465                                         break;
5466                                 case 1:
5467                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5468                                         break;
5469                                 case 2:
5470                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5471                                         break;
5472                                 }
5473                         } else
5474                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5475                         break;
5476                 case 224: /* DMA trap event */
5477                         DRM_DEBUG("IH: DMA trap\n");
5478                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5479                         break;
5480                 case 230: /* thermal low to high */
5481                         DRM_DEBUG("IH: thermal low to high\n");
5482                         rdev->pm.dpm.thermal.high_to_low = false;
5483                         queue_thermal = true;
5484                         break;
5485                 case 231: /* thermal high to low */
5486                         DRM_DEBUG("IH: thermal high to low\n");
5487                         rdev->pm.dpm.thermal.high_to_low = true;
5488                         queue_thermal = true;
5489                         break;
5490                 case 233: /* GUI IDLE */
5491                         DRM_DEBUG("IH: GUI idle\n");
5492                         break;
5493                 case 244: /* DMA trap event */
5494                         if (rdev->family >= CHIP_CAYMAN) {
5495                                 DRM_DEBUG("IH: DMA1 trap\n");
5496                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5497                         }
5498                         break;
5499                 default:
5500                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5501                         break;
5502                 }
5503
5504                 /* wptr/rptr are in bytes! */
5505                 rptr += 16;
5506                 rptr &= rdev->ih.ptr_mask;
5507                 WREG32(IH_RB_RPTR, rptr);
5508         }
5509         if (queue_dp)
5510                 schedule_work(&rdev->dp_work);
5511         if (queue_hotplug)
5512                 schedule_delayed_work(&rdev->hotplug_work, 0);
5513         if (queue_hdmi)
5514                 schedule_work(&rdev->audio_work);
5515         if (queue_thermal && rdev->pm.dpm_enabled)
5516                 schedule_work(&rdev->pm.dpm.thermal.work);
5517         rdev->ih.rptr = rptr;
5518         atomic_set(&rdev->ih.lock, 0);
5519
5520         /* make sure wptr hasn't changed while processing */
5521         wptr = evergreen_get_ih_wptr(rdev);
5522         if (wptr != rptr)
5523                 goto restart_ih;
5524
5525         return IRQ_HANDLED;
5526 }
5527
5528 static void evergreen_uvd_init(struct radeon_device *rdev)
5529 {
5530         int r;
5531
5532         if (!rdev->has_uvd)
5533                 return;
5534
5535         r = radeon_uvd_init(rdev);
5536         if (r) {
5537                 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5538                 /*
5539                  * At this point rdev->uvd.vcpu_bo is NULL which trickles down
5540                  * to early fails uvd_v2_2_resume() and thus nothing happens
5541                  * there. So it is pointless to try to go through that code
5542                  * hence why we disable uvd here.
5543                  */
5544                 rdev->has_uvd = 0;
5545                 return;
5546         }
5547         rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5548         r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5549 }
5550
5551 static void evergreen_uvd_start(struct radeon_device *rdev)
5552 {
5553         int r;
5554
5555         if (!rdev->has_uvd)
5556                 return;
5557
5558         r = uvd_v2_2_resume(rdev);
5559         if (r) {
5560                 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5561                 goto error;
5562         }
5563         r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5564         if (r) {
5565                 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5566                 goto error;
5567         }
5568         return;
5569
5570 error:
5571         rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5572 }
5573
5574 static void evergreen_uvd_resume(struct radeon_device *rdev)
5575 {
5576         struct radeon_ring *ring;
5577         int r;
5578
5579         if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5580                 return;
5581
5582         ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5583         r = radeon_ring_init(rdev, ring, ring->ring_size, 0, RADEON_CP_PACKET2);
5584         if (r) {
5585                 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5586                 return;
5587         }
5588         r = uvd_v1_0_init(rdev);
5589         if (r) {
5590                 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5591                 return;
5592         }
5593 }
5594
/*
 * evergreen_startup - program the hardware and bring the rings up
 *
 * @rdev: radeon_device pointer
 *
 * Full hardware bring-up: PCIe gen2/ASPM, VRAM scratch, MC
 * programming, GART (or AGP), GPU/RLC init, writeback, fence rings,
 * interrupts, CP/DMA ring start, UVD, IB pool and audio.  The order
 * of these steps matters (e.g. the VRAM scratch page is set up before
 * the MC is programmed; fences are started before the rings that
 * signal them).  Called from evergreen_init() and evergreen_resume().
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* MC ucode is only loaded here when DPM is not enabled */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD start failure is non-fatal; it disables the UVD ring */
	evergreen_uvd_start(rdev);

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* non-fatal on failure, same as evergreen_uvd_start() above */
	evergreen_uvd_resume(rdev);

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5714
5715 int evergreen_resume(struct radeon_device *rdev)
5716 {
5717         int r;
5718
5719         /* reset the asic, the gfx blocks are often in a bad state
5720          * after the driver is unloaded or after a resume
5721          */
5722         if (radeon_asic_reset(rdev))
5723                 dev_warn(rdev->dev, "GPU reset failed !\n");
5724         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5725          * posting will perform necessary task to bring back GPU into good
5726          * shape.
5727          */
5728         /* post card */
5729         atom_asic_init(rdev->mode_info.atom_context);
5730
5731         /* init golden registers */
5732         evergreen_init_golden_registers(rdev);
5733
5734         if (rdev->pm.pm_method == PM_METHOD_DPM)
5735                 radeon_pm_resume(rdev);
5736
5737         rdev->accel_working = true;
5738         r = evergreen_startup(rdev);
5739         if (r) {
5740                 DRM_ERROR("evergreen startup failed on resume\n");
5741                 rdev->accel_working = false;
5742                 return r;
5743         }
5744
5745         return r;
5746
5747 }
5748
/*
 * evergreen_suspend - quiesce the asic for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Stops power management and audio first, then UVD (if present), the
 * CP and DMA engines, interrupts, writeback and finally the GART.
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	if (rdev->has_uvd) {
		/* shut down the UVD v1.0 block before suspending UVD state */
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5765
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than calling asic specific functions. This
 * should also allow us to remove a bunch of callback functions
 * like vram_info.
 */
/*
 * evergreen_init - asic specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * One-time setup path: reads and validates the ATOM BIOS, resets and
 * posts the card if needed, initializes clocks, fences, AGP, the MC,
 * the memory manager and firmware, sets up power management, the GFX
 * and DMA rings, UVD, the IH ring and GART, then runs
 * evergreen_startup().  A startup failure disables acceleration
 * rather than failing init.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* DCE5 parts additionally require the MC firmware image */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failed: tear down what was brought up and
		 * continue without acceleration
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5895
/*
 * evergreen_fini - tear down the asic specific driver state
 *
 * @rdev: radeon_device pointer
 *
 * Undoes evergreen_init(): stops power management and audio, shuts
 * down the CP/DMA engines, interrupts, RLC (IGP only), writeback,
 * the IB pool, UVD and GART, then releases the VRAM scratch page,
 * GEM, fence driver, AGP, memory manager and atombios state, and
 * finally frees the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	/* clear the pointer so nothing uses the freed BIOS image */
	rdev->bios = NULL;
}
5920
/*
 * evergreen_pcie_gen2_enable - try to switch the PCIE link to gen2 speed
 *
 * @rdev: radeon_device pointer
 *
 * Bails out when gen2 is disabled via the radeon.pcie_gen2 module
 * option, on IGP or non-PCIE parts, on X2 boards (which need a
 * special sequence), when the bus cannot run at 5.0GT/s or 8.0GT/s,
 * or when the link is already at gen2 speed.  Otherwise programs the
 * link controller registers to request the speed change.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* link partner supports gen2: allow upconfigure */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse LC_CLR_FAILED_SPD_CHANGE_CNT (set then clear) */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		/* NOTE(review): the if (1) deliberately forces the disable
		 * branch until the vendor check above is implemented
		 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5983
/*
 * evergreen_program_aspm - set up PCIE ASPM (Active State Power Management)
 *
 * @rdev: radeon_device pointer
 *
 * Programs PIF PHY pairing, the L0s/L1 inactivity timers and, when
 * allowed, PLL power-down behaviour in L1.  Does nothing when ASPM is
 * disabled via the radeon.aspm module option or the device is not
 * PCIE.  L0s is disabled for the families listed in the switch below;
 * L1 and PLL-off-in-L1 are always enabled here (their disable flags
 * are hard-coded false).
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families run with L0s disabled */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* MULTI_PIF is cleared on fusion platforms, set otherwise */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* L0s/L1 inactivity timers; BARTS and later use different values */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* program PLL power states (off/txs2) on both PHYs */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ additionally set a PLL ramp-up time of 4 */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS+ also set the LS2 exit time to 1 on both PHYs */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}