These changes are the raw update to the linux-4.4.6-rt14 kernel sources.
[kvmfornfv.git] / kernel / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <drm/drmP.h>
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include "radeon_audio.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
38 /*
39  * Indirect registers accessor
40  */
41 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
42 {
43         unsigned long flags;
44         u32 r;
45
46         spin_lock_irqsave(&rdev->cg_idx_lock, flags);
47         WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
48         r = RREG32(EVERGREEN_CG_IND_DATA);
49         spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
50         return r;
51 }
52
53 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
54 {
55         unsigned long flags;
56
57         spin_lock_irqsave(&rdev->cg_idx_lock, flags);
58         WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
59         WREG32(EVERGREEN_CG_IND_DATA, (v));
60         spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
61 }
62
63 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
64 {
65         unsigned long flags;
66         u32 r;
67
68         spin_lock_irqsave(&rdev->pif_idx_lock, flags);
69         WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
70         r = RREG32(EVERGREEN_PIF_PHY0_DATA);
71         spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
72         return r;
73 }
74
75 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
76 {
77         unsigned long flags;
78
79         spin_lock_irqsave(&rdev->pif_idx_lock, flags);
80         WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
81         WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
82         spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
83 }
84
85 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
86 {
87         unsigned long flags;
88         u32 r;
89
90         spin_lock_irqsave(&rdev->pif_idx_lock, flags);
91         WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
92         r = RREG32(EVERGREEN_PIF_PHY1_DATA);
93         spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
94         return r;
95 }
96
97 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
98 {
99         unsigned long flags;
100
101         spin_lock_irqsave(&rdev->pif_idx_lock, flags);
102         WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
103         WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
104         spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
105 }
106
/*
 * Per-CRTC MMIO register block offsets, indexed by CRTC number (0-5).
 * Evergreen exposes up to six display controllers; code elsewhere adds
 * one of these offsets to a CRTC-relative register address.
 */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
116
117 #include "clearstate_evergreen.h"
118
/*
 * Register offsets whose contents the RLC saves and restores across
 * power-gating transitions on Sumo-class parts.
 * NOTE(review): the exact consumer of this table is outside this chunk —
 * presumably the RLC save/restore buffer setup; confirm against
 * sumo_rlc_init() or equivalent before relying on ordering semantics.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
203
/* Forward declarations for functions defined later in this file. */
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
/* Helpers shared with other ASIC files (cayman, cik, si, rv770). */
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
219
/*
 * "Golden" register settings for Evergreen (Cypress/Juniper class).
 * Each entry is a {offset, mask, value} triple: the masked bits of the
 * register at 'offset' are set to 'value'.  NOTE(review): presumably
 * applied via radeon_program_register_sequence() during init — confirm
 * against the *_init_golden_registers caller outside this chunk.
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
265
/*
 * Second golden-register table for Evergreen: {offset, mask, value}
 * triples, all clearing the full register to zero.
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
283
/*
 * Medium-grain clock gating (MGCG) init sequence for Cypress:
 * {offset, mask, value} triples.  Note register 0x802c is written
 * several times to switch banks/modes between sub-sequences, and the
 * 0x915c-0x929c run appears twice (once after each 0x802c write).
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
436
/*
 * MGCG init sequence for Redwood: {offset, mask, value} triples.
 * Same structure as the Cypress table but with a shorter 0x915c-0x9204
 * run (Redwood has fewer shader resources than Cypress).
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
508
/*
 * Golden register settings for Cedar: {offset, mask, value} triples.
 * Mostly mirrors the Evergreen table with Cedar-specific differences
 * (e.g. 0x88d4 and 0x8cf0 values, and fewer display-related entries).
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
551
/*
 * MGCG init sequence for Cedar: {offset, mask, value} triples.
 * Shorter 0x915c-0x91ac run than Redwood/Cypress, matching Cedar's
 * smaller configuration.
 */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
605
/*
 * MGCG init sequence for Juniper: {offset, mask, value} triples.
 * Same overall structure as Cypress; note some common entries (0x977c,
 * 0x3f80, 0xa210...) are programmed after the main 0x915c-0x929c run
 * here rather than before it.
 */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
703
/*
 * Golden register settings for SuperSumo (Sumo2 APU): {offset, mask,
 * value} triples.
 */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
778
/*
 * Additional golden register settings applied on Sumo (on top of the
 * SuperSumo table): {offset, mask, value} triples.
 */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
787
/*
 * Golden register settings for Wrestler (Ontario/Zacate APU):
 * {offset, mask, value} triples.
 */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
840
/*
 * Golden register settings for BARTS (Northern Islands); {register offset,
 * mask, value} triples consumed by radeon_program_register_sequence().
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
889
/*
 * Golden register settings for TURKS (Northern Islands); {register offset,
 * mask, value} triples consumed by radeon_program_register_sequence().
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
940
/*
 * Golden register settings for CAICOS (Northern Islands); {register offset,
 * mask, value} triples consumed by radeon_program_register_sequence().
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
991
/**
 * evergreen_init_golden_registers - program family-specific golden settings
 *
 * @rdev: radeon_device pointer
 *
 * Selects the {register, mask, value} tables that match the detected ASIC
 * family and applies them via radeon_program_register_sequence().  The
 * Evergreen discrete parts share two common tables plus a per-chip MGCG
 * (clock gating) table; the APUs and Northern Islands parts each have a
 * single dedicated table.  Families not listed need no golden setup.
 */
static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		/* SUMO2 takes the supersumo list plus a small extra table */
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}
1077
1078 /**
1079  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1080  *
1081  * @rdev: radeon_device pointer
1082  * @reg: register offset in bytes
1083  * @val: register value
1084  *
1085  * Returns 0 for success or -EINVAL for an invalid register
1086  *
1087  */
1088 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1089                                         u32 reg, u32 *val)
1090 {
1091         switch (reg) {
1092         case GRBM_STATUS:
1093         case GRBM_STATUS_SE0:
1094         case GRBM_STATUS_SE1:
1095         case SRBM_STATUS:
1096         case SRBM_STATUS2:
1097         case DMA_STATUS_REG:
1098         case UVD_STATUS:
1099                 *val = RREG32(reg);
1100                 return 0;
1101         default:
1102                 return -EINVAL;
1103         }
1104 }
1105
/*
 * evergreen_tiling_fields - decode packed tiling flags into hw field values
 *
 * Extracts bank width, bank height, macro tile aspect and tile split from
 * the RADEON_TILING_EG_*-encoded @tiling_flags.  The raw 1/2/4/8 counts for
 * bankw/bankh/mtaspect are then converted to the corresponding
 * EVERGREEN_ADDR_SURF_* register encodings; any unexpected raw value falls
 * back to the "1" encoding via the default label.  @tile_split is returned
 * as extracted, without remapping.
 */
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}
1136
1137 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1138                               u32 cntl_reg, u32 status_reg)
1139 {
1140         int r, i;
1141         struct atom_clock_dividers dividers;
1142
1143         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1144                                            clock, false, &dividers);
1145         if (r)
1146                 return r;
1147
1148         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1149
1150         for (i = 0; i < 100; i++) {
1151                 if (RREG32(status_reg) & DCLK_STATUS)
1152                         break;
1153                 mdelay(10);
1154         }
1155         if (i == 100)
1156                 return -ETIMEDOUT;
1157
1158         return 0;
1159 }
1160
1161 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1162 {
1163         int r = 0;
1164         u32 cg_scratch = RREG32(CG_SCRATCH1);
1165
1166         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1167         if (r)
1168                 goto done;
1169         cg_scratch &= 0xffff0000;
1170         cg_scratch |= vclk / 100; /* Mhz */
1171
1172         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1173         if (r)
1174                 goto done;
1175         cg_scratch &= 0x0000ffff;
1176         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1177
1178 done:
1179         WREG32(CG_SCRATCH1, cg_scratch);
1180
1181         return r;
1182 }
1183
/**
 * evergreen_set_uvd_clocks - reprogram the UVD PLL (UPLL)
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 puts the PLL to sleep)
 * @dclk: requested UVD decode clock (0 puts the PLL to sleep)
 *
 * Bypasses the PLL, recomputes and programs the feedback and post
 * dividers, then switches back to normal mode.  The exact write/delay
 * sequence below follows the required hardware programming order.
 *
 * Returns 0 on success or a negative error code on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute feedback and post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1272
1273 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1274 {
1275         int readrq;
1276         u16 v;
1277
1278         readrq = pcie_get_readrq(rdev->pdev);
1279         v = ffs(readrq) - 8;
1280         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1281          * to avoid hangs or perfomance issues
1282          */
1283         if ((v == 0) || (v == 6) || (v == 7))
1284                 pcie_set_readrq(rdev->pdev, 512);
1285 }
1286
1287 void dce4_program_fmt(struct drm_encoder *encoder)
1288 {
1289         struct drm_device *dev = encoder->dev;
1290         struct radeon_device *rdev = dev->dev_private;
1291         struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1292         struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1293         struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1294         int bpc = 0;
1295         u32 tmp = 0;
1296         enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1297
1298         if (connector) {
1299                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1300                 bpc = radeon_get_monitor_bpc(connector);
1301                 dither = radeon_connector->dither;
1302         }
1303
1304         /* LVDS/eDP FMT is set up by atom */
1305         if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1306                 return;
1307
1308         /* not needed for analog */
1309         if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1310             (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1311                 return;
1312
1313         if (bpc == 0)
1314                 return;
1315
1316         switch (bpc) {
1317         case 6:
1318                 if (dither == RADEON_FMT_DITHER_ENABLE)
1319                         /* XXX sort out optimal dither settings */
1320                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1321                                 FMT_SPATIAL_DITHER_EN);
1322                 else
1323                         tmp |= FMT_TRUNCATE_EN;
1324                 break;
1325         case 8:
1326                 if (dither == RADEON_FMT_DITHER_ENABLE)
1327                         /* XXX sort out optimal dither settings */
1328                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1329                                 FMT_RGB_RANDOM_ENABLE |
1330                                 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1331                 else
1332                         tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1333                 break;
1334         case 10:
1335         default:
1336                 /* not needed */
1337                 break;
1338         }
1339
1340         WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1341 }
1342
1343 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1344 {
1345         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1346                 return true;
1347         else
1348                 return false;
1349 }
1350
1351 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1352 {
1353         u32 pos1, pos2;
1354
1355         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1356         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1357
1358         if (pos1 != pos2)
1359                 return true;
1360         else
1361                 return false;
1362 }
1363
1364 /**
1365  * dce4_wait_for_vblank - vblank wait asic callback.
1366  *
1367  * @rdev: radeon_device pointer
1368  * @crtc: crtc to wait for vblank on
1369  *
1370  * Wait for vblank on the requested crtc (evergreen+).
1371  */
1372 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1373 {
1374         unsigned i = 0;
1375
1376         if (crtc >= rdev->num_crtc)
1377                 return;
1378
1379         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1380                 return;
1381
1382         /* depending on when we hit vblank, we may be close to active; if so,
1383          * wait for another frame.
1384          */
1385         while (dce4_is_in_vblank(rdev, crtc)) {
1386                 if (i++ % 100 == 0) {
1387                         if (!dce4_is_counter_moving(rdev, crtc))
1388                                 break;
1389                 }
1390         }
1391
1392         while (!dce4_is_in_vblank(rdev, crtc)) {
1393                 if (i++ % 100 == 0) {
1394                         if (!dce4_is_counter_moving(rdev, crtc))
1395                                 break;
1396                 }
1397         }
1398 }
1399
1400 /**
1401  * evergreen_page_flip - pageflip callback.
1402  *
1403  * @rdev: radeon_device pointer
1404  * @crtc_id: crtc to cleanup pageflip on
1405  * @crtc_base: new address of the crtc (GPU MC address)
1406  *
1407  * Triggers the actual pageflip by updating the primary
1408  * surface base address (evergreen+).
1409  */
1410 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1411 {
1412         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1413
1414         /* update the scanout addresses */
1415         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1416                upper_32_bits(crtc_base));
1417         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1418                (u32)crtc_base);
1419         /* post the write */
1420         RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1421 }
1422
1423 /**
1424  * evergreen_page_flip_pending - check if page flip is still pending
1425  *
1426  * @rdev: radeon_device pointer
1427  * @crtc_id: crtc to check
1428  *
1429  * Returns the current update pending status.
1430  */
1431 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1432 {
1433         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1434
1435         /* Return current update_pending status: */
1436         return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1437                 EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1438 }
1439
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper: raw TS0 reading (half degrees) corrected by the
		 * offset field from CG_THERMAL_CTRL
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* bit 0x100 of the offset selects the negative-offset formula */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		/* other families: decode the ASIC_T field (half degrees) */
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			/* sign-extend the 9-bit negative reading */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* half degrees C -> millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1478
1479 int sumo_get_temp(struct radeon_device *rdev)
1480 {
1481         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1482         int actual_temp = temp - 49;
1483
1484         return actual_temp * 1000;
1485 }
1486
/**
 * sumo_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (sumo, trinity, SI).
 * Used for profile mode only.
 */
void sumo_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default: stay on the default power state, first clock mode */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;

	/* low,mid sh/mh: battery state on mobility parts, performance otherwise */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);

	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;

	/* high sh/mh: performance state with its highest clock mode */
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;

	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;
}
1546
1547 /**
1548  * btc_pm_init_profile - Initialize power profiles callback.
1549  *
1550  * @rdev: radeon_device pointer
1551  *
1552  * Initialize the power states used in profile mode
1553  * (BTC, cayman).
1554  * Used for profile mode only.
1555  */
1556 void btc_pm_init_profile(struct radeon_device *rdev)
1557 {
1558         int idx;
1559
1560         /* default */
1561         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1562         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1563         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1564         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1565         /* starting with BTC, there is one state that is used for both
1566          * MH and SH.  Difference is that we always use the high clock index for
1567          * mclk.
1568          */
1569         if (rdev->flags & RADEON_IS_MOBILITY)
1570                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1571         else
1572                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1573         /* low sh */
1574         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1575         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1576         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1577         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1578         /* mid sh */
1579         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1580         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1581         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1582         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1583         /* high sh */
1584         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1585         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1586         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1587         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1588         /* low mh */
1589         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1590         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1591         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1592         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1593         /* mid mh */
1594         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1595         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1596         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1597         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1598         /* high mh */
1599         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1600         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1601         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1602         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1603 }
1604
1605 /**
1606  * evergreen_pm_misc - set additional pm hw parameters callback.
1607  *
1608  * @rdev: radeon_device pointer
1609  *
1610  * Set non-clock parameters associated with a power state
1611  * (voltage, etc.) (evergreen+).
1612  */
1613 void evergreen_pm_misc(struct radeon_device *rdev)
1614 {
1615         int req_ps_idx = rdev->pm.requested_power_state_index;
1616         int req_cm_idx = rdev->pm.requested_clock_mode_index;
1617         struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1618         struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1619
1620         if (voltage->type == VOLTAGE_SW) {
1621                 /* 0xff0x are flags rather then an actual voltage */
1622                 if ((voltage->voltage & 0xff00) == 0xff00)
1623                         return;
1624                 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1625                         radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1626                         rdev->pm.current_vddc = voltage->voltage;
1627                         DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1628                 }
1629
1630                 /* starting with BTC, there is one state that is used for both
1631                  * MH and SH.  Difference is that we always use the high clock index for
1632                  * mclk and vddci.
1633                  */
1634                 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1635                     (rdev->family >= CHIP_BARTS) &&
1636                     rdev->pm.active_crtc_count &&
1637                     ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1638                      (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1639                         voltage = &rdev->pm.power_state[req_ps_idx].
1640                                 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1641
1642                 /* 0xff0x are flags rather then an actual voltage */
1643                 if ((voltage->vddci & 0xff00) == 0xff00)
1644                         return;
1645                 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1646                         radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1647                         rdev->pm.current_vddci = voltage->vddci;
1648                         DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1649                 }
1650         }
1651 }
1652
1653 /**
1654  * evergreen_pm_prepare - pre-power state change callback.
1655  *
1656  * @rdev: radeon_device pointer
1657  *
1658  * Prepare for a power state change (evergreen+).
1659  */
1660 void evergreen_pm_prepare(struct radeon_device *rdev)
1661 {
1662         struct drm_device *ddev = rdev->ddev;
1663         struct drm_crtc *crtc;
1664         struct radeon_crtc *radeon_crtc;
1665         u32 tmp;
1666
1667         /* disable any active CRTCs */
1668         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1669                 radeon_crtc = to_radeon_crtc(crtc);
1670                 if (radeon_crtc->enabled) {
1671                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1672                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1673                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1674                 }
1675         }
1676 }
1677
1678 /**
1679  * evergreen_pm_finish - post-power state change callback.
1680  *
1681  * @rdev: radeon_device pointer
1682  *
1683  * Clean up after a power state change (evergreen+).
1684  */
1685 void evergreen_pm_finish(struct radeon_device *rdev)
1686 {
1687         struct drm_device *ddev = rdev->ddev;
1688         struct drm_crtc *crtc;
1689         struct radeon_crtc *radeon_crtc;
1690         u32 tmp;
1691
1692         /* enable any active CRTCs */
1693         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1694                 radeon_crtc = to_radeon_crtc(crtc);
1695                 if (radeon_crtc->enabled) {
1696                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1697                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1698                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1699                 }
1700         }
1701 }
1702
1703 /**
1704  * evergreen_hpd_sense - hpd sense callback.
1705  *
1706  * @rdev: radeon_device pointer
1707  * @hpd: hpd (hotplug detect) pin
1708  *
1709  * Checks if a digital monitor is connected (evergreen+).
1710  * Returns true if connected, false if not connected.
1711  */
1712 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1713 {
1714         bool connected = false;
1715
1716         switch (hpd) {
1717         case RADEON_HPD_1:
1718                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1719                         connected = true;
1720                 break;
1721         case RADEON_HPD_2:
1722                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1723                         connected = true;
1724                 break;
1725         case RADEON_HPD_3:
1726                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1727                         connected = true;
1728                 break;
1729         case RADEON_HPD_4:
1730                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1731                         connected = true;
1732                 break;
1733         case RADEON_HPD_5:
1734                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1735                         connected = true;
1736                 break;
1737         case RADEON_HPD_6:
1738                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1739                         connected = true;
1740                 break;
1741         default:
1742                 break;
1743         }
1744
1745         return connected;
1746 }
1747
1748 /**
1749  * evergreen_hpd_set_polarity - hpd set polarity callback.
1750  *
1751  * @rdev: radeon_device pointer
1752  * @hpd: hpd (hotplug detect) pin
1753  *
1754  * Set the polarity of the hpd pin (evergreen+).
1755  */
1756 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1757                                 enum radeon_hpd_id hpd)
1758 {
1759         u32 tmp;
1760         bool connected = evergreen_hpd_sense(rdev, hpd);
1761
1762         switch (hpd) {
1763         case RADEON_HPD_1:
1764                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1765                 if (connected)
1766                         tmp &= ~DC_HPDx_INT_POLARITY;
1767                 else
1768                         tmp |= DC_HPDx_INT_POLARITY;
1769                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1770                 break;
1771         case RADEON_HPD_2:
1772                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1773                 if (connected)
1774                         tmp &= ~DC_HPDx_INT_POLARITY;
1775                 else
1776                         tmp |= DC_HPDx_INT_POLARITY;
1777                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1778                 break;
1779         case RADEON_HPD_3:
1780                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1781                 if (connected)
1782                         tmp &= ~DC_HPDx_INT_POLARITY;
1783                 else
1784                         tmp |= DC_HPDx_INT_POLARITY;
1785                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1786                 break;
1787         case RADEON_HPD_4:
1788                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1789                 if (connected)
1790                         tmp &= ~DC_HPDx_INT_POLARITY;
1791                 else
1792                         tmp |= DC_HPDx_INT_POLARITY;
1793                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1794                 break;
1795         case RADEON_HPD_5:
1796                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1797                 if (connected)
1798                         tmp &= ~DC_HPDx_INT_POLARITY;
1799                 else
1800                         tmp |= DC_HPDx_INT_POLARITY;
1801                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1802                         break;
1803         case RADEON_HPD_6:
1804                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1805                 if (connected)
1806                         tmp &= ~DC_HPDx_INT_POLARITY;
1807                 else
1808                         tmp |= DC_HPDx_INT_POLARITY;
1809                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1810                 break;
1811         default:
1812                 break;
1813         }
1814 }
1815
1816 /**
1817  * evergreen_hpd_init - hpd setup callback.
1818  *
1819  * @rdev: radeon_device pointer
1820  *
1821  * Setup the hpd pins used by the card (evergreen+).
1822  * Enable the pin, set the polarity, and enable the hpd interrupts.
1823  */
1824 void evergreen_hpd_init(struct radeon_device *rdev)
1825 {
1826         struct drm_device *dev = rdev->ddev;
1827         struct drm_connector *connector;
1828         unsigned enabled = 0;
1829         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1830                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1831
1832         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1833                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1834
1835                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1836                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1837                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1838                          * aux dp channel on imac and help (but not completely fix)
1839                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1840                          * also avoid interrupt storms during dpms.
1841                          */
1842                         continue;
1843                 }
1844                 switch (radeon_connector->hpd.hpd) {
1845                 case RADEON_HPD_1:
1846                         WREG32(DC_HPD1_CONTROL, tmp);
1847                         break;
1848                 case RADEON_HPD_2:
1849                         WREG32(DC_HPD2_CONTROL, tmp);
1850                         break;
1851                 case RADEON_HPD_3:
1852                         WREG32(DC_HPD3_CONTROL, tmp);
1853                         break;
1854                 case RADEON_HPD_4:
1855                         WREG32(DC_HPD4_CONTROL, tmp);
1856                         break;
1857                 case RADEON_HPD_5:
1858                         WREG32(DC_HPD5_CONTROL, tmp);
1859                         break;
1860                 case RADEON_HPD_6:
1861                         WREG32(DC_HPD6_CONTROL, tmp);
1862                         break;
1863                 default:
1864                         break;
1865                 }
1866                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1867                 enabled |= 1 << radeon_connector->hpd.hpd;
1868         }
1869         radeon_irq_kms_enable_hpd(rdev, enabled);
1870 }
1871
1872 /**
1873  * evergreen_hpd_fini - hpd tear down callback.
1874  *
1875  * @rdev: radeon_device pointer
1876  *
1877  * Tear down the hpd pins used by the card (evergreen+).
1878  * Disable the hpd interrupts.
1879  */
1880 void evergreen_hpd_fini(struct radeon_device *rdev)
1881 {
1882         struct drm_device *dev = rdev->ddev;
1883         struct drm_connector *connector;
1884         unsigned disabled = 0;
1885
1886         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1887                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1888                 switch (radeon_connector->hpd.hpd) {
1889                 case RADEON_HPD_1:
1890                         WREG32(DC_HPD1_CONTROL, 0);
1891                         break;
1892                 case RADEON_HPD_2:
1893                         WREG32(DC_HPD2_CONTROL, 0);
1894                         break;
1895                 case RADEON_HPD_3:
1896                         WREG32(DC_HPD3_CONTROL, 0);
1897                         break;
1898                 case RADEON_HPD_4:
1899                         WREG32(DC_HPD4_CONTROL, 0);
1900                         break;
1901                 case RADEON_HPD_5:
1902                         WREG32(DC_HPD5_CONTROL, 0);
1903                         break;
1904                 case RADEON_HPD_6:
1905                         WREG32(DC_HPD6_CONTROL, 0);
1906                         break;
1907                 default:
1908                         break;
1909                 }
1910                 disabled |= 1 << radeon_connector->hpd.hpd;
1911         }
1912         radeon_irq_kms_disable_hpd(rdev, disabled);
1913 }
1914
1915 /* watermark setup */
1916
/**
 * evergreen_line_buffer_adjust - split the line buffer between paired crtcs
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc whose allocation is being programmed
 * @mode: display mode on this crtc (NULL/disabled means no allocation)
 * @other_mode: mode on the paired crtc sharing the same line buffer
 *
 * Programs DC_LB_MEMORY_SPLIT (and on DCE4.1/DCE5 the DMIF buffers) and
 * returns the line buffer size, in entries, allocated to this crtc
 * (0 if the crtc is disabled).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		/* crtc disabled: no lb and no dmif buffers */
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		/* busy-wait (up to usec_timeout us) for the hw to ack the
		 * dmif buffer allocation; falls through on timeout.
		 */
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	if (radeon_crtc->base.enabled && mode) {
		/* translate the split preset into a size in entries;
		 * DCE5 has a larger line buffer than DCE4.
		 */
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
2007
2008 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2009 {
2010         u32 tmp = RREG32(MC_SHARED_CHMAP);
2011
2012         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2013         case 0:
2014         default:
2015                 return 1;
2016         case 1:
2017                 return 2;
2018         case 2:
2019                 return 4;
2020         case 3:
2021                 return 8;
2022         }
2023 }
2024
/* Inputs to the evergreen display watermark calculations below.
 * Filled in by evergreen_program_watermarks() for each crtc, once with
 * high clocks and once with low clocks.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
2040
2041 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2042 {
2043         /* Calculate DRAM Bandwidth and the part allocated to display. */
2044         fixed20_12 dram_efficiency; /* 0.7 */
2045         fixed20_12 yclk, dram_channels, bandwidth;
2046         fixed20_12 a;
2047
2048         a.full = dfixed_const(1000);
2049         yclk.full = dfixed_const(wm->yclk);
2050         yclk.full = dfixed_div(yclk, a);
2051         dram_channels.full = dfixed_const(wm->dram_channels * 4);
2052         a.full = dfixed_const(10);
2053         dram_efficiency.full = dfixed_const(7);
2054         dram_efficiency.full = dfixed_div(dram_efficiency, a);
2055         bandwidth.full = dfixed_mul(dram_channels, yclk);
2056         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2057
2058         return dfixed_trunc(bandwidth);
2059 }
2060
2061 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2062 {
2063         /* Calculate DRAM Bandwidth and the part allocated to display. */
2064         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2065         fixed20_12 yclk, dram_channels, bandwidth;
2066         fixed20_12 a;
2067
2068         a.full = dfixed_const(1000);
2069         yclk.full = dfixed_const(wm->yclk);
2070         yclk.full = dfixed_div(yclk, a);
2071         dram_channels.full = dfixed_const(wm->dram_channels * 4);
2072         a.full = dfixed_const(10);
2073         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2074         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2075         bandwidth.full = dfixed_mul(dram_channels, yclk);
2076         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2077
2078         return dfixed_trunc(bandwidth);
2079 }
2080
2081 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2082 {
2083         /* Calculate the display Data return Bandwidth */
2084         fixed20_12 return_efficiency; /* 0.8 */
2085         fixed20_12 sclk, bandwidth;
2086         fixed20_12 a;
2087
2088         a.full = dfixed_const(1000);
2089         sclk.full = dfixed_const(wm->sclk);
2090         sclk.full = dfixed_div(sclk, a);
2091         a.full = dfixed_const(10);
2092         return_efficiency.full = dfixed_const(8);
2093         return_efficiency.full = dfixed_div(return_efficiency, a);
2094         a.full = dfixed_const(32);
2095         bandwidth.full = dfixed_mul(a, sclk);
2096         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2097
2098         return dfixed_trunc(bandwidth);
2099 }
2100
2101 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2102 {
2103         /* Calculate the DMIF Request Bandwidth */
2104         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2105         fixed20_12 disp_clk, bandwidth;
2106         fixed20_12 a;
2107
2108         a.full = dfixed_const(1000);
2109         disp_clk.full = dfixed_const(wm->disp_clk);
2110         disp_clk.full = dfixed_div(disp_clk, a);
2111         a.full = dfixed_const(10);
2112         disp_clk_request_efficiency.full = dfixed_const(8);
2113         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2114         a.full = dfixed_const(32);
2115         bandwidth.full = dfixed_mul(a, disp_clk);
2116         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2117
2118         return dfixed_trunc(bandwidth);
2119 }
2120
2121 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2122 {
2123         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2124         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2125         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2126         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2127
2128         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2129 }
2130
2131 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2132 {
2133         /* Calculate the display mode Average Bandwidth
2134          * DisplayMode should contain the source and destination dimensions,
2135          * timing, etc.
2136          */
2137         fixed20_12 bpp;
2138         fixed20_12 line_time;
2139         fixed20_12 src_width;
2140         fixed20_12 bandwidth;
2141         fixed20_12 a;
2142
2143         a.full = dfixed_const(1000);
2144         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2145         line_time.full = dfixed_div(line_time, a);
2146         bpp.full = dfixed_const(wm->bytes_per_pixel);
2147         src_width.full = dfixed_const(wm->src_width);
2148         bandwidth.full = dfixed_mul(src_width, bpp);
2149         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2150         bandwidth.full = dfixed_div(bandwidth, line_time);
2151
2152         return dfixed_trunc(bandwidth);
2153 }
2154
/* Worst-case latency (in ns) the line buffer must be able to hide for
 * this head, accounting for memory latency, contention with the other
 * heads, and the time to refill a line once it starts draining.
 * NOTE(review): divides by available_bandwidth and wm->disp_clk in the
 * initializers — assumes both are non-zero; verify against callers.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time to return one 512-dword chunk */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* extra wait caused by the other active heads' requests */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* scaling/interlacing can require up to 4 source lines per output line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* bandwidth the display pipe itself can consume: disp_clk * bpp */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fills at the slower of the two rates */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the source lines needed for one output line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if refilling takes longer than the active period, add the overrun */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2207
2208 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2209 {
2210         if (evergreen_average_bandwidth(wm) <=
2211             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2212                 return true;
2213         else
2214                 return false;
2215 };
2216
2217 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2218 {
2219         if (evergreen_average_bandwidth(wm) <=
2220             (evergreen_available_bandwidth(wm) / wm->num_heads))
2221                 return true;
2222         else
2223                 return false;
2224 };
2225
2226 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2227 {
2228         u32 lb_partitions = wm->lb_size / wm->src_width;
2229         u32 line_time = wm->active_time + wm->blank_time;
2230         u32 latency_tolerant_lines;
2231         u32 latency_hiding;
2232         fixed20_12 a;
2233
2234         a.full = dfixed_const(1);
2235         if (wm->vsc.full > a.full)
2236                 latency_tolerant_lines = 1;
2237         else {
2238                 if (lb_partitions <= (wm->vtaps + 1))
2239                         latency_tolerant_lines = 1;
2240                 else
2241                         latency_tolerant_lines = 2;
2242         }
2243
2244         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2245
2246         if (evergreen_latency_watermark(wm) <= latency_hiding)
2247                 return true;
2248         else
2249                 return false;
2250 }
2251
/* Compute and program display watermarks for a single CRTC.
 * @rdev: radeon_device pointer
 * @radeon_crtc: CRTC whose watermarks are being programmed
 * @lb_size: line buffer size allocated to this CRTC
 * @num_heads: number of currently enabled display heads
 *
 * Builds watermark parameters for both the high-clock (wm A) and
 * low-clock (wm B) power states, derives latency watermarks and
 * arbitration priority marks from them, and writes the results to the
 * per-pipe arbitration/latency registers and per-crtc priority
 * registers.  Also records line_time/wm values for later use by DPM.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16; /* per-pipe register stride */
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* mode->clock is in kHz, so this yields the pixel period in ns */
		pixel_period = 1000000 / (u32)mode->clock;
		/* line time in ns, clamped to the 16-bit register field */
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			/* DPM reports clocks in 10 kHz units; convert to kHz */
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2; /* scaler active -> two vertical taps */
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks; clamp to the 16-bit register field */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A = watermark_a * pixel clock * hsc / 1000 / 16,
		 * computed in 20.12 fixed point */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* same computation for priority mark B */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2409
2410 /**
2411  * evergreen_bandwidth_update - update display watermarks callback.
2412  *
2413  * @rdev: radeon_device pointer
2414  *
2415  * Update the display watermarks based on the requested mode(s)
2416  * (evergreen+).
2417  */
2418 void evergreen_bandwidth_update(struct radeon_device *rdev)
2419 {
2420         struct drm_display_mode *mode0 = NULL;
2421         struct drm_display_mode *mode1 = NULL;
2422         u32 num_heads = 0, lb_size;
2423         int i;
2424
2425         if (!rdev->mode_info.mode_config_initialized)
2426                 return;
2427
2428         radeon_update_display_priority(rdev);
2429
2430         for (i = 0; i < rdev->num_crtc; i++) {
2431                 if (rdev->mode_info.crtcs[i]->base.enabled)
2432                         num_heads++;
2433         }
2434         for (i = 0; i < rdev->num_crtc; i += 2) {
2435                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2436                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2437                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2438                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2439                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2440                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2441         }
2442 }
2443
2444 /**
2445  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2446  *
2447  * @rdev: radeon_device pointer
2448  *
2449  * Wait for the MC (memory controller) to be idle.
2450  * (evergreen+).
2451  * Returns 0 if the MC is idle, -1 if not.
2452  */
2453 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2454 {
2455         unsigned i;
2456         u32 tmp;
2457
2458         for (i = 0; i < rdev->usec_timeout; i++) {
2459                 /* read MC_STATUS */
2460                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2461                 if (!tmp)
2462                         return 0;
2463                 udelay(1);
2464         }
2465         return -1;
2466 }
2467
2468 /*
2469  * GART
2470  */
2471 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2472 {
2473         unsigned i;
2474         u32 tmp;
2475
2476         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2477
2478         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2479         for (i = 0; i < rdev->usec_timeout; i++) {
2480                 /* read MC_STATUS */
2481                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2482                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2483                 if (tmp == 2) {
2484                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2485                         return;
2486                 }
2487                 if (tmp) {
2488                         return;
2489                 }
2490                 udelay(1);
2491         }
2492 }
2493
/* Enable the PCIE GART for VM context0.
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the L2 cache and L1 TLB
 * controls, points VM context0 at the GTT aperture and page table,
 * then flushes the TLB.  Returns 0 on success or a negative error code.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion/IGP parts use the FUS_ variants of the MD TLB regs */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have an extra MD TLB bank */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* map context0 to the GTT aperture and its page table (4K pages) */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* out-of-range accesses fault to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2551
/* Disable the PCIE GART.
 * @rdev: radeon_device pointer
 *
 * Disables both VM contexts, drops the L1 TLB/L2 cache enables (keeping
 * queue sizing), and unpins the GART page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: note ENABLE_L1_TLB is intentionally not set */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2576
/* Tear down the PCIE GART: disable the hardware, free the page table
 * VRAM object, and release the GART bookkeeping.  Order matters: the
 * GART must be disabled before its table memory is freed.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2583
2584
/* Configure the MC for AGP operation.
 * @rdev: radeon_device pointer
 *
 * Programs the same L2 cache / L1 TLB setup as the GART path, but
 * leaves both VM contexts disabled since AGP does not use the
 * on-chip page tables.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page tables with AGP: keep both VM contexts off */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2610
/* Quiesce the display hardware before reprogramming the MC.
 * @rdev: radeon_device pointer
 * @save: state saved here for evergreen_mc_resume()
 *
 * Disables VGA rendering, blanks all enabled CRTCs, blacks out the MC,
 * and locks the double-buffered display registers so scanout does not
 * read VRAM while the memory controller is being reprogrammed.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6+: blank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			/* fully disable the CRTC and record it as disabled so that
			 * evergreen_mc_resume() will not try to re-enable it
			 */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2697
/* Restore the display hardware after MC reprogramming.
 * @rdev: radeon_device pointer
 * @save: state previously captured by evergreen_mc_stop()
 *
 * Repoints the CRTC and VGA surfaces at the (possibly moved) start of
 * VRAM, unlocks the double-buffered registers, lifts the MC blackout,
 * unblanks the saved-enabled CRTCs, and restores VGA rendering.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force update mode to 3 so the surface flip latches */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the pending surface update has been consumed */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6+: unblank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2786
/* Program the memory controller's VRAM/GTT apertures.
 * @rdev: radeon_device pointer
 *
 * Stops the display, programs the system/AGP aperture registers and
 * the framebuffer location, then resumes the display.  Must run with
 * scanout quiesced since it moves the MC's view of VRAM.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must cover both VRAM and the AGP range */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: end address in the high half, start in the low half */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2863
2864 /*
2865  * CP.
2866  */
/* Schedule an indirect buffer (IB) for execution on the GFX ring.
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to execute
 *
 * Emits an optional next_rptr update (register or write-back memory)
 * followed by an INDIRECT_BUFFER packet pointing at the IB.  The
 * next_rptr offsets (+3/+5 words plus the 4-word IB packet) must match
 * the number of dwords written below.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2900
2901
/* Load the CP (command processor) microcode.
 * @rdev: radeon_device pointer
 *
 * Stops the CP, then uploads the PFP and ME firmware images word by
 * word (firmware is stored big-endian) and resets the ucode address
 * registers.  Returns 0 on success, -EINVAL if firmware is missing.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload the pre-fetch parser (PFP) ucode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload the micro engine (ME) ucode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset the read/write pointers into the ucode RAMs */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2933
/* Initialize the CP and emit the clear-state preamble.
 * @rdev: radeon_device pointer
 *
 * Sends ME_INITIALIZE, enables the micro engine, then streams the
 * default golden register state plus a few fixed setup packets onto
 * the GFX ring.  Returns 0 on success or a negative error code if the
 * ring could not be locked.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* un-halt the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* room for the default state plus 19 dwords of fixed packets below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
2999
/**
 * evergreen_cp_resume - reset and restart the CP ring buffer
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP (together with the graphics blocks that must be
 * reset along with it), reprograms the GFX ring buffer registers
 * (size, read/write pointers, writeback addresses, base), restarts the
 * CP via evergreen_cp_start() and finally runs a ring test to confirm
 * the CP is executing packets.
 *
 * Returns 0 on success, or the negative error code from
 * radeon_ring_test() on failure.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* posting read */
	mdelay(15);			/* let the reset settle */
	WREG32(GRBM_SOFT_RESET, 0);	/* release the resets */
	RREG32(GRBM_SOFT_RESET);	/* posting read */

	/* Set ring buffer size */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers.
	 * RB_RPTR_WR_ENA temporarily allows the host to force the rptr. */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: keep the CP from updating the rptr copy */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	/* final cntl value: clears RB_RPTR_WR_ENA set above */
	WREG32(CP_RB_CNTL, tmp);

	/* ring base address, in units of 256 bytes */
	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	/* verify the CP actually executes packets before declaring ready */
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3066
3067 /*
3068  * Core functions
3069  */
3070 static void evergreen_gpu_init(struct radeon_device *rdev)
3071 {
3072         u32 gb_addr_config;
3073         u32 mc_shared_chmap, mc_arb_ramcfg;
3074         u32 sx_debug_1;
3075         u32 smx_dc_ctl0;
3076         u32 sq_config;
3077         u32 sq_lds_resource_mgmt;
3078         u32 sq_gpr_resource_mgmt_1;
3079         u32 sq_gpr_resource_mgmt_2;
3080         u32 sq_gpr_resource_mgmt_3;
3081         u32 sq_thread_resource_mgmt;
3082         u32 sq_thread_resource_mgmt_2;
3083         u32 sq_stack_resource_mgmt_1;
3084         u32 sq_stack_resource_mgmt_2;
3085         u32 sq_stack_resource_mgmt_3;
3086         u32 vgt_cache_invalidation;
3087         u32 hdp_host_path_cntl, tmp;
3088         u32 disabled_rb_mask;
3089         int i, j, ps_thread_count;
3090
3091         switch (rdev->family) {
3092         case CHIP_CYPRESS:
3093         case CHIP_HEMLOCK:
3094                 rdev->config.evergreen.num_ses = 2;
3095                 rdev->config.evergreen.max_pipes = 4;
3096                 rdev->config.evergreen.max_tile_pipes = 8;
3097                 rdev->config.evergreen.max_simds = 10;
3098                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3099                 rdev->config.evergreen.max_gprs = 256;
3100                 rdev->config.evergreen.max_threads = 248;
3101                 rdev->config.evergreen.max_gs_threads = 32;
3102                 rdev->config.evergreen.max_stack_entries = 512;
3103                 rdev->config.evergreen.sx_num_of_sets = 4;
3104                 rdev->config.evergreen.sx_max_export_size = 256;
3105                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3106                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3107                 rdev->config.evergreen.max_hw_contexts = 8;
3108                 rdev->config.evergreen.sq_num_cf_insts = 2;
3109
3110                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3111                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3112                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3113                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3114                 break;
3115         case CHIP_JUNIPER:
3116                 rdev->config.evergreen.num_ses = 1;
3117                 rdev->config.evergreen.max_pipes = 4;
3118                 rdev->config.evergreen.max_tile_pipes = 4;
3119                 rdev->config.evergreen.max_simds = 10;
3120                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3121                 rdev->config.evergreen.max_gprs = 256;
3122                 rdev->config.evergreen.max_threads = 248;
3123                 rdev->config.evergreen.max_gs_threads = 32;
3124                 rdev->config.evergreen.max_stack_entries = 512;
3125                 rdev->config.evergreen.sx_num_of_sets = 4;
3126                 rdev->config.evergreen.sx_max_export_size = 256;
3127                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3128                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3129                 rdev->config.evergreen.max_hw_contexts = 8;
3130                 rdev->config.evergreen.sq_num_cf_insts = 2;
3131
3132                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3133                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3134                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3135                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3136                 break;
3137         case CHIP_REDWOOD:
3138                 rdev->config.evergreen.num_ses = 1;
3139                 rdev->config.evergreen.max_pipes = 4;
3140                 rdev->config.evergreen.max_tile_pipes = 4;
3141                 rdev->config.evergreen.max_simds = 5;
3142                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3143                 rdev->config.evergreen.max_gprs = 256;
3144                 rdev->config.evergreen.max_threads = 248;
3145                 rdev->config.evergreen.max_gs_threads = 32;
3146                 rdev->config.evergreen.max_stack_entries = 256;
3147                 rdev->config.evergreen.sx_num_of_sets = 4;
3148                 rdev->config.evergreen.sx_max_export_size = 256;
3149                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3150                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3151                 rdev->config.evergreen.max_hw_contexts = 8;
3152                 rdev->config.evergreen.sq_num_cf_insts = 2;
3153
3154                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3155                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3156                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3157                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3158                 break;
3159         case CHIP_CEDAR:
3160         default:
3161                 rdev->config.evergreen.num_ses = 1;
3162                 rdev->config.evergreen.max_pipes = 2;
3163                 rdev->config.evergreen.max_tile_pipes = 2;
3164                 rdev->config.evergreen.max_simds = 2;
3165                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3166                 rdev->config.evergreen.max_gprs = 256;
3167                 rdev->config.evergreen.max_threads = 192;
3168                 rdev->config.evergreen.max_gs_threads = 16;
3169                 rdev->config.evergreen.max_stack_entries = 256;
3170                 rdev->config.evergreen.sx_num_of_sets = 4;
3171                 rdev->config.evergreen.sx_max_export_size = 128;
3172                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3173                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3174                 rdev->config.evergreen.max_hw_contexts = 4;
3175                 rdev->config.evergreen.sq_num_cf_insts = 1;
3176
3177                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3178                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3179                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3180                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3181                 break;
3182         case CHIP_PALM:
3183                 rdev->config.evergreen.num_ses = 1;
3184                 rdev->config.evergreen.max_pipes = 2;
3185                 rdev->config.evergreen.max_tile_pipes = 2;
3186                 rdev->config.evergreen.max_simds = 2;
3187                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3188                 rdev->config.evergreen.max_gprs = 256;
3189                 rdev->config.evergreen.max_threads = 192;
3190                 rdev->config.evergreen.max_gs_threads = 16;
3191                 rdev->config.evergreen.max_stack_entries = 256;
3192                 rdev->config.evergreen.sx_num_of_sets = 4;
3193                 rdev->config.evergreen.sx_max_export_size = 128;
3194                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3195                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3196                 rdev->config.evergreen.max_hw_contexts = 4;
3197                 rdev->config.evergreen.sq_num_cf_insts = 1;
3198
3199                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3200                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3201                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3202                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3203                 break;
3204         case CHIP_SUMO:
3205                 rdev->config.evergreen.num_ses = 1;
3206                 rdev->config.evergreen.max_pipes = 4;
3207                 rdev->config.evergreen.max_tile_pipes = 4;
3208                 if (rdev->pdev->device == 0x9648)
3209                         rdev->config.evergreen.max_simds = 3;
3210                 else if ((rdev->pdev->device == 0x9647) ||
3211                          (rdev->pdev->device == 0x964a))
3212                         rdev->config.evergreen.max_simds = 4;
3213                 else
3214                         rdev->config.evergreen.max_simds = 5;
3215                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3216                 rdev->config.evergreen.max_gprs = 256;
3217                 rdev->config.evergreen.max_threads = 248;
3218                 rdev->config.evergreen.max_gs_threads = 32;
3219                 rdev->config.evergreen.max_stack_entries = 256;
3220                 rdev->config.evergreen.sx_num_of_sets = 4;
3221                 rdev->config.evergreen.sx_max_export_size = 256;
3222                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3223                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3224                 rdev->config.evergreen.max_hw_contexts = 8;
3225                 rdev->config.evergreen.sq_num_cf_insts = 2;
3226
3227                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3228                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3229                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3230                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3231                 break;
3232         case CHIP_SUMO2:
3233                 rdev->config.evergreen.num_ses = 1;
3234                 rdev->config.evergreen.max_pipes = 4;
3235                 rdev->config.evergreen.max_tile_pipes = 4;
3236                 rdev->config.evergreen.max_simds = 2;
3237                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3238                 rdev->config.evergreen.max_gprs = 256;
3239                 rdev->config.evergreen.max_threads = 248;
3240                 rdev->config.evergreen.max_gs_threads = 32;
3241                 rdev->config.evergreen.max_stack_entries = 512;
3242                 rdev->config.evergreen.sx_num_of_sets = 4;
3243                 rdev->config.evergreen.sx_max_export_size = 256;
3244                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3245                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3246                 rdev->config.evergreen.max_hw_contexts = 4;
3247                 rdev->config.evergreen.sq_num_cf_insts = 2;
3248
3249                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3250                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3251                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3252                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3253                 break;
3254         case CHIP_BARTS:
3255                 rdev->config.evergreen.num_ses = 2;
3256                 rdev->config.evergreen.max_pipes = 4;
3257                 rdev->config.evergreen.max_tile_pipes = 8;
3258                 rdev->config.evergreen.max_simds = 7;
3259                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3260                 rdev->config.evergreen.max_gprs = 256;
3261                 rdev->config.evergreen.max_threads = 248;
3262                 rdev->config.evergreen.max_gs_threads = 32;
3263                 rdev->config.evergreen.max_stack_entries = 512;
3264                 rdev->config.evergreen.sx_num_of_sets = 4;
3265                 rdev->config.evergreen.sx_max_export_size = 256;
3266                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3267                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3268                 rdev->config.evergreen.max_hw_contexts = 8;
3269                 rdev->config.evergreen.sq_num_cf_insts = 2;
3270
3271                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3272                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3273                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3274                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3275                 break;
3276         case CHIP_TURKS:
3277                 rdev->config.evergreen.num_ses = 1;
3278                 rdev->config.evergreen.max_pipes = 4;
3279                 rdev->config.evergreen.max_tile_pipes = 4;
3280                 rdev->config.evergreen.max_simds = 6;
3281                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3282                 rdev->config.evergreen.max_gprs = 256;
3283                 rdev->config.evergreen.max_threads = 248;
3284                 rdev->config.evergreen.max_gs_threads = 32;
3285                 rdev->config.evergreen.max_stack_entries = 256;
3286                 rdev->config.evergreen.sx_num_of_sets = 4;
3287                 rdev->config.evergreen.sx_max_export_size = 256;
3288                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3289                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3290                 rdev->config.evergreen.max_hw_contexts = 8;
3291                 rdev->config.evergreen.sq_num_cf_insts = 2;
3292
3293                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3294                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3295                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3296                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3297                 break;
3298         case CHIP_CAICOS:
3299                 rdev->config.evergreen.num_ses = 1;
3300                 rdev->config.evergreen.max_pipes = 2;
3301                 rdev->config.evergreen.max_tile_pipes = 2;
3302                 rdev->config.evergreen.max_simds = 2;
3303                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3304                 rdev->config.evergreen.max_gprs = 256;
3305                 rdev->config.evergreen.max_threads = 192;
3306                 rdev->config.evergreen.max_gs_threads = 16;
3307                 rdev->config.evergreen.max_stack_entries = 256;
3308                 rdev->config.evergreen.sx_num_of_sets = 4;
3309                 rdev->config.evergreen.sx_max_export_size = 128;
3310                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3311                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3312                 rdev->config.evergreen.max_hw_contexts = 4;
3313                 rdev->config.evergreen.sq_num_cf_insts = 1;
3314
3315                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3316                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3317                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3318                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3319                 break;
3320         }
3321
3322         /* Initialize HDP */
3323         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3324                 WREG32((0x2c14 + j), 0x00000000);
3325                 WREG32((0x2c18 + j), 0x00000000);
3326                 WREG32((0x2c1c + j), 0x00000000);
3327                 WREG32((0x2c20 + j), 0x00000000);
3328                 WREG32((0x2c24 + j), 0x00000000);
3329         }
3330
3331         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3332         WREG32(SRBM_INT_CNTL, 0x1);
3333         WREG32(SRBM_INT_ACK, 0x1);
3334
3335         evergreen_fix_pci_max_read_req_size(rdev);
3336
3337         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3338         if ((rdev->family == CHIP_PALM) ||
3339             (rdev->family == CHIP_SUMO) ||
3340             (rdev->family == CHIP_SUMO2))
3341                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3342         else
3343                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3344
3345         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3346          * not have bank info, so create a custom tiling dword.
3347          * bits 3:0   num_pipes
3348          * bits 7:4   num_banks
3349          * bits 11:8  group_size
3350          * bits 15:12 row_size
3351          */
3352         rdev->config.evergreen.tile_config = 0;
3353         switch (rdev->config.evergreen.max_tile_pipes) {
3354         case 1:
3355         default:
3356                 rdev->config.evergreen.tile_config |= (0 << 0);
3357                 break;
3358         case 2:
3359                 rdev->config.evergreen.tile_config |= (1 << 0);
3360                 break;
3361         case 4:
3362                 rdev->config.evergreen.tile_config |= (2 << 0);
3363                 break;
3364         case 8:
3365                 rdev->config.evergreen.tile_config |= (3 << 0);
3366                 break;
3367         }
3368         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3369         if (rdev->flags & RADEON_IS_IGP)
3370                 rdev->config.evergreen.tile_config |= 1 << 4;
3371         else {
3372                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3373                 case 0: /* four banks */
3374                         rdev->config.evergreen.tile_config |= 0 << 4;
3375                         break;
3376                 case 1: /* eight banks */
3377                         rdev->config.evergreen.tile_config |= 1 << 4;
3378                         break;
3379                 case 2: /* sixteen banks */
3380                 default:
3381                         rdev->config.evergreen.tile_config |= 2 << 4;
3382                         break;
3383                 }
3384         }
3385         rdev->config.evergreen.tile_config |= 0 << 8;
3386         rdev->config.evergreen.tile_config |=
3387                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3388
3389         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3390                 u32 efuse_straps_4;
3391                 u32 efuse_straps_3;
3392
3393                 efuse_straps_4 = RREG32_RCU(0x204);
3394                 efuse_straps_3 = RREG32_RCU(0x203);
3395                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3396                       ((efuse_straps_3 & 0xf0000000) >> 28));
3397         } else {
3398                 tmp = 0;
3399                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3400                         u32 rb_disable_bitmap;
3401
3402                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3403                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3404                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3405                         tmp <<= 4;
3406                         tmp |= rb_disable_bitmap;
3407                 }
3408         }
3409         /* enabled rb are just the one not disabled :) */
3410         disabled_rb_mask = tmp;
3411         tmp = 0;
3412         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3413                 tmp |= (1 << i);
3414         /* if all the backends are disabled, fix it up here */
3415         if ((disabled_rb_mask & tmp) == tmp) {
3416                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3417                         disabled_rb_mask &= ~(1 << i);
3418         }
3419
3420         for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3421                 u32 simd_disable_bitmap;
3422
3423                 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3424                 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3425                 simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3426                 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3427                 tmp <<= 16;
3428                 tmp |= simd_disable_bitmap;
3429         }
3430         rdev->config.evergreen.active_simds = hweight32(~tmp);
3431
3432         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3433         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3434
3435         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3436         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3437         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3438         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3439         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3440         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3441         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3442
3443         if ((rdev->config.evergreen.max_backends == 1) &&
3444             (rdev->flags & RADEON_IS_IGP)) {
3445                 if ((disabled_rb_mask & 3) == 1) {
3446                         /* RB0 disabled, RB1 enabled */
3447                         tmp = 0x11111111;
3448                 } else {
3449                         /* RB1 disabled, RB0 enabled */
3450                         tmp = 0x00000000;
3451                 }
3452         } else {
3453                 tmp = gb_addr_config & NUM_PIPES_MASK;
3454                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3455                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3456         }
3457         WREG32(GB_BACKEND_MAP, tmp);
3458
3459         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3460         WREG32(CGTS_TCC_DISABLE, 0);
3461         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3462         WREG32(CGTS_USER_TCC_DISABLE, 0);
3463
3464         /* set HW defaults for 3D engine */
3465         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3466                                      ROQ_IB2_START(0x2b)));
3467
3468         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3469
3470         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3471                              SYNC_GRADIENT |
3472                              SYNC_WALKER |
3473                              SYNC_ALIGNER));
3474
3475         sx_debug_1 = RREG32(SX_DEBUG_1);
3476         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3477         WREG32(SX_DEBUG_1, sx_debug_1);
3478
3479
3480         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3481         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3482         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3483         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3484
3485         if (rdev->family <= CHIP_SUMO2)
3486                 WREG32(SMX_SAR_CTL0, 0x00010000);
3487
3488         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3489                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3490                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3491
3492         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3493                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3494                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3495
3496         WREG32(VGT_NUM_INSTANCES, 1);
3497         WREG32(SPI_CONFIG_CNTL, 0);
3498         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3499         WREG32(CP_PERFMON_CNTL, 0);
3500
3501         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3502                                   FETCH_FIFO_HIWATER(0x4) |
3503                                   DONE_FIFO_HIWATER(0xe0) |
3504                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3505
3506         sq_config = RREG32(SQ_CONFIG);
3507         sq_config &= ~(PS_PRIO(3) |
3508                        VS_PRIO(3) |
3509                        GS_PRIO(3) |
3510                        ES_PRIO(3));
3511         sq_config |= (VC_ENABLE |
3512                       EXPORT_SRC_C |
3513                       PS_PRIO(0) |
3514                       VS_PRIO(1) |
3515                       GS_PRIO(2) |
3516                       ES_PRIO(3));
3517
3518         switch (rdev->family) {
3519         case CHIP_CEDAR:
3520         case CHIP_PALM:
3521         case CHIP_SUMO:
3522         case CHIP_SUMO2:
3523         case CHIP_CAICOS:
3524                 /* no vertex cache */
3525                 sq_config &= ~VC_ENABLE;
3526                 break;
3527         default:
3528                 break;
3529         }
3530
3531         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3532
3533         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3534         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3535         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3536         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3537         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3538         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3539         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3540
3541         switch (rdev->family) {
3542         case CHIP_CEDAR:
3543         case CHIP_PALM:
3544         case CHIP_SUMO:
3545         case CHIP_SUMO2:
3546                 ps_thread_count = 96;
3547                 break;
3548         default:
3549                 ps_thread_count = 128;
3550                 break;
3551         }
3552
3553         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3554         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3555         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3556         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3557         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3558         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3559
3560         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3561         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3562         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3563         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3564         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3565         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3566
3567         WREG32(SQ_CONFIG, sq_config);
3568         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3569         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3570         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3571         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3572         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3573         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3574         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3575         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3576         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3577         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3578
3579         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3580                                           FORCE_EOV_MAX_REZ_CNT(255)));
3581
3582         switch (rdev->family) {
3583         case CHIP_CEDAR:
3584         case CHIP_PALM:
3585         case CHIP_SUMO:
3586         case CHIP_SUMO2:
3587         case CHIP_CAICOS:
3588                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3589                 break;
3590         default:
3591                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3592                 break;
3593         }
3594         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3595         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3596
3597         WREG32(VGT_GS_VERTEX_REUSE, 16);
3598         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3599         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3600
3601         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3602         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3603
3604         WREG32(CB_PERF_CTR0_SEL_0, 0);
3605         WREG32(CB_PERF_CTR0_SEL_1, 0);
3606         WREG32(CB_PERF_CTR1_SEL_0, 0);
3607         WREG32(CB_PERF_CTR1_SEL_1, 0);
3608         WREG32(CB_PERF_CTR2_SEL_0, 0);
3609         WREG32(CB_PERF_CTR2_SEL_1, 0);
3610         WREG32(CB_PERF_CTR3_SEL_0, 0);
3611         WREG32(CB_PERF_CTR3_SEL_1, 0);
3612
3613         /* clear render buffer base addresses */
3614         WREG32(CB_COLOR0_BASE, 0);
3615         WREG32(CB_COLOR1_BASE, 0);
3616         WREG32(CB_COLOR2_BASE, 0);
3617         WREG32(CB_COLOR3_BASE, 0);
3618         WREG32(CB_COLOR4_BASE, 0);
3619         WREG32(CB_COLOR5_BASE, 0);
3620         WREG32(CB_COLOR6_BASE, 0);
3621         WREG32(CB_COLOR7_BASE, 0);
3622         WREG32(CB_COLOR8_BASE, 0);
3623         WREG32(CB_COLOR9_BASE, 0);
3624         WREG32(CB_COLOR10_BASE, 0);
3625         WREG32(CB_COLOR11_BASE, 0);
3626
3627         /* set the shader const cache sizes to 0 */
3628         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3629                 WREG32(i, 0);
3630         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3631                 WREG32(i, 0);
3632
3633         tmp = RREG32(HDP_MISC_CNTL);
3634         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3635         WREG32(HDP_MISC_CNTL, tmp);
3636
3637         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3638         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3639
3640         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3641
3642         udelay(50);
3643
3644 }
3645
3646 int evergreen_mc_init(struct radeon_device *rdev)
3647 {
3648         u32 tmp;
3649         int chansize, numchan;
3650
3651         /* Get VRAM informations */
3652         rdev->mc.vram_is_ddr = true;
3653         if ((rdev->family == CHIP_PALM) ||
3654             (rdev->family == CHIP_SUMO) ||
3655             (rdev->family == CHIP_SUMO2))
3656                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3657         else
3658                 tmp = RREG32(MC_ARB_RAMCFG);
3659         if (tmp & CHANSIZE_OVERRIDE) {
3660                 chansize = 16;
3661         } else if (tmp & CHANSIZE_MASK) {
3662                 chansize = 64;
3663         } else {
3664                 chansize = 32;
3665         }
3666         tmp = RREG32(MC_SHARED_CHMAP);
3667         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3668         case 0:
3669         default:
3670                 numchan = 1;
3671                 break;
3672         case 1:
3673                 numchan = 2;
3674                 break;
3675         case 2:
3676                 numchan = 4;
3677                 break;
3678         case 3:
3679                 numchan = 8;
3680                 break;
3681         }
3682         rdev->mc.vram_width = numchan * chansize;
3683         /* Could aper size report 0 ? */
3684         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3685         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3686         /* Setup GPU memory space */
3687         if ((rdev->family == CHIP_PALM) ||
3688             (rdev->family == CHIP_SUMO) ||
3689             (rdev->family == CHIP_SUMO2)) {
3690                 /* size in bytes on fusion */
3691                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3692                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3693         } else {
3694                 /* size in MB on evergreen/cayman/tn */
3695                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3696                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3697         }
3698         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3699         r700_vram_gtt_location(rdev, &rdev->mc);
3700         radeon_update_bandwidth_info(rdev);
3701
3702         return 0;
3703 }
3704
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM status registers plus the CP and DMA engine status
 * registers via dev_info().  Called around reset attempts so the state
 * before and after the reset is visible in the kernel log.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman and newer have a second DMA status register at +0x800 */
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3732
3733 bool evergreen_is_display_hung(struct radeon_device *rdev)
3734 {
3735         u32 crtc_hung = 0;
3736         u32 crtc_status[6];
3737         u32 i, j, tmp;
3738
3739         for (i = 0; i < rdev->num_crtc; i++) {
3740                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3741                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3742                         crtc_hung |= (1 << i);
3743                 }
3744         }
3745
3746         for (j = 0; j < 10; j++) {
3747                 for (i = 0; i < rdev->num_crtc; i++) {
3748                         if (crtc_hung & (1 << i)) {
3749                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3750                                 if (tmp != crtc_status[i])
3751                                         crtc_hung &= ~(1 << i);
3752                         }
3753                 }
3754                 if (crtc_hung == 0)
3755                         return false;
3756                 udelay(100);
3757         }
3758
3759         return true;
3760 }
3761
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks need a reset
 *
 * @rdev: radeon_device pointer
 *
 * Reads the GRBM, DMA, SRBM and VM L2 status registers and translates
 * each busy/pending bit into the corresponding RADEON_RESET_* flag.
 * Returns a mask of blocks that appear hung (0 if the GPU looks idle).
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS: graphics pipeline blocks */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG: DMA engine is expected to report idle */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2: second DMA busy indication */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS: RLC, IH, semaphore, GRBM, VM and MC clients */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	/* display controllers frozen? */
	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3830
/**
 * evergreen_gpu_soft_reset - soft reset the blocks selected in reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* flags from evergreen_gpu_check_soft_reset()
 *
 * Halts the CP (and DMA ring if requested), stops memory controller
 * access, then pulses the matching SOFT_RESET bits in GRBM_SOFT_RESET
 * and SRBM_SOFT_RESET before restoring MC access.  The udelay()s and
 * read-backs between the set/clear writes are part of the reset
 * sequencing — do not reorder.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop MC client access before resetting engines */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* translate the reset mask into SRBM soft-reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* no MC soft reset on IGP parts */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* pulse the GRBM reset bits: set, read back, wait, clear, read back */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	/* pulse the SRBM reset bits the same way */
	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3944
/**
 * evergreen_gpu_pci_config_reset - reset the GPU via the PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier-weight fallback used when the soft reset was not sufficient:
 * halts the CP, DMA and RLC, switches the clocks to bypass, disables
 * bus mastering and MC access, then triggers a PCI config reset and
 * waits for the ASIC to come back (CONFIG_MEMSIZE readable again).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* register reads return all-ones while the ASIC is in reset */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3986
3987 int evergreen_asic_reset(struct radeon_device *rdev)
3988 {
3989         u32 reset_mask;
3990
3991         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3992
3993         if (reset_mask)
3994                 r600_set_bios_scratch_engine_hung(rdev, true);
3995
3996         /* try soft reset */
3997         evergreen_gpu_soft_reset(rdev, reset_mask);
3998
3999         reset_mask = evergreen_gpu_check_soft_reset(rdev);
4000
4001         /* try pci config reset */
4002         if (reset_mask && radeon_hard_reset)
4003                 evergreen_gpu_pci_config_reset(rdev);
4004
4005         reset_mask = evergreen_gpu_check_soft_reset(rdev);
4006
4007         if (!reset_mask)
4008                 r600_set_bios_scratch_engine_hung(rdev, false);
4009
4010         return 0;
4011 }
4012
4013 /**
4014  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4015  *
4016  * @rdev: radeon_device pointer
4017  * @ring: radeon_ring structure holding ring information
4018  *
4019  * Check if the GFX engine is locked up.
4020  * Returns true if the engine appears to be locked up, false if not.
4021  */
4022 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4023 {
4024         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4025
4026         if (!(reset_mask & (RADEON_RESET_GFX |
4027                             RADEON_RESET_COMPUTE |
4028                             RADEON_RESET_CP))) {
4029                 radeon_ring_lockup_update(rdev, ring);
4030                 return false;
4031         }
4032         return radeon_ring_test_lockup(rdev, ring);
4033 }
4034
4035 /*
4036  * RLC
4037  */
4038 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4039 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4040
4041 void sumo_rlc_fini(struct radeon_device *rdev)
4042 {
4043         int r;
4044
4045         /* save restore block */
4046         if (rdev->rlc.save_restore_obj) {
4047                 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4048                 if (unlikely(r != 0))
4049                         dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4050                 radeon_bo_unpin(rdev->rlc.save_restore_obj);
4051                 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4052
4053                 radeon_bo_unref(&rdev->rlc.save_restore_obj);
4054                 rdev->rlc.save_restore_obj = NULL;
4055         }
4056
4057         /* clear state block */
4058         if (rdev->rlc.clear_state_obj) {
4059                 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4060                 if (unlikely(r != 0))
4061                         dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4062                 radeon_bo_unpin(rdev->rlc.clear_state_obj);
4063                 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4064
4065                 radeon_bo_unref(&rdev->rlc.clear_state_obj);
4066                 rdev->rlc.clear_state_obj = NULL;
4067         }
4068
4069         /* clear state block */
4070         if (rdev->rlc.cp_table_obj) {
4071                 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4072                 if (unlikely(r != 0))
4073                         dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4074                 radeon_bo_unpin(rdev->rlc.cp_table_obj);
4075                 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4076
4077                 radeon_bo_unref(&rdev->rlc.cp_table_obj);
4078                 rdev->rlc.cp_table_obj = NULL;
4079         }
4080 }
4081
4082 #define CP_ME_TABLE_SIZE    96
4083
/*
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and populates up to three VRAM buffer objects used by
 * the RLC: the register save/restore list (if rdev->rlc.reg_list is
 * set), the clear state block (if rdev->rlc.cs_data is set) and the CP
 * table (if rdev->rlc.cp_table_size is non-zero).  The in-memory
 * layouts differ per family (BONAIRE+, TAHITI+, older).  On any error
 * everything already allocated is torn down via sumo_rlc_fini().
 * Returns 0 on success or a negative error code.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* extra dwords for the CIK-format save/restore list */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			for (i = 0; i < dws; i++) {
				/* pack two dword-offsets (reg >> 2) per entry */
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI reserves 256 bytes of header in front of the CSB */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* older parts: size = register payload plus
			 * 3 header dwords per extent and a 2-dword trailer */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* 256-byte header: GPU address + size of the CSB */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* header block (3 dwords per extent) followed by the
			 * register payload block at reg_list_blk_index */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* header dw0: payload GPU address (low) */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* header dw1: register byte offset */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* header dw2: flag | payload size in bytes */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* cp table block */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4300
4301 static void evergreen_rlc_start(struct radeon_device *rdev)
4302 {
4303         u32 mask = RLC_ENABLE;
4304
4305         if (rdev->flags & RADEON_IS_IGP) {
4306                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4307         }
4308
4309         WREG32(RLC_CNTL, mask);
4310 }
4311
4312 int evergreen_rlc_resume(struct radeon_device *rdev)
4313 {
4314         u32 i;
4315         const __be32 *fw_data;
4316
4317         if (!rdev->rlc_fw)
4318                 return -EINVAL;
4319
4320         r600_rlc_stop(rdev);
4321
4322         WREG32(RLC_HB_CNTL, 0);
4323
4324         if (rdev->flags & RADEON_IS_IGP) {
4325                 if (rdev->family == CHIP_ARUBA) {
4326                         u32 always_on_bitmap =
4327                                 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4328                         /* find out the number of active simds */
4329                         u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4330                         tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4331                         tmp = hweight32(~tmp);
4332                         if (tmp == rdev->config.cayman.max_simds_per_se) {
4333                                 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4334                                 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4335                                 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4336                                 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4337                                 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4338                         }
4339                 } else {
4340                         WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4341                         WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4342                 }
4343                 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4344                 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4345         } else {
4346                 WREG32(RLC_HB_BASE, 0);
4347                 WREG32(RLC_HB_RPTR, 0);
4348                 WREG32(RLC_HB_WPTR, 0);
4349                 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4350                 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4351         }
4352         WREG32(RLC_MC_CNTL, 0);
4353         WREG32(RLC_UCODE_CNTL, 0);
4354
4355         fw_data = (const __be32 *)rdev->rlc_fw->data;
4356         if (rdev->family >= CHIP_ARUBA) {
4357                 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4358                         WREG32(RLC_UCODE_ADDR, i);
4359                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4360                 }
4361         } else if (rdev->family >= CHIP_CAYMAN) {
4362                 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4363                         WREG32(RLC_UCODE_ADDR, i);
4364                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4365                 }
4366         } else {
4367                 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4368                         WREG32(RLC_UCODE_ADDR, i);
4369                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4370                 }
4371         }
4372         WREG32(RLC_UCODE_ADDR, 0);
4373
4374         evergreen_rlc_start(rdev);
4375
4376         return 0;
4377 }
4378
4379 /* Interrupts */
4380
4381 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4382 {
4383         if (crtc >= rdev->num_crtc)
4384                 return 0;
4385         else
4386                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4387 }
4388
/**
 * evergreen_disable_interrupt_state - force every interrupt source off
 * @rdev: radeon_device pointer
 *
 * Writes the "all masked" value into each interrupt enable register this
 * asic family has: CP ring(s), DMA engine(s), GRBM/SRBM, per-crtc vblank
 * and pageflip masks, DAC autodetect and the six HPD pads.  The HPD
 * writes preserve the polarity bit so hotplug sense is not inverted.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman and newer: three CP rings plus a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* vblank/vline masks, only for crtcs that exist on this asic */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* pageflip interrupt controls, likewise per populated crtc */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* keep only the polarity bit of each HPD pad's control register */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4447
/**
 * evergreen_irq_set - program the hardware interrupt enable registers
 * @rdev: radeon_device pointer
 *
 * Translates the software interrupt state tracked in rdev->irq (ring
 * fence interrupts, per-crtc vblank/pageflip, the six HPD pads, the
 * HDMI/AFMT audio write triggers and the thermal sensor) into per-source
 * hardware enable masks, then writes them all out and finishes with a
 * posting read to flush the writes.
 *
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read current HPD controls with the sense/rx enable bits cleared;
	 * the bits are OR-ed back in below for pads the driver wants active
	 */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	/* the thermal interrupt control register moved on Aruba */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	/* same pattern for the audio packet write-trigger masks */
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	/* second DMA engine only exists on cayman and newer */
	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank is enabled for a crtc if either the drm vblank machinery
	 * or a pending pageflip needs it
	 */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* now flush all the accumulated masks to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts stay unconditionally masked-on per crtc */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}
4677
4678 static void evergreen_irq_ack(struct radeon_device *rdev)
4679 {
4680         u32 tmp;
4681
4682         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4683         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4684         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4685         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4686         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4687         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4688         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4689         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4690         if (rdev->num_crtc >= 4) {
4691                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4692                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4693         }
4694         if (rdev->num_crtc >= 6) {
4695                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4696                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4697         }
4698
4699         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4700         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4701         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4702         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4703         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4704         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4705
4706         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4707                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4708         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4709                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4710         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4711                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4712         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4713                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4714         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4715                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4716         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4717                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4718
4719         if (rdev->num_crtc >= 4) {
4720                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4721                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4722                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4723                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4724                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4725                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4726                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4727                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4728                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4729                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4730                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4731                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4732         }
4733
4734         if (rdev->num_crtc >= 6) {
4735                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4736                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4737                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4738                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4739                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4740                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4741                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4742                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4743                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4744                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4745                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4746                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4747         }
4748
4749         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4750                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4751                 tmp |= DC_HPDx_INT_ACK;
4752                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4753         }
4754         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4755                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4756                 tmp |= DC_HPDx_INT_ACK;
4757                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4758         }
4759         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4760                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4761                 tmp |= DC_HPDx_INT_ACK;
4762                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4763         }
4764         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4765                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4766                 tmp |= DC_HPDx_INT_ACK;
4767                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4768         }
4769         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4770                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4771                 tmp |= DC_HPDx_INT_ACK;
4772                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4773         }
4774         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4775                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4776                 tmp |= DC_HPDx_INT_ACK;
4777                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4778         }
4779
4780         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4781                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4782                 tmp |= DC_HPDx_RX_INT_ACK;
4783                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4784         }
4785         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4786                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4787                 tmp |= DC_HPDx_RX_INT_ACK;
4788                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4789         }
4790         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4791                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4792                 tmp |= DC_HPDx_RX_INT_ACK;
4793                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4794         }
4795         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4796                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4797                 tmp |= DC_HPDx_RX_INT_ACK;
4798                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4799         }
4800         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4801                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4802                 tmp |= DC_HPDx_RX_INT_ACK;
4803                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4804         }
4805         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4806                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4807                 tmp |= DC_HPDx_RX_INT_ACK;
4808                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4809         }
4810
4811         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4812                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4813                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4814                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4815         }
4816         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4817                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4818                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4819                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4820         }
4821         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4822                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4823                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4824                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4825         }
4826         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4827                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4828                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4829                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4830         }
4831         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4832                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4833                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4834                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4835         }
4836         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4837                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4838                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4839                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4840         }
4841 }
4842
/**
 * evergreen_irq_disable - disable interrupt generation
 * @rdev: radeon_device pointer
 *
 * Masks interrupts, waits 1 ms for in-flight events to land, then
 * acknowledges everything still pending and forces every per-source
 * enable register into its disabled state.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4851
/**
 * evergreen_irq_suspend - quiesce interrupts for suspend
 * @rdev: radeon_device pointer
 *
 * Disables all interrupt sources and stops the RLC microcontroller
 * before the device is suspended.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4857
4858 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4859 {
4860         u32 wptr, tmp;
4861
4862         if (rdev->wb.enabled)
4863                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4864         else
4865                 wptr = RREG32(IH_RB_WPTR);
4866
4867         if (wptr & RB_OVERFLOW) {
4868                 wptr &= ~RB_OVERFLOW;
4869                 /* When a ring buffer overflow happen start parsing interrupt
4870                  * from the last not overwritten vector (wptr + 16). Hopefully
4871                  * this should allow us to catchup.
4872                  */
4873                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4874                          wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4875                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4876                 tmp = RREG32(IH_RB_CNTL);
4877                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4878                 WREG32(IH_RB_CNTL, tmp);
4879         }
4880         return (wptr & rdev->ih.ptr_mask);
4881 }
4882
4883 int evergreen_irq_process(struct radeon_device *rdev)
4884 {
4885         u32 wptr;
4886         u32 rptr;
4887         u32 src_id, src_data;
4888         u32 ring_index;
4889         bool queue_hotplug = false;
4890         bool queue_hdmi = false;
4891         bool queue_dp = false;
4892         bool queue_thermal = false;
4893         u32 status, addr;
4894
4895         if (!rdev->ih.enabled || rdev->shutdown)
4896                 return IRQ_NONE;
4897
4898         wptr = evergreen_get_ih_wptr(rdev);
4899
4900 restart_ih:
4901         /* is somebody else already processing irqs? */
4902         if (atomic_xchg(&rdev->ih.lock, 1))
4903                 return IRQ_NONE;
4904
4905         rptr = rdev->ih.rptr;
4906         DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4907
4908         /* Order reading of wptr vs. reading of IH ring data */
4909         rmb();
4910
4911         /* display interrupts */
4912         evergreen_irq_ack(rdev);
4913
4914         while (rptr != wptr) {
4915                 /* wptr/rptr are in bytes! */
4916                 ring_index = rptr / 4;
4917                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4918                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4919
4920                 switch (src_id) {
4921                 case 1: /* D1 vblank/vline */
4922                         switch (src_data) {
4923                         case 0: /* D1 vblank */
4924                                 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
4925                                         DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
4926
4927                                 if (rdev->irq.crtc_vblank_int[0]) {
4928                                         drm_handle_vblank(rdev->ddev, 0);
4929                                         rdev->pm.vblank_sync = true;
4930                                         wake_up(&rdev->irq.vblank_queue);
4931                                 }
4932                                 if (atomic_read(&rdev->irq.pflip[0]))
4933                                         radeon_crtc_handle_vblank(rdev, 0);
4934                                 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4935                                 DRM_DEBUG("IH: D1 vblank\n");
4936
4937                                 break;
4938                         case 1: /* D1 vline */
4939                                 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
4940                                         DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
4941
4942                                 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4943                                 DRM_DEBUG("IH: D1 vline\n");
4944
4945                                 break;
4946                         default:
4947                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4948                                 break;
4949                         }
4950                         break;
4951                 case 2: /* D2 vblank/vline */
4952                         switch (src_data) {
4953                         case 0: /* D2 vblank */
4954                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
4955                                         DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
4956
4957                                 if (rdev->irq.crtc_vblank_int[1]) {
4958                                         drm_handle_vblank(rdev->ddev, 1);
4959                                         rdev->pm.vblank_sync = true;
4960                                         wake_up(&rdev->irq.vblank_queue);
4961                                 }
4962                                 if (atomic_read(&rdev->irq.pflip[1]))
4963                                         radeon_crtc_handle_vblank(rdev, 1);
4964                                 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4965                                 DRM_DEBUG("IH: D2 vblank\n");
4966
4967                                 break;
4968                         case 1: /* D2 vline */
4969                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
4970                                         DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
4971
4972                                 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4973                                 DRM_DEBUG("IH: D2 vline\n");
4974
4975                                 break;
4976                         default:
4977                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4978                                 break;
4979                         }
4980                         break;
4981                 case 3: /* D3 vblank/vline */
4982                         switch (src_data) {
4983                         case 0: /* D3 vblank */
4984                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
4985                                         DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
4986
4987                                 if (rdev->irq.crtc_vblank_int[2]) {
4988                                         drm_handle_vblank(rdev->ddev, 2);
4989                                         rdev->pm.vblank_sync = true;
4990                                         wake_up(&rdev->irq.vblank_queue);
4991                                 }
4992                                 if (atomic_read(&rdev->irq.pflip[2]))
4993                                         radeon_crtc_handle_vblank(rdev, 2);
4994                                 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4995                                 DRM_DEBUG("IH: D3 vblank\n");
4996
4997                                 break;
4998                         case 1: /* D3 vline */
4999                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5000                                         DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5001
5002                                 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5003                                 DRM_DEBUG("IH: D3 vline\n");
5004
5005                                 break;
5006                         default:
5007                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5008                                 break;
5009                         }
5010                         break;
5011                 case 4: /* D4 vblank/vline */
5012                         switch (src_data) {
5013                         case 0: /* D4 vblank */
5014                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5015                                         DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5016
5017                                 if (rdev->irq.crtc_vblank_int[3]) {
5018                                         drm_handle_vblank(rdev->ddev, 3);
5019                                         rdev->pm.vblank_sync = true;
5020                                         wake_up(&rdev->irq.vblank_queue);
5021                                 }
5022                                 if (atomic_read(&rdev->irq.pflip[3]))
5023                                         radeon_crtc_handle_vblank(rdev, 3);
5024                                 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5025                                 DRM_DEBUG("IH: D4 vblank\n");
5026
5027                                 break;
5028                         case 1: /* D4 vline */
5029                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5030                                         DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5031
5032                                 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5033                                 DRM_DEBUG("IH: D4 vline\n");
5034
5035                                 break;
5036                         default:
5037                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5038                                 break;
5039                         }
5040                         break;
5041                 case 5: /* D5 vblank/vline */
5042                         switch (src_data) {
5043                         case 0: /* D5 vblank */
5044                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5045                                         DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5046
5047                                 if (rdev->irq.crtc_vblank_int[4]) {
5048                                         drm_handle_vblank(rdev->ddev, 4);
5049                                         rdev->pm.vblank_sync = true;
5050                                         wake_up(&rdev->irq.vblank_queue);
5051                                 }
5052                                 if (atomic_read(&rdev->irq.pflip[4]))
5053                                         radeon_crtc_handle_vblank(rdev, 4);
5054                                 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5055                                 DRM_DEBUG("IH: D5 vblank\n");
5056
5057                                 break;
5058                         case 1: /* D5 vline */
5059                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5060                                         DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5061
5062                                 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5063                                 DRM_DEBUG("IH: D5 vline\n");
5064
5065                                 break;
5066                         default:
5067                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5068                                 break;
5069                         }
5070                         break;
5071                 case 6: /* D6 vblank/vline */
5072                         switch (src_data) {
5073                         case 0: /* D6 vblank */
5074                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5075                                         DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5076
5077                                 if (rdev->irq.crtc_vblank_int[5]) {
5078                                         drm_handle_vblank(rdev->ddev, 5);
5079                                         rdev->pm.vblank_sync = true;
5080                                         wake_up(&rdev->irq.vblank_queue);
5081                                 }
5082                                 if (atomic_read(&rdev->irq.pflip[5]))
5083                                         radeon_crtc_handle_vblank(rdev, 5);
5084                                 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5085                                 DRM_DEBUG("IH: D6 vblank\n");
5086
5087                                 break;
5088                         case 1: /* D6 vline */
5089                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5090                                         DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5091
5092                                 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5093                                 DRM_DEBUG("IH: D6 vline\n");
5094
5095                                 break;
5096                         default:
5097                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5098                                 break;
5099                         }
5100                         break;
5101                 case 8: /* D1 page flip */
5102                 case 10: /* D2 page flip */
5103                 case 12: /* D3 page flip */
5104                 case 14: /* D4 page flip */
5105                 case 16: /* D5 page flip */
5106                 case 18: /* D6 page flip */
5107                         DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5108                         if (radeon_use_pflipirq > 0)
5109                                 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5110                         break;
5111                 case 42: /* HPD hotplug */
5112                         switch (src_data) {
5113                         case 0:
5114                                 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5115                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5116
5117                                 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5118                                 queue_hotplug = true;
5119                                 DRM_DEBUG("IH: HPD1\n");
5120                                 break;
5121                         case 1:
5122                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5123                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5124
5125                                 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5126                                 queue_hotplug = true;
5127                                 DRM_DEBUG("IH: HPD2\n");
5128                                 break;
5129                         case 2:
5130                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5131                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5132
5133                                 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5134                                 queue_hotplug = true;
5135                                 DRM_DEBUG("IH: HPD3\n");
5136                                 break;
5137                         case 3:
5138                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5139                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5140
5141                                 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5142                                 queue_hotplug = true;
5143                                 DRM_DEBUG("IH: HPD4\n");
5144                                 break;
5145                         case 4:
5146                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5147                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5148
5149                                 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5150                                 queue_hotplug = true;
5151                                 DRM_DEBUG("IH: HPD5\n");
5152                                 break;
5153                         case 5:
5154                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5155                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5156
5157                                 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5158                                 queue_hotplug = true;
5159                                 DRM_DEBUG("IH: HPD6\n");
5160                                 break;
5161                         case 6:
5162                                 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5163                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5164
5165                                 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5166                                 queue_dp = true;
5167                                 DRM_DEBUG("IH: HPD_RX 1\n");
5168                                 break;
5169                         case 7:
5170                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5171                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5172
5173                                 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5174                                 queue_dp = true;
5175                                 DRM_DEBUG("IH: HPD_RX 2\n");
5176                                 break;
5177                         case 8:
5178                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5179                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5180
5181                                 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5182                                 queue_dp = true;
5183                                 DRM_DEBUG("IH: HPD_RX 3\n");
5184                                 break;
5185                         case 9:
5186                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5187                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5188
5189                                 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5190                                 queue_dp = true;
5191                                 DRM_DEBUG("IH: HPD_RX 4\n");
5192                                 break;
5193                         case 10:
5194                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5195                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5196
5197                                 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5198                                 queue_dp = true;
5199                                 DRM_DEBUG("IH: HPD_RX 5\n");
5200                                 break;
5201                         case 11:
5202                                 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5203                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5204
5205                                 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5206                                 queue_dp = true;
5207                                 DRM_DEBUG("IH: HPD_RX 6\n");
5208                                 break;
5209                         default:
5210                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5211                                 break;
5212                         }
5213                         break;
5214                 case 44: /* hdmi */
5215                         switch (src_data) {
5216                         case 0:
5217                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5218                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5219
5220                                 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5221                                 queue_hdmi = true;
5222                                 DRM_DEBUG("IH: HDMI0\n");
5223                                 break;
5224                         case 1:
5225                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5226                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5227
5228                                 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5229                                 queue_hdmi = true;
5230                                 DRM_DEBUG("IH: HDMI1\n");
5231                                 break;
5232                         case 2:
5233                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5234                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5235
5236                                 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5237                                 queue_hdmi = true;
5238                                 DRM_DEBUG("IH: HDMI2\n");
5239                                 break;
5240                         case 3:
5241                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5242                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5243
5244                                 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5245                                 queue_hdmi = true;
5246                                 DRM_DEBUG("IH: HDMI3\n");
5247                                 break;
5248                         case 4:
5249                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5250                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5251
5252                                 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5253                                 queue_hdmi = true;
5254                                 DRM_DEBUG("IH: HDMI4\n");
5255                                 break;
5256                         case 5:
5257                                 if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5258                                         DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5259
5260                                 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5261                                 queue_hdmi = true;
5262                                 DRM_DEBUG("IH: HDMI5\n");
5263                                 break;
5264                         default:
5265                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5266                                 break;
5267                         }
5268                 case 96:
5269                         DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5270                         WREG32(SRBM_INT_ACK, 0x1);
5271                         break;
5272                 case 124: /* UVD */
5273                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5274                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5275                         break;
5276                 case 146:
5277                 case 147:
5278                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5279                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5280                         /* reset addr and status */
5281                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5282                         if (addr == 0x0 && status == 0x0)
5283                                 break;
5284                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5285                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5286                                 addr);
5287                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5288                                 status);
5289                         cayman_vm_decode_fault(rdev, status, addr);
5290                         break;
5291                 case 176: /* CP_INT in ring buffer */
5292                 case 177: /* CP_INT in IB1 */
5293                 case 178: /* CP_INT in IB2 */
5294                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5295                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5296                         break;
5297                 case 181: /* CP EOP event */
5298                         DRM_DEBUG("IH: CP EOP\n");
5299                         if (rdev->family >= CHIP_CAYMAN) {
5300                                 switch (src_data) {
5301                                 case 0:
5302                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5303                                         break;
5304                                 case 1:
5305                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5306                                         break;
5307                                 case 2:
5308                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5309                                         break;
5310                                 }
5311                         } else
5312                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5313                         break;
5314                 case 224: /* DMA trap event */
5315                         DRM_DEBUG("IH: DMA trap\n");
5316                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5317                         break;
5318                 case 230: /* thermal low to high */
5319                         DRM_DEBUG("IH: thermal low to high\n");
5320                         rdev->pm.dpm.thermal.high_to_low = false;
5321                         queue_thermal = true;
5322                         break;
5323                 case 231: /* thermal high to low */
5324                         DRM_DEBUG("IH: thermal high to low\n");
5325                         rdev->pm.dpm.thermal.high_to_low = true;
5326                         queue_thermal = true;
5327                         break;
5328                 case 233: /* GUI IDLE */
5329                         DRM_DEBUG("IH: GUI idle\n");
5330                         break;
5331                 case 244: /* DMA trap event */
5332                         if (rdev->family >= CHIP_CAYMAN) {
5333                                 DRM_DEBUG("IH: DMA1 trap\n");
5334                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5335                         }
5336                         break;
5337                 default:
5338                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5339                         break;
5340                 }
5341
5342                 /* wptr/rptr are in bytes! */
5343                 rptr += 16;
5344                 rptr &= rdev->ih.ptr_mask;
5345                 WREG32(IH_RB_RPTR, rptr);
5346         }
5347         if (queue_dp)
5348                 schedule_work(&rdev->dp_work);
5349         if (queue_hotplug)
5350                 schedule_delayed_work(&rdev->hotplug_work, 0);
5351         if (queue_hdmi)
5352                 schedule_work(&rdev->audio_work);
5353         if (queue_thermal && rdev->pm.dpm_enabled)
5354                 schedule_work(&rdev->pm.dpm.thermal.work);
5355         rdev->ih.rptr = rptr;
5356         atomic_set(&rdev->ih.lock, 0);
5357
5358         /* make sure wptr hasn't changed while processing */
5359         wptr = evergreen_get_ih_wptr(rdev);
5360         if (wptr != rptr)
5361                 goto restart_ih;
5362
5363         return IRQ_HANDLED;
5364 }
5365
/**
 * evergreen_startup - program the asic and bring up the requested rings
 *
 * @rdev: radeon_device pointer
 *
 * Common hardware bring-up path shared by init and resume: programs the
 * PCIe link/MC/GART, loads microcode, sets up writeback, fences and
 * interrupts, then starts the GFX, DMA and (optionally) UVD rings.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* NOTE(review): MC ucode is only loaded here when DPM is disabled —
	 * presumably the DPM path loads it itself; confirm before changing. */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* enable the address translation backend: AGP aperture or PCIe GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers (IGP parts only) */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: any failure here only disables the UVD ring
	 * below (ring_size = 0) instead of failing the whole startup */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 here means UVD was disabled above */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5503
/**
 * evergreen_resume - resume the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Resets and re-posts the card, restores the golden register settings
 * and power management state, then re-runs the common startup path.
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	if (rdev->pm.pm_method == PM_METHOD_DPM)
		radeon_pm_resume(rdev);

	/* mark acceleration usable; cleared again below if startup fails */
	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	/* r is necessarily 0 at this point */
	return r;

}
5537
/**
 * evergreen_suspend - disable the asic
 *
 * @rdev: radeon_device pointer
 *
 * Quiesces power management, audio and UVD, stops the CP and DMA
 * engines, then disables interrupts, writeback and the GART, in that
 * order.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	/* engines are stopped before interrupts/writeback are torn down */
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5552
5553 /* Plan is to move initialization in that function and use
5554  * helper function so that radeon_device_init pretty much
5555  * do nothing more than calling asic specific function. This
5556  * should also allow to remove a bunch of callback function
5557  * like vram_info.
5558  */
/**
 * evergreen_init - asic specific driver and hw initialization
 *
 * @rdev: radeon_device pointer
 *
 * One-time setup for evergreen parts: fetch and validate the video
 * BIOS, reset the asic, post the card if necessary, then bring up the
 * memory controller, firmware, rings, interrupt ring and GART before
 * starting acceleration via evergreen_startup().
 *
 * Returns 0 on success, negative error code on fatal failure. A
 * failure in evergreen_startup() is non-fatal: accel_working is
 * cleared and 0 is still returned so modesetting can work without
 * acceleration.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		/* no BIOS is fatal only on AVIVO-class parts */
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; a failure here is non-fatal, fall back to PCI */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* DCE5 (NI) parts additionally need MC firmware; older
	 * evergreen parts use the r600 microcode loader.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD init failure is tolerated: the UVD ring is only set up
	 * when radeon_uvd_init() succeeds.
	 */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	/* startup failure disables acceleration but is otherwise
	 * non-fatal; tear down what evergreen_startup() brought up.
	 */
	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5687
/**
 * evergreen_fini - asic specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tears down everything evergreen_init() set up, roughly in reverse
 * order: acceleration engines and interrupts first, then UVD, GART,
 * VRAM scratch, GEM, fences, AGP, the buffer manager and atombios
 * state, and finally the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	/* the RLC helper only exists on fusion/IGP parts */
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;	/* guard against use after free */
}
5712
5713 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5714 {
5715         u32 link_width_cntl, speed_cntl;
5716
5717         if (radeon_pcie_gen2 == 0)
5718                 return;
5719
5720         if (rdev->flags & RADEON_IS_IGP)
5721                 return;
5722
5723         if (!(rdev->flags & RADEON_IS_PCIE))
5724                 return;
5725
5726         /* x2 cards have a special sequence */
5727         if (ASIC_IS_X2(rdev))
5728                 return;
5729
5730         if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5731                 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5732                 return;
5733
5734         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5735         if (speed_cntl & LC_CURRENT_DATA_RATE) {
5736                 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5737                 return;
5738         }
5739
5740         DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5741
5742         if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5743             (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5744
5745                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5746                 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5747                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5748
5749                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5750                 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5751                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5752
5753                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5754                 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5755                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5756
5757                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5758                 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5759                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5760
5761                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5762                 speed_cntl |= LC_GEN2_EN_STRAP;
5763                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5764
5765         } else {
5766                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5767                 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5768                 if (1)
5769                         link_width_cntl |= LC_UPCONFIGURE_DIS;
5770                 else
5771                         link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5772                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5773         }
5774 }
5775
/**
 * evergreen_program_aspm - program PCIE ASPM (L0s/L1) settings
 *
 * @rdev: radeon_device pointer
 *
 * Configures Active State Power Management for the PCIE link: PIF
 * pairing, L0s/L1 inactivity timers, PLL power states and ramp-up
 * times while in L1, dynamic lane power states and LS2 exit times.
 * No-op when disabled via the radeon.aspm module parameter or on
 * non-PCIE boards.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* honor radeon.aspm=0 */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s disabled; others get an L0s
	 * inactivity timer programmed below.
	 */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing differs between fusion and discrete systems;
	 * only write back when the value actually changed.
	 */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* L0s/L1 inactivity timers; BARTS and newer use different
	 * values than older evergreen parts.
	 */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* allow the PLLs to power off while the link is in L1 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ also program PLL ramp-up times */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS+ also shorten the LS2 exit time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}